diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 0000000..aab4195 --- /dev/null +++ b/.dockerignore @@ -0,0 +1,4 @@ +Dockerfile +swagger +docker-compose.* +routers/commentsRouter* \ No newline at end of file diff --git a/.gitignore b/.gitignore index adf8f72..98910be 100644 --- a/.gitignore +++ b/.gitignore @@ -1,23 +1,15 @@ -# ---> Go -# If you prefer the allow list template instead of the deny list, see community template: -# https://github.com/github/gitignore/blob/main/community/Golang/Go.AllowList.gitignore -# -# Binaries for programs and plugins -*.exe -*.exe~ -*.dll -*.so -*.dylib +lastupdate.tmp +oc-catalog +swagger +routers/commentsRouter* +.vscode +main +selfapi_bak -# Test binary, built with `go test -c` -*.test - -# Output of the go coverage tool, specifically when used with LiteIDE -*.out - -# Dependency directories (remove the comment below to include it) -# vendor/ - -# Go workspace file -go.work +# Scripts should contain isolated data models to populate. Just one file will be used as demo ref +scripts/* +!scripts/populate_models.sh +!scripts/demo*.json +!scripts/local_imgs +!scripts/generate_selfapi.sh \ No newline at end of file diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000..c76708d --- /dev/null +++ b/Dockerfile @@ -0,0 +1,59 @@ +FROM golang as builder + +LABEL maintainer="Valentin KIVACHUK BURDA" + +ENV DOCKER_ENVIRONMENT=true +ENV CGO_ENABLED=0 +ENV GOOS=linux +ENV GO111MODULE=on + +EXPOSE 49618 + +WORKDIR /go/src/oc-catalog + +####################################################### +RUN go get github.com/beego/bee/v2 + +# Manually download swagger during build +RUN ["/bin/bash", "-c", \ + "set -eo pipefail; \ + mkdir -p swagger; \ + curl -sL https://github.com/beego/swagger/archive/v3.tar.gz | tar xvvvz --overwrite -C swagger --strip-components=1"] + + +COPY go.mod . +COPY go.sum . +RUN go mod download -x + +# COPY . . 
+COPY main.go go.mod go.sum ./ + +COPY controllers controllers +COPY models models +COPY routers routers +COPY selfapi selfapi +COPY services services +COPY conf conf +COPY scripts scripts + +# RUN go build -a -tags netgo -ldflags '-w -extldflags "-static"' -installsuffix cgo . + +RUN bee generate docs + +# COPY . . + + +# FROM golang + +# WORKDIR /go/src/oc-catalog + +# COPY --from=builder /go/src/oc-catalog . + +ENV DOCKER_ENVIRONMENT=true + +RUN go build . + +# UglyFix: Generate comments from swagger +RUN timeout 10 bee run -runargs test || exit 0 + +CMD [ "bee", "run", "-gendoc=true" ] \ No newline at end of file diff --git a/README.md b/README.md index 859b2f2..ccfb89a 100644 --- a/README.md +++ b/README.md @@ -1,2 +1,25 @@ -# oc-catalog +# OC Catalog +OpenCloud Catalog API + + +## Full deploy + +Deploy with docker: +`docker-compose -f docker-compose.yml -f docker-compose.backend.yml up --build` + +and populate DB (or other scripts) with: +`docker exec -it oc-catalog_oc-catalog_1 ./scripts/populate_models.sh ./scripts/demo.json` + +## Dev + +Start DB with `docker-compose up -d` and run the API with `bee run -downdoc=true -gendoc=true` + + +## Multinode + +Deploy +`docker-compose -f docker-compose.yml -f docker-compose.backend.yml -f docker-compose.multi.yml up --build` + +Populate +`./scripts/multinode.sh ./scripts/demo.json` \ No newline at end of file diff --git a/conf/app.conf b/conf/app.conf new file mode 100644 index 0000000..23ca3c0 --- /dev/null +++ b/conf/app.conf @@ -0,0 +1,17 @@ +appname = oc-catalog +httpport = 49618 +runmode = dev +autorender = false +copyrequestbody = true + +EnableDocs = true +SessionOn = true + +DCNAME = "DC_myDC" +DBPOINT = "demo_06042021" + +[mongodb] +url = mongodb://127.0.0.1:27017/beego-demo + +[mongodb_docker] +url = mongodb://mongo:27017/beego-demo diff --git a/controllers/computing.go b/controllers/computing.go new file mode 100644 index 0000000..4cb722d --- /dev/null +++ b/controllers/computing.go @@ -0,0 +1,77 @@ 
+package controllers + +import ( + "cloud.o-forge.io/core/oc-catalog/models" + + beego "github.com/beego/beego/v2/server/web" +) + +// All operations related to the rType computing +type ComputingController struct { + beego.Controller +} + +// @Title Get computing by ID +// @Description Find a computing resource based on ID +// @Param ID path string true "The ID of the resource" +// @Success 200 {object} models.ComputingModel +// @Failure 403 ID is empty +// @router /:ID [get] +func (o *ComputingController) GetOneComputing(ID string) { + if ID != "" { + ob, err := models.GetOneComputing(ID) + if err != nil { + o.Data["json"] = err.Error() + } else { + o.Data["json"] = ob + } + } + o.ServeJSON() +} + +// @Title Add computing +// @Description Submit a computing object +// @Param body body models.ComputingNEWModel true "The object content" +// @Success 200 {string} ID +// @Failure 403 Missing body or fields +// @router / [post] +func (o *ComputingController) PostComputing(body models.ComputingNEWModel) { + err := validate.Struct(body) + // validationErrors := err.(validator.ValidationErrors) + + if err != nil { + o.Data["json"] = err.Error() + o.Ctx.Output.Status = 403 + o.ServeJSON() + return + } + + ID, err := models.PostOneComputing(body) + if err != nil { + o.Ctx.Output.SetStatus(500) + return + } + + o.Data["json"] = map[string]string{"ID": ID} + o.ServeJSON() +} + +// @Title Get multiple computing by IDs +// @Description Return Computing objects if found in the DB. 
Not found IDs will be ignored +// @Param IDs path []string true "List of computing IDs" +// @Success 200 {object} []models.ComputingModel +// @Failure 403 IDs are empty +// @router /multi/:IDs [get] +func (o *ComputingController) GetMultipleComputing(IDs []string) { + if len(IDs) != 0 { + ob, err := models.GetMultipleComputing(IDs) + if err != nil { + o.Ctx.Output.SetStatus(500) + } else { + o.Data["json"] = ob + } + } else { + o.Ctx.Output.SetStatus(403) + } + o.ServeJSON() +} diff --git a/controllers/data.go b/controllers/data.go new file mode 100644 index 0000000..5002877 --- /dev/null +++ b/controllers/data.go @@ -0,0 +1,85 @@ +package controllers + +import ( + "cloud.o-forge.io/core/oc-catalog/models" + + beego "github.com/beego/beego/v2/server/web" + "github.com/go-playground/validator/v10" +) + +// All operations related to the rType data +type DataController struct { + beego.Controller +} + +var validate *validator.Validate + +func init() { + validate = validator.New() +} + +// @Title Get data by ID +// @Description Find rType data based on ID +// @Param ID path string true "The ID of the data resource" +// @Success 200 {object} models.DataModel +// @Failure 403 ID is empty +// @router /:ID [get] +func (o *DataController) GetOneData(ID string) { + if ID != "" { + ob, err := models.GetOneData(ID) + if err != nil { + o.Ctx.Output.SetStatus(500) + } else { + o.Data["json"] = ob + } + } else { + o.Ctx.Output.SetStatus(403) + } + o.ServeJSON() +} + +// @Title Get multiple data by IDs +// @Description Return Data object if found in the DB. 
Not found IDs will be ignored +// @Param IDs path []string true "List of data IDs" +// @Success 200 {object} []models.DataModel +// @Failure 403 IDs are empty +// @router /multi/:IDs [get] +func (o *DataController) GetMultipleData(IDs []string) { + if len(IDs) != 0 { + ob, err := models.GetMultipleData(IDs) + if err != nil { + o.Ctx.Output.SetStatus(500) + } else { + o.Data["json"] = ob + } + } else { + o.Ctx.Output.SetStatus(403) + } + o.ServeJSON() +} + +// @Title Create Data +// @Description Submit data object +// @Param body body models.DataNEWModel true "The object content" +// @Success 200 {string} ID +// @Failure 403 Missing body or fields +// @router / [post] +func (o *DataController) PostData(body models.DataNEWModel) { + err := validate.Struct(body) + // validationErrors := err.(validator.ValidationErrors) + + if err != nil { + o.Ctx.Output.Status = 403 + o.ServeJSON() + return + } + + ID, err := models.PostOneData(body) + if err != nil { + o.Ctx.Output.SetStatus(500) + return + } + + o.Data["json"] = map[string]string{"ID": ID} + o.ServeJSON() +} diff --git a/controllers/datacenter.go b/controllers/datacenter.go new file mode 100644 index 0000000..0387a32 --- /dev/null +++ b/controllers/datacenter.go @@ -0,0 +1,82 @@ +package controllers + +import ( + "cloud.o-forge.io/core/oc-catalog/models" + + beego "github.com/beego/beego/v2/server/web" + "github.com/go-playground/validator/v10" +) + +// DatacenterController operations about datacenters +type DatacenterController struct { + beego.Controller +} + +func init() { + validate = validator.New() +} + +// @Title Get multiple datacenters by IDs +// @Description Return Datacenter objects if found in the DB. 
Not found IDs will be ignored +// @Param IDs path []string true "List of datacenter IDs" +// @Success 200 {object} []models.ComputingModel +// @Failure 403 IDs are empty +// @router /multi/:IDs [get] +func (o *DatacenterController) GetMultipleDatacenter(IDs []string) { + if len(IDs) != 0 { + ob, err := models.GetMultipleDatacenter(IDs) + if err != nil { + o.Ctx.Output.SetStatus(500) + } else { + o.Data["json"] = ob + } + } else { + o.Ctx.Output.SetStatus(403) + } + o.ServeJSON() +} + +// @Title GetOneDatacenter +// @Description find datacenter by ID +// @Param ID path string true "the ID you want to get" +// @Success 200 {object} models.DatacenterModel +// @Failure 403 ID is empty +// @router /:ID [get] +func (o *DatacenterController) GetOneDatacenter(ID string) { + if ID != "" { + ob, err := models.GetOneDatacenter(ID) + if err != nil { + o.Data["json"] = err.Error() + } else { + o.Data["json"] = ob + } + } + o.ServeJSON() +} + +// @Title Create Datacenter +// @Description submit Datacenter object +// @Param body body models.DatacenterNEWModel true "The object content" +// @Success 200 {string} models.DatacenterModel +// @Failure 403 Missing body or fields +// @router / [post] +func (o *DatacenterController) PostDatacenter(body models.DatacenterNEWModel) { + err := validate.Struct(body) + // validationErrors := err.(validator.ValidationErrors) + + if err != nil { + o.Data["json"] = err.Error() + o.Ctx.Output.Status = 403 + o.ServeJSON() + return + } + + ID, err := models.PostOneDatacenter(body) + if err != nil { + o.Ctx.Output.SetStatus(500) + return + } + + o.Data["json"] = map[string]string{"ID": ID} + o.ServeJSON() +} diff --git a/controllers/schedule.go b/controllers/schedule.go new file mode 100644 index 0000000..545323c --- /dev/null +++ b/controllers/schedule.go @@ -0,0 +1,163 @@ +package controllers + +import ( + "time" + + "cloud.o-forge.io/core/oc-catalog/models" + "github.com/beego/beego/v2/core/logs" + beego "github.com/beego/beego/v2/server/web" +) + 
+type ScheduleController struct { + beego.Controller +} + +// @Title Create schedule +// @Description Create schedule for a workflow. It will return some future executions just as information +// @Param dcName query string true "Name of the node (oc-catalog) from where the workflow comes." +// @Param workflowName query string true "Workflow Name" +// @Param cron query string true "Cron syntax with year. If no year is specified, will use the current" +// @Param duration query uint true "Duration in seconds" +// @Param startDate query time.Time true "RFC3339 time for startDate" +// @Param stopDate query time.Time true "RFC3339 time for stopDate" +// @Param requirements body models.ExecutionRequirementsModel true "The object content" +// @Success 200 {object} models.ScheduleInfo +// @Failure 403 Authentication issue +// @Failure 400 workflowName not found or empty +// // @Security jwtAPIToken +// @router /book [post] +func (u *ScheduleController) CreateSchedule(dcName, workflowName, cron string, duration uint, startDate, stopDate time.Time, requirements models.ExecutionRequirementsModel) { + // token := u.Ctx.Input.GetData("jwtAPIToken").(string) + + //FIXME: Passing a date as "2021-07-15 00:00:00 0000 UTC" break the controller but return 200. Should return 4xx + + username := "asd" + + nextIters, err := models.CreateScheduleWorkflow(dcName, username, workflowName, cron, duration, startDate, stopDate, requirements) + if err != nil { + u.CustomAbort(400, err.Error()) + } + + u.Data["json"] = nextIters + u.ServeJSON() + + return +} + +//TODO: This node corresponds to a unique DC, which it owns. We must restructure the code in order to +// allow a unique DC. 
And maybe discuss more this point + +// @Title Check if schedule can be created in this DC +// @Description Check for availability of this DC +// @Param cron query string true "Cron syntax" +// @Param duration query uint true "Duration in seconds" +// @Param startDate query time.Time true "RFC3339 time for startDate" +// @Param stopDate query time.Time true "RFC3339 time for stopDate" +// @Param requirements body models.ExecutionRequirementsModel true "The object content" +// @Success 200 The schedule can be created +// @Failure 403 Authentication issue +// @Failure 400 Other error. Check the output +// // @Security jwtAPIToken +// @router /check [post] +func (u *ScheduleController) CheckSchedule(cron string, duration uint, startDate, stopDate time.Time, requirements models.ExecutionRequirementsModel) { + // token := u.Ctx.Input.GetData("jwtAPIToken").(string) + + // username := "asd" + + if cron == "" { + u.CustomAbort(400, "Tasks cronString must not be empty") + } + + // Check Dates + if startDate.After(stopDate) || startDate.Equal(stopDate) { + u.CustomAbort(400, "startDate must be before stopDate") + } + + if startDate.Before(time.Now().UTC()) { + u.CustomAbort(400, "Current server time ("+time.Now().UTC().String()+") is after the startDate ("+startDate.String()+")") + } + + err := models.CheckSchedule(cron, duration, startDate, stopDate, requirements) + if err != nil { + logs.Warning(err) + u.CustomAbort(400, err.Error()) + } + + // u.Data["json"] = nextIters + // u.ServeJSON() + + return +} + +// @Title Get schedules +// @Description Get a list of next startDates schedules (inclusive). If timezone is not specified, will assume UTC +// @Param startDate query time.Time true "Start date" +// @Param stopDate query time.Time true "End date" +// @Success 200 {object} []models.ScheduleDB +// @Success 201 Too much elements within the range of dates +// @Failure 403 Authentication issue +// @Failure 400 Other error. 
Check the output +// // @Security jwtAPIToken +// @router / [get] +func (u *ScheduleController) GetSchedules(startDate time.Time, stopDate time.Time) { + // token := u.Ctx.Input.GetData("jwtAPIToken").(string) + + // username := "asd" + + data, maxLimit, err := models.GetSchedules(startDate, stopDate) + if err != nil { + logs.Warning(err) + u.CustomAbort(400, err.Error()) + } + + if maxLimit { + u.Ctx.Output.Status = 201 + } + + u.Data["json"] = data + u.ServeJSON() + + return +} + +// @Title Get next schedule +// @Description Give a date, get the next date where there are at least on schedule. If no hours specified, will assume 00:00 +// @Param baseDate query time.Time true "Base date" +// @Success 200 {object} *time.Time +// @Failure 403 Authentication issue +// @Failure 400 Other error. Check the output +// // @Security jwtAPIToken +// @router /next [get] +func (u *ScheduleController) GetNextSchedules(baseDate time.Time) { + // token := u.Ctx.Input.GetData("jwtAPIToken").(string) + + // username := "asd" + + futureDate := models.GetFarSchedules(baseDate, true) + + u.Data["json"] = futureDate + u.ServeJSON() + + return +} + +// @Title Get previous schedule +// @Description Give a date, get the previous date where there are at least on schedule. If no hours specified, will assume 00:00 +// @Param baseDate query time.Time true "Base date" +// @Success 200 {object} *time.Time +// @Failure 403 Authentication issue +// @Failure 400 Other error. 
Check the output +// // @Security jwtAPIToken +// @router /previous [get] +func (u *ScheduleController) GetPreviousSchedules(baseDate time.Time) { + // token := u.Ctx.Input.GetData("jwtAPIToken").(string) + + // username := "asd" + + futureDate := models.GetFarSchedules(baseDate, false) + + u.Data["json"] = futureDate + u.ServeJSON() + + return +} diff --git a/controllers/search.go b/controllers/search.go new file mode 100644 index 0000000..66e54f2 --- /dev/null +++ b/controllers/search.go @@ -0,0 +1,31 @@ +package controllers + +import ( + "cloud.o-forge.io/core/oc-catalog/models" + + beego "github.com/beego/beego/v2/server/web" +) + +type SearchController struct { + beego.Controller +} + +// TODO: Search by word is very very inneficent for not small databases +// @Title Search by word +// @Description find resources by word +// @Param word query string true "Word to search across all resources" +// @Success 200 {object} models.SearchResult +// @Failure 503 Internal error +// @router /byWord [get] +func (o *SearchController) FindByWord(word string) { + if word != "" { + ob, err := models.FindByWord(word) + if err != nil { + o.Data["json"] = err.Error() + o.Ctx.Output.Status = 503 + } else { + o.Data["json"] = ob + } + } + o.ServeJSON() +} diff --git a/controllers/storage.go b/controllers/storage.go new file mode 100644 index 0000000..3caddd1 --- /dev/null +++ b/controllers/storage.go @@ -0,0 +1,77 @@ +package controllers + +import ( + "cloud.o-forge.io/core/oc-catalog/models" + + beego "github.com/beego/beego/v2/server/web" +) + +// StorageController operations about storage +type StorageController struct { + beego.Controller +} + +// @Title Get +// @Description find storage by ID +// @Param ID path string true "the ID you want to get" +// @Success 200 {object} models.StorageModel +// @Failure 403 ID is empty +// @router /:ID [get] +func (o *StorageController) GetOneStorage(ID string) { + if ID != "" { + ob, err := models.GetOneStorage(ID) + if err != nil { + 
o.Data["json"] = err.Error() + } else { + o.Data["json"] = ob + } + } + o.ServeJSON() +} + +// @Title Get multiple storages by IDs +// @Description Return Storage objects if found in the DB. Not found IDs will be ignored +// @Param IDs path []string true "List of storage IDs" +// @Success 200 {object} []models.ComputingModel +// @Failure 403 IDs are empty +// @router /multi/:IDs [get] +func (o *StorageController) GetMultipleStorage(IDs []string) { + if len(IDs) != 0 { + ob, err := models.GetMultipleStorage(IDs) + if err != nil { + o.Ctx.Output.SetStatus(500) + } else { + o.Data["json"] = ob + } + } else { + o.Ctx.Output.SetStatus(403) + } + o.ServeJSON() +} + +// @Title Create Storage +// @Description submit storage object +// @Param body body models.StorageNEWModel true "The object content" +// @Success 200 {string} models.StorageModel +// @Failure 403 Missing body or fields +// @router / [post] +func (o *StorageController) PostStorage(body models.StorageNEWModel) { + err := validate.Struct(body) + // validationErrors := err.(validator.ValidationErrors) + + if err != nil { + o.Data["json"] = err.Error() + o.Ctx.Output.Status = 403 + o.ServeJSON() + return + } + + ID, err := models.PostOneStorage(body) + if err != nil { + o.Ctx.Output.SetStatus(500) + return + } + + o.Data["json"] = map[string]string{"ID": ID} + o.ServeJSON() +} diff --git a/controllers/tokens.go b/controllers/tokens.go new file mode 100644 index 0000000..414cbb9 --- /dev/null +++ b/controllers/tokens.go @@ -0,0 +1,68 @@ +package controllers + +import ( + "errors" + "time" + + "github.com/beego/beego/v2/core/logs" + "github.com/dgrijalva/jwt-go" +) + +const mySuperSecretKey = "jdnfksdmfksd" + +func CreateToken(userId string) (string, error) { + var err error + //Creating Access Token + // os.Setenv("ACCESS_SECRET", "jdnfksdmfksd") //this should be in an env file + atClaims := jwt.MapClaims{} + atClaims["authorized"] = true + atClaims["user_id"] = userId + atClaims["exp"] = 
time.Now().UTC().Add(time.Hour * 15).Unix() + at := jwt.NewWithClaims(jwt.SigningMethodHS256, atClaims) + token, err := at.SignedString([]byte(mySuperSecretKey)) + if err != nil { + return "", err + } + return token, nil +} + +func IsValidToken(jwtToken string) (*jwt.Token, error) { + token, err := jwt.Parse(jwtToken, func(token *jwt.Token) (interface{}, error) { + //TODO: Validate expected algorithm + return []byte(mySuperSecretKey), nil + }) + + var message string + + if err == nil && token.Valid { + return token, nil + } else if ve, ok := err.(*jwt.ValidationError); ok { + if ve.Errors&jwt.ValidationErrorMalformed != 0 { + message = "Token " + jwtToken + " is not even a token" + } else if ve.Errors&(jwt.ValidationErrorExpired|jwt.ValidationErrorNotValidYet) != 0 { + message = "Token is either expired or not active yet" + } else { + message = "Couldn't handle this token: " + err.Error() + } + } + + logs.Debug(message) + return nil, errors.New(message) +} + +func GetUsernameFromToken(token string) (string, error) { + + tokenObj, err := IsValidToken(token) + if err != nil { + logs.Debug(err) + return "", err + } + + if claims, ok := tokenObj.Claims.(jwt.MapClaims); ok { + return claims["user_id"].(string), nil + } + + logs.Debug("Unknow JWT error") + return "", errors.New("Unknow JWT error") + +} diff --git a/controllers/user.go b/controllers/user.go new file mode 100644 index 0000000..9a19d41 --- /dev/null +++ b/controllers/user.go @@ -0,0 +1,54 @@ +package controllers + +import ( + "cloud.o-forge.io/core/oc-catalog/models" + beego "github.com/beego/beego/v2/server/web" +) + +type UserController struct { + beego.Controller +} + +// @Title Login +// @Description Logs user into the system +// @Param username query string true "The username for login" +// @Param password query string true "The password for login" +// @Success 200 {string} login success +// @Failure 403 user not exist +// @router /login [get] +func (u *UserController) Login() { + username := 
u.GetString("username") + password := u.GetString("password") + + if models.Login(username, password) { + token, err := CreateToken(username) + + if err != nil { + u.Data["json"] = err.Error() + u.Ctx.Output.Status = 503 + u.ServeJSON() + return + } + + u.Ctx.SetCookie("token", token) + u.Ctx.Output.Header("Authorization", token) //FIXME: Some more generic way to use the name of the header + u.Data["json"] = "login success" + u.ServeJSON() + return + + } + u.Ctx.Output.Status = 403 + u.Data["json"] = "user not exist" + u.ServeJSON() + +} + +// @Title logout +// @Description Logs out current logged in user session +// @Success 200 {string} logout success +// // @Security mySecurityPathNameApiKey +// @router /logout [get] +func (u *UserController) Logout() { + u.Data["json"] = "logout success" + u.ServeJSON() +} diff --git a/controllers/workflow.go b/controllers/workflow.go new file mode 100644 index 0000000..db97d61 --- /dev/null +++ b/controllers/workflow.go @@ -0,0 +1,337 @@ +package controllers + +import ( + "time" + + "cloud.o-forge.io/core/oc-catalog/models" + "github.com/beego/beego/v2/core/logs" + beego "github.com/beego/beego/v2/server/web" + "github.com/vk496/cron" +) + +type WorkflowController struct { + beego.Controller +} + +// @Title Create a new workflow +// @Description Create a name for the new workflow +// @Param workflowName query string true "Name of the workflow" +// @Success 200 {string} Workflow created succeful +// @Failure 403 Authentication issue +// @Failure 400 {string} Other error +// // @Security jwtAPIToken +// @router / [post] +func (u *WorkflowController) CreateWorkflow(workflowName string) { + // token := u.Ctx.Input.GetData("jwtAPIToken").(string) + + // //TODO: Implement as swagger security definition (api key?) 
+ // username, err := GetUsernameFromToken(token) + // if err != nil { + // u.Data["json"] = "No valid token" + // u.Ctx.Output.Status = 403 + // u.ServeJSON() + // return + // } + + username := "asd" + + if err := models.CreateWorkflow(username, workflowName); err != nil { + u.Data["json"] = err.Error() + u.Ctx.Output.Status = 400 + u.ServeJSON() + return + } +} + +// @Title List workflows +// @Description List available workflows +// @Success 200 []string List of workflows +// @Failure 403 Authentication issue +// @Failure 400 {string} Other error +// // @Security jwtAPIToken +// @router / [get] +func (u *WorkflowController) ListWorkflows() { + // token := u.Ctx.Input.GetData("jwtAPIToken").(string) + + username := "asd" + + w := models.GetWorkspace(username) + + if w == nil { + // No username + u.Data["json"] = "Workspace doesn't exist" + u.Ctx.Output.Status = 400 + u.ServeJSON() + } + + u.Data["json"] = w.GetWorkflows() + u.ServeJSON() +} + +// @Title Get Workflow +// @Description Get a workflow by name +// @Param workflowName path string true "Workflow Name" +// @Success 200 {object} models.Workflow +// @Failure 403 Authentication issue +// @Failure 400 {string} Other error +// // @Security jwtAPIToken +// @router /:workflowName [get] +func (u *WorkflowController) GetWorkflow(workflowName string) { + // token := u.Ctx.Input.GetData("jwtAPIToken").(string) + + username := "asd" + + proj, err := models.GetWorkflow(username, workflowName) + if err != nil { + u.SetData(err.Error()) + u.Ctx.Output.Status = 400 + u.ServeJSON() + return + } + + u.Data["json"] = proj + u.ServeJSON() +} + +// @Title Add new object to a Workflow +// @Description Create a Rtype object from already added resources to the workspace +// @Param workflowName path string true "workflow Name" +// @Param rID query string true "rID of already existing item in Workspace" +// @Success 200 {string} ID of the new object (rObjID) +// @Failure 403 Authentication issue +// @Failure 400 {string} Other 
error +// // @Security jwtAPIToken +// @router /:workflowName/add [post] +func (u *WorkflowController) AddElementWorkflow(workflowName, rID string) { + username := "asd" + + rObjID, err := models.CreateObjectInWorkflow(username, workflowName, rID) + if err != nil { + logs.Debug(err.Error()) + u.Data["json"] = err.Error() + u.Ctx.Output.Status = 400 + u.ServeJSON() + return + } + + u.Data["json"] = rObjID + u.ServeJSON() + return +} + +// @Title Parse mxGraph +// @Description If we use this aproach to transofrm mxgraph representation in our representation, we should not use other API calls for modify the project structure or we'll have inconsistencies. +// @Param workflowName path string true "Workflow Name" +// @Param xmlData body string true "Xml representation of the workflow" +// @Success 200 The xmlgraph consumed correctly +// @Success 201 The xmlgraph consumed with issues +// @Failure 403 Authentication issue +// @Failure 400 {string} Other error +// @router /:workflowName/mxGraphParser [post] +func (u *WorkflowController) MxGraphParser(workflowName, xmlData string) { + username := "asd" + err, mxissues := models.ParseMxGraph(username, workflowName, xmlData) + if err != nil { + logs.Debug(err.Error()) + u.CustomAbort(400, err.Error()) + } + + if len(mxissues) > 0 { + strErrors := make([]string, len(mxissues)) + + for i, err := range mxissues { + strErrors[i] = err.Error() + } + + u.Data["json"] = strErrors + u.Ctx.Output.Status = 201 + u.ServeJSON() + return + } + +} + +// @Title Get mxGraph last status +// @Description Obtain the last mxgraph XML status from the workflow +// @Param workflowName path string true "Workflow Name" +// @Success 200 The xmlgraph +// @Success 201 Empty workflow +// @Failure 403 Authentication issue +// @Failure 400 {string} Other error +// @router /:workflowName/mxGraphParser [get] +func (u *WorkflowController) MxGraphParserConsume(workflowName string) { + username := "asd" + xmlData, err := models.GetMxGraph(username, workflowName) 
+ if err != nil { + logs.Debug(err.Error()) + u.Data["json"] = err.Error() + u.Ctx.Output.Status = 400 + u.ServeJSON() + return + } + + if xmlData == nil { + u.Ctx.Output.Status = 201 + } else { + u.Ctx.Output.Status = 200 + u.Ctx.Output.Body([]byte(*xmlData)) + u.ServeXML() + } + return +} + +// @Title Create a realtionship between two Robjects +// @Description Create a Rtype object from already added resources to the workspace +// @Param workflowName path string true "Workflow Name" +// @Param rObjIDsource query string true "Robject source. Usually Data" +// @Param isInput query bool true "If the operation is for input (true) linkage or output (false)" +// @Param rObjIDtarger query string true "Robject where will be written the association" +// @Success 200 {string} ID of the new object (rObjID) +// @Failure 403 Authentication issue +// @Failure 400 {string} Other error +// // @Security jwtAPIToken +// @router /:workflowName/link [post] +func (u *WorkflowController) LinkElementsWorkflow(workflowName, rObjIDsource, rObjIDtarger string, isInput bool) { + username := "asd" + + err := models.LinkObjectsInWorkspace(username, workflowName, rObjIDsource, isInput, rObjIDtarger) + if err != nil { + logs.Debug(err.Error()) + u.Data["json"] = err.Error() + u.Ctx.Output.Status = 400 + u.ServeJSON() + return + } +} + +// @Title Get Schedule +// @Description Obtain the desired schedule of this workflow +// @Param workflowName path string true "Workflow Name" +// @Success 200 {object} models.ScheduleTime +// @Failure 403 Authentication issue +// @Failure 400 Workflow doesn't exist +// @Failure 401 Other error +// @router /:workflowName/schedule [get] +func (u *WorkflowController) GetWorkflowSchedule(workflowName string) { + username := "asd" + sched, err := models.GetWorkflowSchedule(username, workflowName) + + // Some error + if err != nil { + u.CustomAbort(401, err.Error()) + } + + // No workflow + if sched == nil { + u.Ctx.Output.Status = 400 + return + } + + 
u.Ctx.Output.Status = 200 + u.Data["json"] = sched + u.ServeJSON() +} + +// @Title Set Schedule +// @Description Set desired schedule by the user. No other effects a part of saving the user input +// @Param workflowName path string true "Workflow Name" +// @Param isService query bool true "True: Service, False: Task" +// @Param startDate query time.Time true "RFC3339 time for startDate" +// @Param stopDate query time.Time true "RFC3339 time for stopDate" +// @Param events query string false "List of events separated by comma" +// @Param cronString query string false "Cron string" +// @Param duration query uint false "Duration in seconds" +// @Success 200 {object} models.ScheduleInfo +// @Failure 403 Authentication issue +// @Failure 400 Workflow doesn't exist +// @Failure 401 Other error +// @Failure 402 Bad user input +// @router /:workflowName/schedule [put] +func (u *WorkflowController) SetWorkflowSchedule(workflowName, cronString, events string, isService bool, startDate, stopDate time.Time, duration uint) { + username := "asd" + + // Check Dates + if startDate.After(stopDate) || startDate.Equal(stopDate) { + u.CustomAbort(402, "startDate must be before stopDate") + } + + if startDate.Before(time.Now().UTC()) { + u.CustomAbort(402, "Current server time ("+time.Now().UTC().String()+") is after the startDate ("+startDate.String()+")") + } + + // Tasks must have cron and duration + if !isService { + if cronString == "" { + u.CustomAbort(402, "Tasks cronString must not be empty") + } + + if duration == 0 { + u.CustomAbort(402, "Tasks duration musn't be 0") + } + + _, err := cron.Parse(cronString) + // Check cron + if err != nil { + u.CustomAbort(402, "Bad cron message: "+err.Error()) + } + + } + + schedInfo, err := models.SetWorkflowSchedule(username, workflowName, cronString, events, isService, startDate, stopDate, duration) + + // Some error + if err != nil { + u.CustomAbort(401, err.Error()) + } + + // No workflow + if schedInfo == nil { + u.CustomAbort(400, "") 
+ } + + u.Ctx.Output.Status = 200 + u.Data["json"] = schedInfo + u.ServeJSON() +} + +// @Title Check Schedule +// @Description Check if we can schedule the project in other DCs. Must set a desired schedule first! +// @Param workflowName path string true "Workflow Name" +// @Success 200 {object} []models.DCstatus +// @Failure 403 Authentication issue +// @Failure 401 Other error +// @router /:workflowName/schedule/check [get] +func (u *WorkflowController) CheckWorkflowSchedule(workflowName string) { + username := "asd" + + data, err := models.CheckAndBookWorkflowSchedule(username, workflowName, false) + if err != nil { + u.CustomAbort(401, err.Error()) + } + + u.Ctx.Output.Status = 200 + u.Data["json"] = data + u.ServeJSON() + return +} + +// @Title Book Schedule +// @Description Book a schedule in all DCs of the workflow. Must set a desired schedule first! +// @Param workflowName path string true "Workflow Name" +// @Success 200 {object} []models.DCstatus +// @Failure 403 Authentication issue +// @Failure 401 Other error. 
Check output +// @router /:workflowName/schedule/book [post] +func (u *WorkflowController) BookWorkflowSchedule(workflowName string) { + username := "asd" + + data, err := models.CheckAndBookWorkflowSchedule(username, workflowName, true) + if err != nil { + u.CustomAbort(401, err.Error()) + } + + u.Ctx.Output.Status = 200 + u.Data["json"] = data + u.ServeJSON() + return +} diff --git a/controllers/workspace.go b/controllers/workspace.go new file mode 100644 index 0000000..9c69bed --- /dev/null +++ b/controllers/workspace.go @@ -0,0 +1,151 @@ +package controllers + +import ( + "cloud.o-forge.io/core/oc-catalog/models" + beego "github.com/beego/beego/v2/server/web" +) + +type WorkspaceController struct { + beego.Controller +} + +// @Title Add model to workspace +// @Description Insert a resource in the workspace +// @Param id query string true "ID of a resource" +// @Param rtype query string true "Type of resource" +// @Success 200 {string} login success +// @Failure 403 Authentication issue +// @Failure 400 {string} Other error +// // @Security jwtAPIToken +// @router / [post] +func (u *WorkspaceController) AddModel(id, rtype string) { + // token := u.Ctx.Input.GetData("jwtAPIToken").(string) + + // //TODO: Implement as swagger security definition (api key?) 
+ // username, err := GetUsernameFromToken(token) + // if err != nil { + // u.Data["json"] = "No valid token" + // u.Ctx.Output.Status = 403 + // u.ServeJSON() + // return + // } + + var err error + username := "asd" + + w := models.GetWorkspace(username) + + if w == nil { + w, err = models.NewWorkspace(username) + if err != nil { + u.Data["json"] = err.Error() + u.Ctx.Output.Status = 400 + u.ServeJSON() + return + } + } + + // w.NewResource(id, rtype) + + if err := models.AddResource(username, id, rtype); err != nil { + u.Data["json"] = err.Error() + u.Ctx.Output.Status = 400 + u.ServeJSON() + return + } +} + +// @Title Get workspace +// @Description Get workspace elements based on user_id token +// @Success 200 {object} models.Workspace +// @Failure 403 Authentication issue +// // @Security jwtAPIToken +// @router /list [get] +func (u *WorkspaceController) ListWorkspace() { + // token := u.Ctx.Input.GetData("jwtAPIToken").(string) + + // //TODO: Implement as swagger security definition (api key?) + // username, err := GetUsernameFromToken(token) + // if err != nil { + // u.Data["json"] = "No valid token" + // u.Ctx.Output.Status = 403 + // u.ServeJSON() + // return + // } + + username := "asd" + + ws := models.GetWorkspace(username) + + // if ws == nil { + // u.Ctx.Output.Status = 503 + // return + // } + + u.Data["json"] = ws + u.ServeJSON() + +} + +// @Title Get full workspace +// @Description Get full workspace elements based on user_id token +// @Success 200 {object} models.WorkspaceModel +// @Failure 403 Authentication issue +// // @Security jwtAPIToken +// @router /list_model [get] +func (u *WorkspaceController) ListWorkspaceModel() { + // token := u.Ctx.Input.GetData("jwtAPIToken").(string) + + // //TODO: Implement as swagger security definition (api key?) 
+ // username, err := GetUsernameFromToken(token) + // if err != nil { + // u.Data["json"] = "No valid token" + // u.Ctx.Output.Status = 403 + // u.ServeJSON() + // return + // } + + username := "asd" + + val, err := models.ListFullWorkspace(username) + + if err != nil { + u.Ctx.Output.Status = 503 + return + } + + u.Data["json"] = val + u.ServeJSON() + +} + +// @Title Delete element from user workspace +// @Description Remove a resource from the workspace +// @Param id query string true "ID of a resource" +// @Param rtype query string true "Type of resource" +// @Success 200 {string} Removed succeful +// @Failure 403 Authentication issue +// @Failure 400 {string} Other error +// // @Security jwtAPIToken +// @router / [delete] +func (u *WorkspaceController) DeleteElement(id, rtype string) { + // token := u.Ctx.Input.GetData("jwtAPIToken").(string) + + // //TODO: Implement as swagger security definition (api key?) + // username, err := GetUsernameFromToken(token) + // if err != nil { + // u.Data["json"] = "No valid token" + // u.Ctx.Output.Status = 403 + // u.ServeJSON() + // return + // } + + username := "asd" + + if err := models.RemoveResource(username, id, rtype); err != nil { + u.Data["json"] = err.Error() + u.Ctx.Output.Status = 400 + u.ServeJSON() + return + } +} diff --git a/docker-compose.backend.yml b/docker-compose.backend.yml new file mode 100644 index 0000000..0ea62ff --- /dev/null +++ b/docker-compose.backend.yml @@ -0,0 +1,15 @@ +version: '3.4' + +services: + oc-catalog: + build: . + container_name: oc-catalog + restart: always + environment: + - DOCKER_DCNAME=DC_myDC + depends_on: + - mongo + networks: + - catalog + ports: + - 49618:49618 \ No newline at end of file diff --git a/docker-compose.multi.yml b/docker-compose.multi.yml new file mode 100644 index 0000000..4ae8d6e --- /dev/null +++ b/docker-compose.multi.yml @@ -0,0 +1,27 @@ +version: '3.4' + +services: + dc1: + build: . 
+ restart: always + container_name: dc1 + environment: + - DOCKER_DCNAME=DC_superDC1 + depends_on: + - mongo + networks: + - catalog + ports: + - 49619:49618 + dc2: + build: . + restart: always + container_name: dc2 + environment: + - DOCKER_DCNAME=DC_superDC2 + depends_on: + - mongo + networks: + - catalog + ports: + - 49620:49618 \ No newline at end of file diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 0000000..32644f9 --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,30 @@ +version: '3.4' + +services: + mongo: + image: 'mongo:latest' + networks: + - catalog + ports: + - 27017:27017 + container_name: mongo + volumes: + - oc-catalog-data:/data/db + - oc-catalog-data:/data/configdb + + mongo-express: + image: "mongo-express:latest" + restart: always + depends_on: + - mongo + networks: + - catalog + ports: + - 8081:8081 + +volumes: + oc-catalog-data: + +networks: + catalog: + name: catalog \ No newline at end of file diff --git a/go.mod b/go.mod new file mode 100644 index 0000000..4db8a89 --- /dev/null +++ b/go.mod @@ -0,0 +1,32 @@ +module cloud.o-forge.io/core/oc-catalog + +go 1.15 + +require github.com/beego/beego/v2 v2.0.1 + +require ( + github.com/antihax/optional v1.0.0 + github.com/aws/aws-sdk-go v1.36.29 // indirect + github.com/beego/bee/v2 v2.0.2 + github.com/dgrijalva/jwt-go v3.2.0+incompatible + github.com/go-playground/validator/v10 v10.4.1 + github.com/golang/snappy v0.0.2 // indirect + github.com/klauspost/compress v1.11.7 // indirect + github.com/leodido/go-urn v1.2.1 // indirect + github.com/mitchellh/mapstructure v1.4.1 // indirect + github.com/prometheus/client_golang v1.9.0 // indirect + github.com/prometheus/procfs v0.3.0 // indirect + github.com/shiena/ansicolor v0.0.0-20200904210342-c7312218db18 // indirect + github.com/vk496/cron v1.2.0 + go.mongodb.org/mongo-driver v1.4.5 + golang.org/x/crypto v0.0.0-20201221181555-eec23a3978ad // indirect + golang.org/x/mod v0.4.1 // indirect + golang.org/x/net 
v0.0.0-20210119194325-5f4716e94777 // indirect + golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45 + golang.org/x/sync v0.0.0-20201207232520-09787c993a3a // indirect + golang.org/x/text v0.3.5 // indirect + golang.org/x/tools v0.1.0 // indirect + google.golang.org/protobuf v1.25.0 // indirect + gopkg.in/check.v1 v1.0.0-20200902074654-038fdea0a05b // indirect + gopkg.in/yaml.v2 v2.4.0 // indirect +) diff --git a/go.sum b/go.sum new file mode 100644 index 0000000..b405084 --- /dev/null +++ b/go.sum @@ -0,0 +1,747 @@ +cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= +cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= +cloud.google.com/go v0.38.0/go.mod h1:990N+gfupTy94rShfmMCWGDn0LpTmnzTp2qbd1dvSRU= +cloud.google.com/go v0.44.1/go.mod h1:iSa0KzasP4Uvy3f1mN/7PiObzGgflwredwwASm/v6AU= +cloud.google.com/go v0.44.2/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY= +cloud.google.com/go v0.45.1/go.mod h1:RpBamKRgapWJb87xiFSdk4g1CME7QZg3uwTez+TSTjc= +cloud.google.com/go v0.46.3/go.mod h1:a6bKKbmY7er1mI7TEI4lsAkts/mkhTSZK8w33B4RAg0= +cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o= +cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE= +cloud.google.com/go/firestore v1.1.0/go.mod h1:ulACoGHTpvq5r8rxGJ4ddJZBZqakUQqClKRT5SZwBmk= +cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I= +cloud.google.com/go/storage v1.0.0/go.mod h1:IhtSnM/ZTZV8YYJWCY8RULGVqBDmpoyjwiyrjsg+URw= +dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= +github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= +github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= +github.com/Knetic/govaluate v3.0.0+incompatible/go.mod 
h1:r7JcOSlj0wfOMncg0iLm8Leh48TZaKVeNIfJntJ2wa0= +github.com/Knetic/govaluate v3.0.1-0.20171022003610-9aa49832a739+incompatible/go.mod h1:r7JcOSlj0wfOMncg0iLm8Leh48TZaKVeNIfJntJ2wa0= +github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU= +github.com/Shopify/sarama v1.19.0/go.mod h1:FVkBWblsNy7DGZRfXLU0O9RCGt5g3g3yEuWXgklEdEo= +github.com/Shopify/toxiproxy v2.1.4+incompatible/go.mod h1:OXgGpZ6Cli1/URJOF1DMxUHB2q5Ap20/P/eIdh4G0pI= +github.com/VividCortex/gohistogram v1.0.0/go.mod h1:Pf5mBqqDxYaXu3hDrrU+w6nw50o/4+TcAqDqk/vUH7g= +github.com/afex/hystrix-go v0.0.0-20180502004556-fa1af6a1f4f5/go.mod h1:SkGFH1ia65gfNATL8TAiHDNxPzPdmEL5uirI2Uyuz6c= +github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= +github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= +github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= +github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= +github.com/alecthomas/units v0.0.0-20190924025748-f65c72e2690d/go.mod h1:rBZYJk541a8SKzHPHnH3zbiI+7dagKZ0cgpgrD7Fyho= +github.com/alicebob/gopher-json v0.0.0-20180125190556-5a6b3ba71ee6/go.mod h1:SGnFV6hVsYE877CKEZ6tDNTjaSXYUk6QqoIK6PrAtcc= +github.com/alicebob/miniredis v2.5.0+incompatible/go.mod h1:8HZjEj4yU0dwhYHky+DxYx+6BMjkBbe5ONFIF1MXffk= +github.com/antihax/optional v1.0.0 h1:xK2lYat7ZLaVVcIuj82J8kIro4V6kDe0AUDFboUCwcg= +github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY= +github.com/apache/thrift v0.12.0/go.mod h1:cp2SuWMxlEZw2r+iP2GNCdIi4C1qmUzdZFSVb+bacwQ= +github.com/apache/thrift v0.13.0/go.mod h1:cp2SuWMxlEZw2r+iP2GNCdIi4C1qmUzdZFSVb+bacwQ= +github.com/armon/circbuf v0.0.0-20150827004946-bbbad097214e/go.mod h1:3U/XgcO3hCbHZ8TKRvWD2dDTCfh9M9ya+I9JpbB7O8o= 
+github.com/armon/go-metrics v0.0.0-20180917152333-f0300d1749da/go.mod h1:Q73ZrmVTwzkszR9V5SSuryQ31EELlFMUz1kKyl939pY= +github.com/armon/go-radix v0.0.0-20180808171621-7fddfc383310/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8= +github.com/aryann/difflib v0.0.0-20170710044230-e206f873d14a/go.mod h1:DAHtR1m6lCRdSC2Tm3DSWRPvIPr6xNKyeHdqDQSQT+A= +github.com/aws/aws-lambda-go v1.13.3/go.mod h1:4UKl9IzQMoD+QF79YdCuzCwp8VbmG4VAQwij/eHl5CU= +github.com/aws/aws-sdk-go v1.27.0/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo= +github.com/aws/aws-sdk-go v1.34.28/go.mod h1:H7NKnBqNVzoTJpGfLrQkkD+ytBA93eiDYi/+8rV9s48= +github.com/aws/aws-sdk-go v1.36.29 h1:lM1G3AF1+7vzFm0n7hfH8r2+750BTo+6Lo6FtPB7kzk= +github.com/aws/aws-sdk-go v1.36.29/go.mod h1:hcU610XS61/+aQV88ixoOzUoG7v3b31pl2zKMmprdro= +github.com/aws/aws-sdk-go-v2 v0.18.0/go.mod h1:JWVYvqSMppoMJC0x5wdwiImzgXTI9FuZwxzkQq9wy+g= +github.com/beego/bee/v2 v2.0.2 h1:xWARyIqdnnbNMDBDUdb6Gvr9S/yGXC6Ni43kKdS1/eg= +github.com/beego/bee/v2 v2.0.2/go.mod h1:rfZa899qLAF8SYBRvE7mWNPZTU7/qysOBhaCLmZrMX4= +github.com/beego/beego/v2 v2.0.1 h1:07a7Z0Ok5vbqyqh+q53sDPl9LdhKh0ZDy3gbyGrhFnE= +github.com/beego/beego/v2 v2.0.1/go.mod h1:8zyHi1FnWO1mZLwTn62aKRIZF/aIKvkCBB2JYs+eqQI= +github.com/beego/goyaml2 v0.0.0-20130207012346-5545475820dd/go.mod h1:1b+Y/CofkYwXMUU0OhQqGvsY2Bvgr4j6jfT699wyZKQ= +github.com/beego/x2j v0.0.0-20131220205130-a0352aadc542/go.mod h1:kSeGC/p1AbBiEp5kat81+DSQrZenVBZXklMLaELspWU= +github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q= +github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8= +github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM= +github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw= +github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs= +github.com/bketelsen/crypt v0.0.3-0.20200106085610-5cbc8cc4026c/go.mod 
h1:MKsuJmJgSg28kpZDP6UIiPt0e0Oz0kqKNGyRaWEPv84= +github.com/bradfitz/gomemcache v0.0.0-20180710155616-bc664df96737/go.mod h1:PmM6Mmwb0LSuEubjR8N7PtNe1KxZLtOUHtbeikc5h60= +github.com/casbin/casbin v1.7.0/go.mod h1:c67qKN6Oum3UF5Q1+BByfFxkwKvhwW57ITjqwtzR1KE= +github.com/casbin/casbin/v2 v2.1.2/go.mod h1:YcPU1XXisHhLzuxH9coDNf2FbKpjGlbCg3n9yuLkIJQ= +github.com/cenkalti/backoff v2.2.1+incompatible/go.mod h1:90ReRw6GdpyfrHakVjL/QHaoyV4aDUVVkXQJJJ3NXXM= +github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= +github.com/cespare/xxhash v1.1.0 h1:a6HrQnmkObjyL+Gs60czilIUGqrzKutQD6XZog3p+ko= +github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc= +github.com/cespare/xxhash/v2 v2.1.1 h1:6MnRN8NT7+YBpUIWxHtefFZOKTAPgGjpQSxqLNn0+qY= +github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= +github.com/clbanning/x2j v0.0.0-20191024224557-825249438eec/go.mod h1:jMjuTZXRI4dUb/I5gc9Hdhagfvm9+RyrPryS/auMzxE= +github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= +github.com/cloudflare/golz4 v0.0.0-20150217214814-ef862a3cdc58/go.mod h1:EOBUe0h4xcZ5GoxqC5SDxFQ8gwyZPKQoEzownBlhI80= +github.com/cockroachdb/datadriven v0.0.0-20190809214429-80d97fb3cbaa/go.mod h1:zn76sxSg3SzpJ0PPJaLDCu+Bu0Lg3sKTORVIj19EIF8= +github.com/codahale/hdrhistogram v0.0.0-20161010025455-3a0bb77429bd/go.mod h1:sE/e/2PUdi/liOCUjSTXgM1o87ZssimdTWN964YiIeI= +github.com/coreos/bbolt v1.3.2/go.mod h1:iRUV2dpdMOn7Bo10OQBFzIJO9kkE559Wcmn+qkEiiKk= +github.com/coreos/etcd v3.3.13+incompatible/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc32PjwdhPthX9715RE= +github.com/coreos/etcd v3.3.25+incompatible/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc32PjwdhPthX9715RE= +github.com/coreos/go-semver v0.2.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk= +github.com/coreos/go-semver v0.3.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk= +github.com/coreos/go-systemd 
v0.0.0-20180511133405-39ca1b05acc7/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= +github.com/coreos/go-systemd v0.0.0-20190321100706-95778dfbb74e/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= +github.com/coreos/go-systemd v0.0.0-20191104093116-d3cd4ed1dbcf/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= +github.com/coreos/pkg v0.0.0-20160727233714-3ac0863d7acf/go.mod h1:E3G3o1h8I7cfcXa63jLwjI0eiQQMgzzUDFVpN/nH/eA= +github.com/coreos/pkg v0.0.0-20180928190104-399ea9e2e55f/go.mod h1:E3G3o1h8I7cfcXa63jLwjI0eiQQMgzzUDFVpN/nH/eA= +github.com/cosiner/argv v0.1.0/go.mod h1:EusR6TucWKX+zFgtdUsKT2Cvg45K5rtpCcWz4hK06d8= +github.com/couchbase/go-couchbase v0.0.0-20200519150804-63f3cdb75e0d/go.mod h1:TWI8EKQMs5u5jLKW/tsb9VwauIrMIxQG1r5fMsswK5U= +github.com/couchbase/gomemcached v0.0.0-20200526233749-ec430f949808/go.mod h1:srVSlQLB8iXBVXHgnqemxUXqN6FCvClgCMPCsjBDR7c= +github.com/couchbase/goutils v0.0.0-20180530154633-e865a1461c8a/go.mod h1:BQwMFlJzDjFDG3DJUdU0KORxn88UlsOULuxLExMh3Hs= +github.com/cpuguy83/go-md2man v1.0.10/go.mod h1:SmD6nW6nTyfqj6ABTjUi3V3JVMnlJmwcJI5acqYI6dE= +github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= +github.com/creack/pty v1.1.7/go.mod h1:lj5s0c3V2DBrqTV7llrYr5NG6My20zk30Fl46Y7DoTY= +github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= +github.com/cupcake/rdb v0.0.0-20161107195141-43ba34106c76/go.mod h1:vYwsqCOLxGiisLwp9rITslkFNpZD5rz43tf41QFkTWY= +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/dgrijalva/jwt-go v3.2.0+incompatible h1:7qlOGliEKZXTDg6OTjfoBKDXWrumCAMpl/TFQ4/5kLM= +github.com/dgrijalva/jwt-go v3.2.0+incompatible/go.mod h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ= 
+github.com/dgryski/go-sip13 v0.0.0-20181026042036-e10d5fee7954/go.mod h1:vAd38F8PWV+bWy6jNmig1y/TA+kYO4g3RSRF0IAv0no= +github.com/dustin/go-humanize v0.0.0-20171111073723-bb3d318650d4/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk= +github.com/eapache/go-resiliency v1.1.0/go.mod h1:kFI+JgMyC7bLPUVY133qvEBtVayf5mFgVsvEsIPBvNs= +github.com/eapache/go-xerial-snappy v0.0.0-20180814174437-776d5712da21/go.mod h1:+020luEh2TKB4/GOp8oxxtq0Daoen/Cii55CzbTV6DU= +github.com/eapache/queue v1.1.0/go.mod h1:6eCeP0CKFpHLu8blIFXhExK/dRa7WDZfr6jVFPTqq+I= +github.com/edsrzf/mmap-go v0.0.0-20170320065105-0bce6a688712/go.mod h1:YO35OhQPt3KJa3ryjFM5Bs14WD66h8eGKpfaBNrHW5M= +github.com/edsrzf/mmap-go v1.0.0/go.mod h1:YO35OhQPt3KJa3ryjFM5Bs14WD66h8eGKpfaBNrHW5M= +github.com/elastic/go-elasticsearch/v6 v6.8.5/go.mod h1:UwaDJsD3rWLM5rKNFzv9hgox93HoX8utj1kxD9aFUcI= +github.com/elazarl/go-bindata-assetfs v1.0.0 h1:G/bYguwHIzWq9ZoyUQqrjTmJbbYn3j3CKKpKinvZLFk= +github.com/elazarl/go-bindata-assetfs v1.0.0/go.mod h1:v+YaWX3bdea5J/mo8dSETolEo7R71Vk1u8bnjau5yw4= +github.com/envoyproxy/go-control-plane v0.6.9/go.mod h1:SBwIajubJHhxtWwsL9s8ss4safvEdbitLhGGK48rN6g= +github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= +github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= +github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4= +github.com/flosch/pongo2 v0.0.0-20190707114632-bbf5a6c351f4/go.mod h1:T9YF2M40nIgbVgp3rreNmTged+9HrbNTIQf1PsaIiTA= +github.com/flosch/pongo2 v0.0.0-20200529170236-5abacdfa4915/go.mod h1:fB4mx6dzqFinCxIf3a7Mf5yLk+18Bia9mPAnuejcvDA= +github.com/franela/goblin v0.0.0-20200105215937-c9ffbefa60db/go.mod h1:7dvUGVsVBjqR7JHJk0brhHOZYGmfBYOrK0ZhYMEtBr4= +github.com/franela/goreq v0.0.0-20171204163338-bcd34c9993f8/go.mod h1:ZhphrRTfi2rbfLwlschooIH4+wKKDR4Pdxhh+TRoA20= +github.com/fsnotify/fsnotify v1.4.7/go.mod 
h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo= +github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ= +github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= +github.com/glendc/gopher-json v0.0.0-20170414221815-dc4743023d0c/go.mod h1:Gja1A+xZ9BoviGJNA2E9vFkPjjsl+CoJxSXiQM1UXtw= +github.com/go-check/check v0.0.0-20180628173108-788fd7840127/go.mod h1:9ES+weclKsC9YodN5RgxqK/VD9HM9JsCSh7rNhMZE98= +github.com/go-delve/delve v1.5.0/go.mod h1:c6b3a1Gry6x8a4LGCe/CWzrocrfaHvkUxCj3k4bvSUQ= +github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= +github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= +github.com/go-kit/kit v0.9.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= +github.com/go-kit/kit v0.10.0/go.mod h1:xUsJbQ/Fp4kEt7AFgCuvyX4a71u8h9jB8tj/ORgOZ7o= +github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE= +github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk= +github.com/go-logfmt/logfmt v0.5.0/go.mod h1:wCYkCAKZfumFQihp8CzCvQ3paCTfi41vtzG1KdI/P7A= +github.com/go-playground/assert/v2 v2.0.1 h1:MsBgLAaY856+nPRTKrp3/OZK38U/wa0CcBYNjji3q3A= +github.com/go-playground/assert/v2 v2.0.1/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4= +github.com/go-playground/locales v0.13.0 h1:HyWk6mgj5qFqCT5fjGBuRArbVDfE4hi8+e8ceBS/t7Q= +github.com/go-playground/locales v0.13.0/go.mod h1:taPMhCMXrRLJO55olJkUXHZBHCxTMfnGwq/HNwmWNS8= +github.com/go-playground/universal-translator v0.17.0 h1:icxd5fm+REJzpZx7ZfpaD876Lmtgy7VtROAbHHXk8no= +github.com/go-playground/universal-translator v0.17.0/go.mod h1:UkSxE5sNxxRwHyU+Scu5vgOQjsIJAF8j9muTVoKLVtA= +github.com/go-playground/validator/v10 v10.4.1 h1:pH2c5ADXtd66mxoE0Zm9SUhxE20r7aM3F26W0hOn+GE= +github.com/go-playground/validator/v10 v10.4.1/go.mod h1:nlOn6nFhuKACm19sB/8EGNn9GlaMV7XkbRSipzJ0Ii4= 
+github.com/go-redis/redis v6.14.2+incompatible/go.mod h1:NAIEuMOZ/fxfXJIrKDQDz8wamY7mA7PouImQ2Jvg6kA= +github.com/go-redis/redis/v7 v7.4.0/go.mod h1:JDNMw23GTyLNC4GZu9njt15ctBQVn7xjRfnwdHj/Dcg= +github.com/go-sql-driver/mysql v1.4.0/go.mod h1:zAC/RDZ24gD3HViQzih4MyKcchzm+sOG5ZlKdlhCg5w= +github.com/go-sql-driver/mysql v1.5.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg= +github.com/go-stack/stack v1.8.0 h1:5SgMzNM5HxrEjV0ww2lTmX6E2Izsfxas4+YHWRs3Lsk= +github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= +github.com/gobuffalo/attrs v0.0.0-20190224210810-a9411de4debd/go.mod h1:4duuawTqi2wkkpB4ePgWMaai6/Kc6WEz83bhFwpHzj0= +github.com/gobuffalo/depgen v0.0.0-20190329151759-d478694a28d3/go.mod h1:3STtPUQYuzV0gBVOY3vy6CfMm/ljR4pABfrTeHNLHUY= +github.com/gobuffalo/depgen v0.1.0/go.mod h1:+ifsuy7fhi15RWncXQQKjWS9JPkdah5sZvtHc2RXGlg= +github.com/gobuffalo/envy v1.6.15/go.mod h1:n7DRkBerg/aorDM8kbduw5dN3oXGswK5liaSCx4T5NI= +github.com/gobuffalo/envy v1.7.0/go.mod h1:n7DRkBerg/aorDM8kbduw5dN3oXGswK5liaSCx4T5NI= +github.com/gobuffalo/flect v0.1.0/go.mod h1:d2ehjJqGOH/Kjqcoz+F7jHTBbmDb38yXA598Hb50EGs= +github.com/gobuffalo/flect v0.1.1/go.mod h1:8JCgGVbRjJhVgD6399mQr4fx5rRfGKVzFjbj6RE/9UI= +github.com/gobuffalo/flect v0.1.3/go.mod h1:8JCgGVbRjJhVgD6399mQr4fx5rRfGKVzFjbj6RE/9UI= +github.com/gobuffalo/genny v0.0.0-20190329151137-27723ad26ef9/go.mod h1:rWs4Z12d1Zbf19rlsn0nurr75KqhYp52EAGGxTbBhNk= +github.com/gobuffalo/genny v0.0.0-20190403191548-3ca520ef0d9e/go.mod h1:80lIj3kVJWwOrXWWMRzzdhW3DsrdjILVil/SFKBzF28= +github.com/gobuffalo/genny v0.1.0/go.mod h1:XidbUqzak3lHdS//TPu2OgiFB+51Ur5f7CSnXZ/JDvo= +github.com/gobuffalo/genny v0.1.1/go.mod h1:5TExbEyY48pfunL4QSXxlDOmdsD44RRq4mVZ0Ex28Xk= +github.com/gobuffalo/gitgen v0.0.0-20190315122116-cc086187d211/go.mod h1:vEHJk/E9DmhejeLeNt7UVvlSGv3ziL+djtTr3yyzcOw= +github.com/gobuffalo/gogen v0.0.0-20190315121717-8f38393713f5/go.mod h1:V9QVDIxsgKNZs6L2IYiGR8datgMhB577vzTDqypH360= 
+github.com/gobuffalo/gogen v0.1.0/go.mod h1:8NTelM5qd8RZ15VjQTFkAW6qOMx5wBbW4dSCS3BY8gg= +github.com/gobuffalo/gogen v0.1.1/go.mod h1:y8iBtmHmGc4qa3urIyo1shvOD8JftTtfcKi+71xfDNE= +github.com/gobuffalo/logger v0.0.0-20190315122211-86e12af44bc2/go.mod h1:QdxcLw541hSGtBnhUc4gaNIXRjiDppFGaDqzbrBd3v8= +github.com/gobuffalo/mapi v1.0.1/go.mod h1:4VAGh89y6rVOvm5A8fKFxYG+wIW6LO1FMTG9hnKStFc= +github.com/gobuffalo/mapi v1.0.2/go.mod h1:4VAGh89y6rVOvm5A8fKFxYG+wIW6LO1FMTG9hnKStFc= +github.com/gobuffalo/packd v0.0.0-20190315124812-a385830c7fc0/go.mod h1:M2Juc+hhDXf/PnmBANFCqx4DM3wRbgDvnVWeG2RIxq4= +github.com/gobuffalo/packd v0.1.0/go.mod h1:M2Juc+hhDXf/PnmBANFCqx4DM3wRbgDvnVWeG2RIxq4= +github.com/gobuffalo/packr/v2 v2.0.9/go.mod h1:emmyGweYTm6Kdper+iywB6YK5YzuKchGtJQZ0Odn4pQ= +github.com/gobuffalo/packr/v2 v2.2.0/go.mod h1:CaAwI0GPIAv+5wKLtv8Afwl+Cm78K/I/VCm/3ptBN+0= +github.com/gobuffalo/syncx v0.0.0-20190224160051-33c29581e754/go.mod h1:HhnNqWY95UYwwW3uSASeV7vtgYkT2t16hJgV3AEPUpw= +github.com/gogo/googleapis v1.1.0/go.mod h1:gf4bu3Q80BeJ6H1S1vYPm8/ELATdvryBaNFGgqEef3s= +github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= +github.com/gogo/protobuf v1.2.0/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= +github.com/gogo/protobuf v1.2.1/go.mod h1:hp+jE20tsWTFYpLwKvXlhS1hjn+gTNwPg2I6zVXpSg4= +github.com/gogo/protobuf v1.3.1/go.mod h1:SlYgWuQ5SjCEi6WLHjHCa1yvBfUnHcTbrrZtXPKa29o= +github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= +github.com/golang/groupcache v0.0.0-20160516000752-02826c3e7903/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/groupcache v0.0.0-20190129154638-5b532d6fd5ef/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= 
+github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= +github.com/golang/mock v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y= +github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8= +github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA= +github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs= +github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w= +github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0= +github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QDs8UjoX8= +github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= +github.com/golang/protobuf v1.4.3 h1:JjCZWpVbqXDqFVmTfYWEVTMIYrL/NPdPSCHPJ0T/raM= +github.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= +github.com/golang/snappy v0.0.0-20170215233205-553a64147049/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= +github.com/golang/snappy v0.0.0-20180518054509-2e65f85255db/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= +github.com/golang/snappy v0.0.1/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= +github.com/golang/snappy v0.0.2 h1:aeE13tS0IiQgFjYdoL8qN3K1N2bXXtI6Vi51/y7BpMw= +github.com/golang/snappy v0.0.2/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= +github.com/gomodule/redigo v2.0.0+incompatible/go.mod h1:B4C85qUVwatsJoIUNIfCRsp7qO0iAmpGFZ4EELWSbC4= +github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod 
h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= +github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= +github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= +github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= +github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= +github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.2 h1:X2ev0eStA3AbceY54o37/0PQ/UWqKEiiO2dKL5OPaFM= +github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-dap v0.2.0/go.mod h1:5q8aYQFnHOAZEMP+6vmq25HKYAEwE+LF5yh7JKrrhSQ= +github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= +github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs= +github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= +github.com/google/pprof v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= +github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= +github.com/google/uuid v1.0.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= +github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= +github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= +github.com/gorilla/context v1.1.1/go.mod h1:kBGZzfjB9CEq2AlWe17Uuf7NDRt0dE0s8S51q0aT7Yg= +github.com/gorilla/mux v1.6.2/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2zaAs= 
+github.com/gorilla/mux v1.7.3/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2zaAs= +github.com/gorilla/websocket v0.0.0-20170926233335-4201258b820c/go.mod h1:E7qHFY5m1UJ88s3WnNqhKjPHQ0heANvMoAMk2YaljkQ= +github.com/gorilla/websocket v1.4.2/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= +github.com/grpc-ecosystem/go-grpc-middleware v1.0.0/go.mod h1:FiyG127CGDf3tlThmgyCl78X/SZQqEOJBCDaAfeWzPs= +github.com/grpc-ecosystem/go-grpc-middleware v1.0.1-0.20190118093823-f849b5445de4/go.mod h1:FiyG127CGDf3tlThmgyCl78X/SZQqEOJBCDaAfeWzPs= +github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0/go.mod h1:8NvIoxWQoOIhqOTXgfV/d3M/q6VIi02HzZEHgUlZvzk= +github.com/grpc-ecosystem/grpc-gateway v1.9.0/go.mod h1:vNeuVxBJEsws4ogUvrchl83t/GYV9WGTSLVdBhOQFDY= +github.com/grpc-ecosystem/grpc-gateway v1.9.5/go.mod h1:vNeuVxBJEsws4ogUvrchl83t/GYV9WGTSLVdBhOQFDY= +github.com/hashicorp/consul/api v1.1.0/go.mod h1:VmuI/Lkw1nC05EYQWNKwWGbkg+FbDBtguAZLlVdkD9Q= +github.com/hashicorp/consul/api v1.3.0/go.mod h1:MmDNSzIMUjNpY/mQ398R4bk2FnqQLoPndWW5VkKPlCE= +github.com/hashicorp/consul/sdk v0.1.1/go.mod h1:VKf9jXwCTEY1QZP2MOLRhb5i/I/ssyNV1vwHyQBF0x8= +github.com/hashicorp/consul/sdk v0.3.0/go.mod h1:VKf9jXwCTEY1QZP2MOLRhb5i/I/ssyNV1vwHyQBF0x8= +github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= +github.com/hashicorp/go-cleanhttp v0.5.1/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80= +github.com/hashicorp/go-immutable-radix v1.0.0/go.mod h1:0y9vanUI8NX6FsYoO3zeMjhV/C5i9g4Q3DwcSNZ4P60= +github.com/hashicorp/go-msgpack v0.5.3/go.mod h1:ahLV/dePpqEmjfWmKiqvPkv/twdG7iPBM1vqhUKIvfM= +github.com/hashicorp/go-multierror v1.0.0/go.mod h1:dHtQlpGsu+cZNNAkkCN/P3hoUDHhCYQXV3UM06sGGrk= +github.com/hashicorp/go-rootcerts v1.0.0/go.mod h1:K6zTfqpRlCUIjkwsN4Z+hiSfzSTQa6eBIzfwKfwNnHU= +github.com/hashicorp/go-sockaddr v1.0.0/go.mod h1:7Xibr9yA9JjQq1JpNB2Vw7kxv8xerXegt+ozgdvDeDU= +github.com/hashicorp/go-syslog v1.0.0/go.mod 
h1:qPfqrKkXGihmCqbJM2mZgkZGvKG1dFdvsLplgctolz4= +github.com/hashicorp/go-uuid v1.0.0/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= +github.com/hashicorp/go-uuid v1.0.1/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= +github.com/hashicorp/go-version v1.2.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA= +github.com/hashicorp/go.net v0.0.1/go.mod h1:hjKkEWcCURg++eb33jQU7oqQcI9XDCnUzHA0oac0k90= +github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= +github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= +github.com/hashicorp/golang-lru v0.5.4 h1:YDjusn29QI/Das2iO9M0BHnIbxPeyuCHsjMW+lJfyTc= +github.com/hashicorp/golang-lru v0.5.4/go.mod h1:iADmTwqILo4mZ8BN3D2Q6+9jd8WM5uGBxy+E8yxSoD4= +github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ= +github.com/hashicorp/logutils v1.0.0/go.mod h1:QIAnNjmIWmVIIkWDTG1z5v++HQmx9WQRO+LraFDTW64= +github.com/hashicorp/mdns v1.0.0/go.mod h1:tL+uN++7HEJ6SQLQ2/p+z2pH24WQKWjBPkE0mNTz8vQ= +github.com/hashicorp/memberlist v0.1.3/go.mod h1:ajVTdAv/9Im8oMAAj5G31PhhMCZJV2pPBoIllUwCN7I= +github.com/hashicorp/serf v0.8.2/go.mod h1:6hOLApaqBFA1NXqRQAsxw9QxuDEvNxSQRwA/JwenrHc= +github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU= +github.com/hudl/fargo v1.3.0/go.mod h1:y3CKSmjA+wD2gak7sUSXTAoopbhU08POFhmITJgmKTg= +github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8= +github.com/influxdata/influxdb1-client v0.0.0-20191209144304-8bf82d3c094d/go.mod h1:qj24IKcXYK6Iy9ceXlo3Tc+vtHo9lIhSX5JddghvEPo= +github.com/jmespath/go-jmespath v0.0.0-20180206201540-c2b33e8439af/go.mod h1:Nht3zPeWKUH0NzdCt2Blrr5ys8VGpn0CEB0cQHVjt7k= +github.com/jmespath/go-jmespath v0.4.0 h1:BEgLn5cpjn8UN1mAw4NjwDrS35OdebyEtFe+9YPoQUg= +github.com/jmespath/go-jmespath v0.4.0/go.mod h1:T8mJZnbsbmF+m6zOOFylbeCJqk5+pHWvzYPziyZiYoo= 
+github.com/jmespath/go-jmespath/internal/testify v1.5.1 h1:shLQSRRSCCPj3f2gpwzGwWFoC7ycTf1rcQZHOlsJ6N8= +github.com/jmespath/go-jmespath/internal/testify v1.5.1/go.mod h1:L3OGu8Wl2/fWfCI6z80xFu9LTZmf1ZRjMHUOPmWr69U= +github.com/joho/godotenv v1.3.0/go.mod h1:7hK45KPybAkOC6peb+G5yklZfMxEjkZhHbwpqxOKXbg= +github.com/jonboulle/clockwork v0.1.0/go.mod h1:Ii8DK3G1RaLaWxj9trq07+26W01tbo22gdxWY5EU2bo= +github.com/jpillora/backoff v1.0.0/go.mod h1:J/6gKK9jxlEcS3zixgDgUAsiuZ7yrSoa/FX5e0EB2j4= +github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= +github.com/json-iterator/go v1.1.7/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= +github.com/json-iterator/go v1.1.8/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= +github.com/json-iterator/go v1.1.10/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= +github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU= +github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU= +github.com/juju/errors v0.0.0-20181118221551-089d3ea4e4d5/go.mod h1:W54LbzXuIE0boCoNJfwqpmkKJ1O4TCTZMetAt6jGk7Q= +github.com/juju/errors v0.0.0-20190930114154-d42613fe1ab9/go.mod h1:W54LbzXuIE0boCoNJfwqpmkKJ1O4TCTZMetAt6jGk7Q= +github.com/juju/loggo v0.0.0-20180524022052-584905176618/go.mod h1:vgyd7OREkbtVEN/8IXZe5Ooef3LQePvuBm9UWj6ZL8U= +github.com/juju/testing v0.0.0-20180920084828-472a3e8b2073/go.mod h1:63prj8cnj0tU0S9OHjGJn+b1h0ZghCndfnbQolrYTwA= +github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w= +github.com/julienschmidt/httprouter v1.3.0/go.mod h1:JR6WtHb+2LUe8TCKY3cZOxFyyO8IZAc4RVcycCCAKdM= +github.com/karrick/godirwalk v1.8.0/go.mod h1:H5KPZjojv4lE+QYImBI8xVtrBRgYrIVsaRPx4tDPEn4= +github.com/karrick/godirwalk v1.10.3/go.mod h1:RoGL9dQei4vP9ilrpETWE8CLOZ1kiN0LhBygSwrAsHA= +github.com/kisielk/errcheck v1.1.0/go.mod 
h1:EZBBE59ingxPouuu3KfxchcWSUPOHkagtvWXihfKN4Q= +github.com/kisielk/errcheck v1.2.0/go.mod h1:/BMXB+zMLi60iA8Vv6Ksmxu/1UDYcXs4uQLJ+jE2L00= +github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= +github.com/klauspost/compress v1.9.5/go.mod h1:RyIbtBH6LamlWaDj8nUwkbUhJ87Yi3uG0guNDohfE1A= +github.com/klauspost/compress v1.11.7 h1:0hzRabrMN4tSTvMfnL3SCv1ZGeAP23ynzodBgaHeMeg= +github.com/klauspost/compress v1.11.7/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYsUV+/s2qKfXs= +github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= +github.com/konsorten/go-windows-terminal-sequences v1.0.2/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= +github.com/konsorten/go-windows-terminal-sequences v1.0.3/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= +github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc= +github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= +github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= +github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE= +github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= +github.com/ledisdb/ledisdb v0.0.0-20200510135210-d35789ec47e6/go.mod h1:n931TsDuKuq+uX4v1fulaMbA/7ZLLhjc85h7chZGBCQ= +github.com/leodido/go-urn v1.2.0/go.mod h1:+8+nEpDfqqsY+g338gtMEUOtuK+4dEMhiQEgxpxOKII= +github.com/leodido/go-urn v1.2.1 h1:BqpAaACuzVSgi/VLzGZIobT2z4v53pjosyNd9Yv6n/w= +github.com/leodido/go-urn v1.2.1/go.mod h1:zt4jvISO2HfUBqxjfIshjdMTYS56ZS/qv49ictyFfxY= +github.com/lib/pq v1.0.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= +github.com/lib/pq v1.7.0/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= +github.com/lightstep/lightstep-tracer-common/golang/gogo v0.0.0-20190605223551-bc2310a04743/go.mod h1:qklhhLq1aX+mtWk9cPHPzaBjWImj5ULL6C7HFJtXQMM= 
+github.com/lightstep/lightstep-tracer-go v0.18.1/go.mod h1:jlF1pusYV4pidLvZ+XD0UBX0ZE6WURAspgAczcDHrL4= +github.com/lyft/protoc-gen-validate v0.0.13/go.mod h1:XbGvPuh87YZc5TdIa2/I4pLk0QoUACkjt2znoq26NVQ= +github.com/magiconair/properties v1.8.1/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ= +github.com/markbates/oncer v0.0.0-20181203154359-bf2de49a0be2/go.mod h1:Ld9puTsIW75CHf65OeIOkyKbteujpZVXDpWK6YGZbxE= +github.com/markbates/safe v1.0.1/go.mod h1:nAqgmRi7cY2nqMc92/bSEeQA+R4OheNU2T1kNSCBdG0= +github.com/mattn/go-colorable v0.0.0-20170327083344-ded68f7a9561/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU= +github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU= +github.com/mattn/go-isatty v0.0.3/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4= +github.com/mattn/go-isatty v0.0.4/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4= +github.com/mattn/go-runewidth v0.0.2/go.mod h1:LwmH8dsx7+W8Uxz3IHJYH5QSwggIsqBzpuz5H//U1FU= +github.com/mattn/go-sqlite3 v2.0.3+incompatible/go.mod h1:FPy6KqzDD04eiIsT53CuJW3U88zkxoIYsOqkbpncsNc= +github.com/mattn/goveralls v0.0.2/go.mod h1:8d1ZMHsd7fW6IRPKQh46F2WRpyib5/X4FOpevwGNQEw= +github.com/matttproud/golang_protobuf_extensions v1.0.1 h1:4hp9jkHxhMHkqkrB3Ix0jegS5sx/RkqARlsWZ6pIwiU= +github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0= +github.com/miekg/dns v1.0.14/go.mod h1:W1PPwlIAgtquWBMBEV9nkV9Cazfe8ScdGz/Lj7v3Nrg= +github.com/mitchellh/cli v1.0.0/go.mod h1:hNIlj7HEI86fIcpObd7a0FcrxTWetlwJDGcceTlRvqc= +github.com/mitchellh/go-homedir v1.0.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= +github.com/mitchellh/go-testing-interface v1.0.0/go.mod h1:kRemZodwjscx+RGhAo8eIhFbs2+BFgRtFPeD/KE+zxI= +github.com/mitchellh/gox v0.4.0/go.mod h1:Sd9lOJ0+aimLBi73mGofS1ycjY8lL3uZM3JPS42BGNg= +github.com/mitchellh/iochan v1.0.0/go.mod h1:JwYml1nuB7xOzsp52dPpHFffvOCDupsG0QubkSMEySY= +github.com/mitchellh/mapstructure 
v0.0.0-20160808181253-ca63d7c062ee/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= +github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= +github.com/mitchellh/mapstructure v1.3.3/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= +github.com/mitchellh/mapstructure v1.4.1 h1:CpVNEelQCZBooIPDn+AR3NpivK/TIKU8bDxdASFVQag= +github.com/mitchellh/mapstructure v1.4.1/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= +github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= +github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= +github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= +github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= +github.com/montanaflynn/stats v0.0.0-20171201202039-1bf9dbcd8cbe/go.mod h1:wL8QJuTMNUDYhXwkmfOly8iTdp5TEcJFWZD2D7SIkUc= +github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= +github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= +github.com/nats-io/jwt v0.3.0/go.mod h1:fRYCDE99xlTsqUzISS1Bi75UBJ6ljOJQOAAu5VglpSg= +github.com/nats-io/jwt v0.3.2/go.mod h1:/euKqTS1ZD+zzjYrY7pseZrTtWQSjujC7xjPc8wL6eU= +github.com/nats-io/nats-server/v2 v2.1.2/go.mod h1:Afk+wRZqkMQs/p45uXdrVLuab3gwv3Z8C4HTBu8GD/k= +github.com/nats-io/nats.go v1.9.1/go.mod h1:ZjDU1L/7fJ09jvUSRVBR2e7+RnLiiIQyqyzEE/Zbp4w= +github.com/nats-io/nkeys v0.1.0/go.mod h1:xpnFELMwJABBLVhffcfd1MZx6VsNRFpEugbxziKVo7w= +github.com/nats-io/nkeys v0.1.3/go.mod h1:xpnFELMwJABBLVhffcfd1MZx6VsNRFpEugbxziKVo7w= +github.com/nats-io/nuid v1.0.1/go.mod h1:19wcPz3Ph3q0Jbyiqsd0kePYG7A95tJPxeL+1OSON2c= +github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e 
h1:fD57ERR4JtEqsWbfPhv4DMiApHyliiK5xCTNVSPiaAs= +github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno= +github.com/oklog/oklog v0.3.2/go.mod h1:FCV+B7mhrz4o+ueLpx+KqkyXRGMWOYEvfiXtdGtbWGs= +github.com/oklog/run v1.0.0/go.mod h1:dlhp/R75TPv97u0XWUtDeV/lRKWPKSdTuV0TZvrmrQA= +github.com/oklog/ulid v1.3.1/go.mod h1:CirwcVhetQ6Lv90oh/F+FBtV6XMibvdAFo93nm5qn4U= +github.com/olekukonko/tablewriter v0.0.0-20170122224234-a0225b3f23b5/go.mod h1:vsDQFd/mU46D+Z4whnwzcISnGGzXWMclvtLoiIKAKIo= +github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= +github.com/onsi/ginkgo v1.7.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= +github.com/onsi/ginkgo v1.10.1/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= +github.com/onsi/ginkgo v1.12.0/go.mod h1:oUhWkIvk5aDxtKvDDuw8gItl8pKl42LzjC9KZE0HfGg= +github.com/onsi/gomega v1.4.3/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY= +github.com/onsi/gomega v1.7.0/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY= +github.com/onsi/gomega v1.7.1/go.mod h1:XdKZgCCFLUoM/7CFJVPcG8C1xQ1AJ0vpAezJrB7JYyY= +github.com/op/go-logging v0.0.0-20160315200505-970db520ece7/go.mod h1:HzydrMdWErDVzsI23lYNej1Htcns9BCg93Dk0bBINWk= +github.com/opentracing-contrib/go-observer v0.0.0-20170622124052-a52f23424492/go.mod h1:Ngi6UdF0k5OKD5t5wlmGhe/EDKPoUM3BXZSSfIuJbis= +github.com/opentracing/basictracer-go v1.0.0/go.mod h1:QfBfYuafItcjQuMwinw9GhYKwFXS9KnPs5lxoYwgW74= +github.com/opentracing/opentracing-go v1.0.2/go.mod h1:UkNAQd3GIcIGf0SeVgPpRdFStlNbqXla1AfSYxPUl2o= +github.com/opentracing/opentracing-go v1.1.0/go.mod h1:UkNAQd3GIcIGf0SeVgPpRdFStlNbqXla1AfSYxPUl2o= +github.com/opentracing/opentracing-go v1.2.0/go.mod h1:GxEUsuufX4nBwe+T+Wl9TAgYrxe9dPLANfrWvHYVTgc= +github.com/openzipkin-contrib/zipkin-go-opentracing v0.4.5/go.mod h1:/wsWhb9smxSfWAKL3wpBW7V8scJMt8N8gnaMCS9E/cA= +github.com/openzipkin/zipkin-go v0.1.6/go.mod 
h1:QgAqvLzwWbR/WpD4A3cGpPtJrZXNIiJc5AZX7/PBEpw= +github.com/openzipkin/zipkin-go v0.2.1/go.mod h1:NaW6tEwdmWMaCDZzg8sh+IBNOxHMPnhQw8ySjnjRyN4= +github.com/openzipkin/zipkin-go v0.2.2/go.mod h1:NaW6tEwdmWMaCDZzg8sh+IBNOxHMPnhQw8ySjnjRyN4= +github.com/pact-foundation/pact-go v1.0.4/go.mod h1:uExwJY4kCzNPcHRj+hCR/HBbOOIwwtUjcrb0b5/5kLM= +github.com/pascaldekloe/goe v0.0.0-20180627143212-57f6aae5913c/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc= +github.com/pborman/uuid v1.2.0/go.mod h1:X/NO0urCmaxf9VXbdlT7C2Yzkj2IKimNn4k+gtPdI/k= +github.com/pelletier/go-toml v1.0.1/go.mod h1:5z9KED0ma1S8pY6P1sdut58dfprrGBbd/94hg7ilaic= +github.com/pelletier/go-toml v1.2.0/go.mod h1:5z9KED0ma1S8pY6P1sdut58dfprrGBbd/94hg7ilaic= +github.com/pelletier/go-toml v1.7.0/go.mod h1:vwGMzjaWMwyfHwgIBhI2YUM4fB6nL6lVAvS1LBMMhTE= +github.com/pelletier/go-toml v1.8.1/go.mod h1:T2/BmBdy8dvIRq1a/8aqjN41wvWlN4lrapLU/GW4pbc= +github.com/performancecopilot/speed v3.0.0+incompatible/go.mod h1:/CLtqpZ5gBg1M9iaPbIdPPGyKcA8hKdoy6hAWba7Yac= +github.com/peterh/liner v0.0.0-20170317030525-88609521dc4b/go.mod h1:xIteQHvHuaLYG9IFj6mSxM0fCKrs34IrEQUhOYuGPHc= +github.com/peterh/liner v1.0.1-0.20171122030339-3681c2a91233/go.mod h1:xIteQHvHuaLYG9IFj6mSxM0fCKrs34IrEQUhOYuGPHc= +github.com/pierrec/lz4 v1.0.2-0.20190131084431-473cd7ce01a1/go.mod h1:3/3N9NVKO0jef7pBehbT1qWhCMrIgbYNnFAZCqQ5LRc= +github.com/pierrec/lz4 v2.0.5+incompatible/go.mod h1:pdkljMzZIN41W+lC3N2tnIh5sFi+IEE17M5jbnwPHcY= +github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= +github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pkg/profile v1.2.1/go.mod h1:hJw3o1OdXxsrSjjVksARp5W95eeEaEfptyVZyv6JUPA= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= 
+github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/posener/complete v1.1.1/go.mod h1:em0nMJCgc9GFtwrmVmEMR/ZL6WyhyjMBndrE9hABlRI= +github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw= +github.com/prometheus/client_golang v0.9.3-0.20190127221311-3c4408c8b829/go.mod h1:p2iRAGwDERtqlqzRXnrOVns+ignqQo//hLXqYxZYVNs= +github.com/prometheus/client_golang v0.9.3/go.mod h1:/TN21ttK/J9q6uSwhBd54HahCDft0ttaMvbicHlPoso= +github.com/prometheus/client_golang v1.0.0/go.mod h1:db9x61etRT2tGnBNRi70OPL5FsnadC4Ky3P0J6CfImo= +github.com/prometheus/client_golang v1.3.0/go.mod h1:hJaj2vgQTGQmVCsAACORcieXFeDPbaTKGT+JTgUa3og= +github.com/prometheus/client_golang v1.7.0/go.mod h1:PY5Wy2awLA44sXw4AOSfFBetzPP4j5+D6mVACh+pe2M= +github.com/prometheus/client_golang v1.7.1/go.mod h1:PY5Wy2awLA44sXw4AOSfFBetzPP4j5+D6mVACh+pe2M= +github.com/prometheus/client_golang v1.9.0 h1:Rrch9mh17XcxvEu9D9DEpb4isxjGBtcevQjKvxPRQIU= +github.com/prometheus/client_golang v1.9.0/go.mod h1:FqZLKOZnGdFAhOK4nqGHa7D66IdsO+O441Eve7ptJDU= +github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo= +github.com/prometheus/client_model v0.0.0-20190115171406-56726106282f/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo= +github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/prometheus/client_model v0.1.0/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/prometheus/client_model v0.2.0 h1:uq5h0d+GuxiXLJLNABMgp2qUWDPiLvgCzz2dUR+/W/M= +github.com/prometheus/client_model v0.2.0/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/prometheus/common v0.0.0-20181113130724-41aa239b4cce/go.mod 
h1:daVV7qP5qjZbuso7PdcryaAu0sAZbrN9i7WWcTMWvro= +github.com/prometheus/common v0.2.0/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4= +github.com/prometheus/common v0.4.0/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4= +github.com/prometheus/common v0.4.1/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4= +github.com/prometheus/common v0.7.0/go.mod h1:DjGbpBbp5NYNiECxcL/VnbXCCaQpKd3tt26CguLLsqA= +github.com/prometheus/common v0.10.0/go.mod h1:Tlit/dnDKsSWFlCLTWaA1cyBgKHSMdTB80sz/V91rCo= +github.com/prometheus/common v0.15.0 h1:4fgOnadei3EZvgRwxJ7RMpG1k1pOZth5Pc13tyspaKM= +github.com/prometheus/common v0.15.0/go.mod h1:U+gB1OBLb1lF3O42bTCL+FK18tX9Oar16Clt/msog/s= +github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= +github.com/prometheus/procfs v0.0.0-20190117184657-bf6a532e95b1/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= +github.com/prometheus/procfs v0.0.0-20190507164030-5867b95ac084/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA= +github.com/prometheus/procfs v0.0.2/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA= +github.com/prometheus/procfs v0.0.8/go.mod h1:7Qr8sr6344vo1JqZ6HhLceV9o3AJ1Ff+GxbHq6oeK9A= +github.com/prometheus/procfs v0.1.3/go.mod h1:lV6e/gmhEcM9IjHGsFOCxxuZ+z1YqCvr4OA4YeYWdaU= +github.com/prometheus/procfs v0.2.0/go.mod h1:lV6e/gmhEcM9IjHGsFOCxxuZ+z1YqCvr4OA4YeYWdaU= +github.com/prometheus/procfs v0.3.0 h1:Uehi/mxLK0eiUc0H0++5tpMGTexB8wZ598MIgU8VpDM= +github.com/prometheus/procfs v0.3.0/go.mod h1:lV6e/gmhEcM9IjHGsFOCxxuZ+z1YqCvr4OA4YeYWdaU= +github.com/prometheus/tsdb v0.7.1/go.mod h1:qhTCs0VvXwvX/y3TZrWD7rabWM+ijKTux40TwIPHuXU= +github.com/rcrowley/go-metrics v0.0.0-20181016184325-3113b8401b8a/go.mod h1:bCqnVzQkZxMG4s8nGwiZ5l3QUCyqpo9Y+/ZMZ9VjZe4= +github.com/rogpeppe/fastuuid v0.0.0-20150106093220-6724a57986af/go.mod h1:XWv6SoW27p1b0cqNHllgS5HIMJraePCO15w5zCzIWYg= +github.com/rogpeppe/go-internal v1.1.0/go.mod 
h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= +github.com/rogpeppe/go-internal v1.2.2/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= +github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= +github.com/russross/blackfriday v1.5.2/go.mod h1:JO/DiYxRf+HjHt06OyowR9PTA263kcR/rfWxYHBV53g= +github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= +github.com/ryanuber/columnize v0.0.0-20160712163229-9b3edd62028f/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts= +github.com/samuel/go-zookeeper v0.0.0-20190923202752-2cc03de413da/go.mod h1:gi+0XIa01GRL2eRQVjQkKGqKF3SF9vZR/HnPullcV2E= +github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc= +github.com/shiena/ansicolor v0.0.0-20151119151921-a422bbe96644/go.mod h1:nkxAfR/5quYxwPZhyDxgasBMnRtBZd0FCEpawpjMUFg= +github.com/shiena/ansicolor v0.0.0-20200904210342-c7312218db18 h1:DAYUYH5869yV94zvCES9F51oYtN5oGlwjxJJz7ZCnik= +github.com/shiena/ansicolor v0.0.0-20200904210342-c7312218db18/go.mod h1:nkxAfR/5quYxwPZhyDxgasBMnRtBZd0FCEpawpjMUFg= +github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc= +github.com/siddontang/go v0.0.0-20170517070808-cb568a3e5cc0/go.mod h1:3yhqj7WBBfRhbBlzyOC3gUxftwsU0u8gqevxwIHQpMw= +github.com/siddontang/goredis v0.0.0-20150324035039-760763f78400/go.mod h1:DDcKzU3qCuvj/tPnimWSsZZzvk9qvkvrIL5naVBPh5s= +github.com/siddontang/rdb v0.0.0-20150307021120-fc89ed2e418d/go.mod h1:AMEsy7v5z92TR1JKMkLLoaOQk++LVnOKL3ScbJ8GNGA= +github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo= +github.com/sirupsen/logrus v1.4.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo= +github.com/sirupsen/logrus v1.4.1/go.mod h1:ni0Sbl8bgC9z8RoU9G6nDWqqs/fq4eDPysMBDgk/93Q= +github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE= +github.com/sirupsen/logrus v1.6.0/go.mod 
h1:7uNnSEd1DgxDLC74fIahvMZmmYsHGZGEOFrfsX/uA88= +github.com/smartwalle/pongo2render v1.0.1/go.mod h1:MGnTzND7nEMz7g194kjlnw8lx/V5JJlb1hr5kDXEO0I= +github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc= +github.com/smartystreets/goconvey v1.6.4/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA= +github.com/soheilhy/cmux v0.1.4/go.mod h1:IM3LyeVVIOuxMH7sFAkER9+bJ4dT7Ms6E4xg4kGIyLM= +github.com/sony/gobreaker v0.4.1/go.mod h1:ZKptC7FHNvhBz7dN2LGjPVBz2sZJmc0/PkyDJOjmxWY= +github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA= +github.com/spf13/afero v1.1.2/go.mod h1:j4pytiNVoe2o6bmDsKpLACNPDBIoEAkihy7loJ1B0CQ= +github.com/spf13/cast v1.3.0/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE= +github.com/spf13/cobra v0.0.0-20170417170307-b6cb39589372/go.mod h1:1l0Ry5zgKvJasoi3XT1TypsSe7PqH0Sj9dhYf7v3XqQ= +github.com/spf13/cobra v0.0.3/go.mod h1:1l0Ry5zgKvJasoi3XT1TypsSe7PqH0Sj9dhYf7v3XqQ= +github.com/spf13/jwalterweatherman v1.0.0/go.mod h1:cQK4TGJAtQXfYWX+Ddv3mKDzgVb68N+wFjFa4jdeBTo= +github.com/spf13/pflag v0.0.0-20170417173400-9e4c21054fa1/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= +github.com/spf13/pflag v1.0.1/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= +github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= +github.com/spf13/viper v1.7.0/go.mod h1:8WkrPz2fc9jxqZNCJI/76HCieCp4Q8HaLFoCha5qpdg= +github.com/ssdb/gossdb v0.0.0-20180723034631-88f6b59b84ec/go.mod h1:QBvMkMya+gXctz3kmljlUCu/yB3GZ6oee+dUozsezQE= +github.com/streadway/amqp v0.0.0-20190404075320-75d898a42a94/go.mod h1:AZpEONHx3DKn8O/DFsRAY58/XVQiIPMTMB1SddzLXVw= +github.com/streadway/amqp v0.0.0-20190827072141-edfb9018d271/go.mod h1:AZpEONHx3DKn8O/DFsRAY58/XVQiIPMTMB1SddzLXVw= +github.com/streadway/handy v0.0.0-20190108123426-d5acb3125c2a/go.mod h1:qNTQ5P5JnDBl6z3cMAg/SywNDC5ABu5ApDIw6lUbRmI= +github.com/stretchr/objx 
v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= +github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= +github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= +github.com/stretchr/testify v1.6.1 h1:hDPOHmpOpP40lSULcqw7IrRb/u7w6RpDC9399XyoNd0= +github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/subosito/gotenv v1.2.0/go.mod h1:N0PQaV/YGNqwC0u51sEeR/aUtSLEXKX9iv69rRypqCw= +github.com/syndtr/goleveldb v0.0.0-20160425020131-cfa635847112/go.mod h1:Z4AUp2Km+PwemOoO/VB5AOx9XSsIItzFjoJlOSiYmn0= +github.com/syndtr/goleveldb v0.0.0-20181127023241-353a9fca669c/go.mod h1:Z4AUp2Km+PwemOoO/VB5AOx9XSsIItzFjoJlOSiYmn0= +github.com/tidwall/pretty v1.0.0 h1:HsD+QiTn7sK6flMKIvNmpqz1qrpP3Ps6jOKIKMooyg4= +github.com/tidwall/pretty v1.0.0/go.mod h1:XNkn88O1ChpSDQmQeStsy+sBenx6DDtFZJxhVysOjyk= +github.com/tmc/grpc-websocket-proxy v0.0.0-20170815181823-89b8d40f7ca8/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U= +github.com/tmc/grpc-websocket-proxy v0.0.0-20190109142713-0ad062ec5ee5/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U= +github.com/ugorji/go v0.0.0-20171122102828-84cb69a8af83/go.mod h1:hnLbHMwcvSihnDhEfx2/BzKp2xb0Y+ErdfYcrs9tkJQ= +github.com/urfave/cli v1.20.0/go.mod h1:70zkFmudgCuE/ngEzBv17Jvp/497gISqfk5gWijbERA= +github.com/urfave/cli v1.22.1/go.mod h1:Gos4lmkARVdJ6EkW0WaNv/tZAAMe9V7XWyB60NtXRu0= +github.com/vk496/cron v1.2.0 h1:fDxb4qNi6Rmxh3h9snW1sKJ0nHgjpg3fYc0Oq+igbvk= +github.com/vk496/cron v1.2.0/go.mod h1:f8lpm+SIXbjvujp8Dix4S2B+GGva/q0yrRPQ8hwTtOc= +github.com/wendal/errors v0.0.0-20130201093226-f66c77a7882b/go.mod h1:Q12BUT7DqIlHRmgv3RskH+UCM/4eqVMgI0EMmlSpAXc= +github.com/xdg/scram v0.0.0-20180814205039-7eeb5667e42c 
h1:u40Z8hqBAAQyv+vATcGgV0YCnDjqSL7/q/JyPhhJSPk= +github.com/xdg/scram v0.0.0-20180814205039-7eeb5667e42c/go.mod h1:lB8K/P019DLNhemzwFU4jHLhdvlE6uDZjXFejJXr49I= +github.com/xdg/stringprep v0.0.0-20180714160509-73f8eece6fdc h1:n+nNi93yXLkJvKwXNP9d55HC7lGK4H/SRcwB5IaUZLo= +github.com/xdg/stringprep v0.0.0-20180714160509-73f8eece6fdc/go.mod h1:Jhud4/sHMO4oL310DaZAKk9ZaJ08SJfe+sJh0HrGL1Y= +github.com/xiang90/probing v0.0.0-20190116061207-43a291ad63a2/go.mod h1:UETIi67q53MR2AWcXfiuqkDkRtnGDLqkBTpCHuJHxtU= +github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/gopher-lua v0.0.0-20171031051903-609c9cd26973/go.mod h1:aEV29XrmTYFr3CiRxZeGHpkvbwq+prZduBqMaascyCU= +go.etcd.io/bbolt v1.3.2/go.mod h1:IbVyRI1SCnLcuJnV2u8VeU0CEYM7e686BmAb1XKL+uU= +go.etcd.io/bbolt v1.3.3/go.mod h1:IbVyRI1SCnLcuJnV2u8VeU0CEYM7e686BmAb1XKL+uU= +go.etcd.io/etcd v0.0.0-20191023171146-3cf2f69b5738/go.mod h1:dnLIgRNXwCJa5e+c6mIZCrds/GIG4ncV9HhK5PX7jPg= +go.etcd.io/etcd v3.3.25+incompatible/go.mod h1:yaeTdrJi5lOmYerz05bd8+V7KubZs8YSFZfzsF9A6aI= +go.mongodb.org/mongo-driver v1.4.5 h1:TLtO+iD8krabXxvY1F1qpBOHgOxhLWR7XsT7kQeRmMY= +go.mongodb.org/mongo-driver v1.4.5/go.mod h1:WcMNYLx/IlOxLe6JRJiv2uXuCz6zBLndR4SoGjYphSc= +go.opencensus.io v0.20.1/go.mod h1:6WKK9ahsWS3RSO+PY9ZHZUfv2irvY6gN279GOPZjmmk= +go.opencensus.io v0.20.2/go.mod h1:6WKK9ahsWS3RSO+PY9ZHZUfv2irvY6gN279GOPZjmmk= +go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= +go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8= +go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= +go.starlark.net v0.0.0-20190702223751-32f345186213/go.mod h1:c1/X6cHgvdXj6pUlmWKMkuqRnW4K8x2vwt6JAaaircg= +go.uber.org/atomic v1.3.2/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= +go.uber.org/atomic v1.4.0/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= +go.uber.org/atomic v1.5.0/go.mod 
h1:sABNBOSYdrvTF6hTgEIbc7YasKWGhgEQZyfxyTvoXHQ= +go.uber.org/atomic v1.6.0/go.mod h1:sABNBOSYdrvTF6hTgEIbc7YasKWGhgEQZyfxyTvoXHQ= +go.uber.org/multierr v1.1.0/go.mod h1:wR5kodmAFQ0UK8QlbwjlSNy0Z68gJhDJUG5sjR94q/0= +go.uber.org/multierr v1.3.0/go.mod h1:VgVr7evmIr6uPjLBxg28wmKNXyqE9akIJ5XnfpiKl+4= +go.uber.org/multierr v1.5.0/go.mod h1:FeouvMocqHpRaaGuG9EjoKcStLC43Zu/fmqdUMPcKYU= +go.uber.org/tools v0.0.0-20190618225709-2cfd321de3ee/go.mod h1:vJERXedbb3MVM5f9Ejo0C68/HhF8uaILCdgjnY+goOA= +go.uber.org/zap v1.10.0/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q= +go.uber.org/zap v1.13.0/go.mod h1:zwrFLgMcdUuIBviXEYEH1YKNaOBnKXsx2IPda5bBwHM= +go.uber.org/zap v1.15.0/go.mod h1:Mb2vm2krFEG5DV0W9qcHBYFtp/Wku1cvYaqPsS/WYfc= +golang.org/x/arch v0.0.0-20190927153633-4e8777c89be4/go.mod h1:flIaEI6LNU6xOCD5PaJvn9wGP0agmIOqjrtsKGRguv4= +golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= +golang.org/x/crypto v0.0.0-20181029021203-45a5f77698d3/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20190422162423-af44ce270edf/go.mod h1:WFFai1msRO1wXaEeE5yQxYXgSfI8pQAWXbQop6sCtWE= +golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20190530122614-20be4c3c3ed5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20190701094942-4def268fd1a4/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto 
v0.0.0-20201221181555-eec23a3978ad h1:DN0cp81fZ3njFcrLCytUHRSUkqBjfTo4Tx9RJTWs0EY= +golang.org/x/crypto v0.0.0-20201221181555-eec23a3978ad/go.mod h1:jdWPYTVW3xRLrWPugEBEK3UY2ZEsg3UU495nc5E+M+I= +golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= +golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= +golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= +golang.org/x/exp v0.0.0-20190829153037-c13cbed26979/go.mod h1:86+5VVa7VpoJ4kLfm080zCjGlMRFzhUhsZKEZO7MGek= +golang.org/x/exp v0.0.0-20191030013958-a1ab85dbe136/go.mod h1:JXzH8nQsPlswgeRAPE3MuO9GYsAcnJvJ4vnMwN/5qkY= +golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js= +golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= +golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= +golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= +golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= +golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20190409202823-959b441ac422/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20190909230951-414d861bb4ac/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20201208152925-83fdc39ff7b5/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= +golang.org/x/mobile v0.0.0-20190312151609-d3739f865fa6/go.mod h1:z+o9i4GpDbdi3rU15maQ/Ox0txvL9dWGYEHz965HBQE= +golang.org/x/mobile v0.0.0-20190719004257-d2bd2a29d028/go.mod 
h1:E/iHnbuqvinMTCcRqshq8CkpyQDoeVncDDYHnLhea+o= +golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc= +golang.org/x/mod v0.1.0/go.mod h1:0QHyrYULN0/3qlju5TqG8bIK38QM8yzMo5ekMj3DlcY= +golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= +golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.4.1 h1:Kvvh58BN8Y9/lBi7hTekvtMpm07eUZ0ck5pRHpsMWrY= +golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20181023162649-9b4f9f5ad519/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20181201002055-351d144fa1fc/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20181220203305-927f97764cc3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190125091013-d26f9f9a57f3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190501004415-9ce7a6920f09/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= 
+golang.org/x/net v0.0.0-20190503192946-f4e77d36d62c/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks= +golang.org/x/net v0.0.0-20190613194153-d28f0bde5980/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190813141303-74dc4d7220e7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190923162816-aa69164e4478/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200202094626-16171245cfb2/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200625001655-4c5254603344/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= +golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.0.0-20201110031124-69a78807bb2b/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.0.0-20210119194325-5f4716e94777 h1:003p0dJM77cxMSyCPFphvZf/Y5/NXf5fzg6ufd1/Oew= +golang.org/x/net v0.0.0-20210119194325-5f4716e94777/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= +golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45 h1:SVwTIAaPC2U/AvvLNZ2a7OVsmBpC8L5BlwK1whH3hm0= +golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync 
v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190412183630-56d357773e84/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20201207232520-09787c993a3a h1:DcqTD9SDLc+1P/r1EmRBwnVsrOwW+kk2vWf9n+1sGhs= +golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20181026203630-95b1ffbd15a5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20181107165924-66b7b1311ac8/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20181122145206-62eef0e2fa9b/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190403152447-81d4e9dc473e/go.mod 
h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190419153524-e8e3143a4f4a/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190531175056-4c3a928424d2/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190626221950-04f50cda93cb/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190826190057-c7b8b68b1456/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191005200804-aed5e4c7ecf9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191010194322-b09406accb47/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191120155948-bd437916bb0e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191220142924-d4481acd189f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200106162015-b016eb3dc98e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= 
+golang.org/x/sys v0.0.0-20200615200032-f1bc736245b1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200625212154-ddb9806d33ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201214210602-f9fddec55a1e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4 h1:myAQVi0cGEoqQVR5POX+8RR2mrocKqNN1hmeMqhX27k= +golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw= +golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= +golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.5 h1:i6eZZ+zk0SOf0xgBpEpPD18qWcJda6q1sxt3S0kzyUQ= +golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/time v0.0.0-20180412165947-fbb02b2291d2/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/tools v0.0.0-20180221164845-07fd8470d635/go.mod 
h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20180828015842-6cd1fcedba52/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20181030221726-6c7e314b6563/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20181221001348-537d06c36207/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= +golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190312151545-0bb0c0a6e846/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190328211700-ab21143f2384/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190329151228-23e29df326fe/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190416151739-9c9e1878f421/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190420181800-aa740d480789/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190531172133-b3315ee88b7d/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190606124116-d0a3d012864b/go.mod 
h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190816200558-6889da9d5479/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20190911174233-4f2ddba30aff/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191029041327-9cc4af7d6b2c/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191029190741-b9c20aec41a5/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191112195655-aa38f8e97acc/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191127201027-ecd32218bd7f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191130070609-6e064ea0cf2d/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20200103221440-774c71fcf114/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20201211185031-d93e913c1a58/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.1.0 h1:po9/4sTYwZU9lPhi1tOrb4hCv3qrhiQ77LZfGa2OjwY= +golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0= +golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors 
v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE= +golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +google.golang.org/api v0.3.1/go.mod h1:6wY9I6uQWHQ8EM57III9mq/AjF+i8G65rmVagqKMtkk= +google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE= +google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M= +google.golang.org/api v0.8.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= +google.golang.org/api v0.9.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= +google.golang.org/api v0.13.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= +google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= +google.golang.org/appengine v1.2.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= +google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= +google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= +google.golang.org/appengine v1.6.1 h1:QzqyMA1tlu6CgqCDUtU9V+ZKhLFT2dkJuANu5QaxI3I= +google.golang.org/appengine v1.6.1/go.mod h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww/cMBSeb0= +google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= +google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190418145605-e7d98fc518a7/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190425155659-357c62f0e4bb/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190502173448-54afdca5d873/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto 
v0.0.0-20190530194941-fb225487d101/go.mod h1:z3L6/3dTEVtUr6QSP8miRzeRqwQOioJ9I66odjN4I7s= +google.golang.org/genproto v0.0.0-20190801165951-fa694d86fc64/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= +google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= +google.golang.org/genproto v0.0.0-20190911173649-1774047e7e51/go.mod h1:IbNlFCBrqXvoKpeg0TB2l7cyZUmoaFKYIwrEpbDKLA8= +google.golang.org/genproto v0.0.0-20191108220845-16a3f7862a1a/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo= +google.golang.org/grpc v1.17.0/go.mod h1:6QZJwpn2B+Zp71q/5VxRsJ6NXXVCE5NRUHRo+f3cWCs= +google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= +google.golang.org/grpc v1.20.0/go.mod h1:chYK+tFQF0nDUGJgXMSgLCQk3phJEuONr2DCgLDdAQM= +google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= +google.golang.org/grpc v1.21.0/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= +google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= +google.golang.org/grpc v1.22.1/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= +google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= +google.golang.org/grpc v1.23.1/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= +google.golang.org/grpc v1.26.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= +google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= +google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= 
+google.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE= +google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo= +google.golang.org/protobuf v1.22.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.25.0 h1:Ejskq+SyPohKW+1uil0JJMtmHCgJPJ/qWTxr8qp+R4c= +google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c= +gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20200902074654-038fdea0a05b h1:QRR6H1YWRnHb4Y/HeNFCTJLFVxaq6wH4YuVdsUOr75U= +gopkg.in/check.v1 v1.0.0-20200902074654-038fdea0a05b/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/cheggaaa/pb.v1 v1.0.25/go.mod h1:V/YB90LKu/1FcN3WVnfiiE5oMCibMjukxqG/qStrOgw= +gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= +gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys= +gopkg.in/gcfg.v1 v1.2.3/go.mod h1:yesOnuUOFQAhST5vPY4nbZsb/huCgGGXlipJsBn0b3o= +gopkg.in/ini.v1 v1.51.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= +gopkg.in/mgo.v2 v2.0.0-20180705113604-9856a29383ce/go.mod h1:yeKp02qBN3iKW1OzL3MGk2IdtZzaj7SFntXj72NppTA= +gopkg.in/mgo.v2 
v2.0.0-20190816093944-a6b53ec6cb22/go.mod h1:yeKp02qBN3iKW1OzL3MGk2IdtZzaj7SFntXj72NppTA= +gopkg.in/resty.v1 v1.12.0/go.mod h1:mDo4pnntr5jdWRML875a/NmxYqAlA73dVijT2AXvQQo= +gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw= +gopkg.in/warnings.v0 v0.1.2/go.mod h1:jksf8JmL6Qr/oQM2OXTHunEvvTAsrWBLb6OOjuVWRNI= +gopkg.in/yaml.v2 v2.0.0-20170812160011-eb3733d160e7/go.mod h1:JAlM8MvJe8wmxCU4Bli9HhUf9+ttbYbLASfIpnQbh74= +gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.5/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= +gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= +gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c h1:dUUwHk2QECo/6vqA44rthZ8ie2QXMNeKRTHCNY2nXvo= +gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +honnef.co/go/tools v0.0.0-20180728063816-88497007e858/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg= +honnef.co/go/tools v0.0.1-2020.1.5/go.mod 
h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= +rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8= +rsc.io/pdf v0.1.1/go.mod h1:n8OzWcQ6Sp37PL01nO98y4iUCRdTGarVfzxY20ICaU4= +sigs.k8s.io/yaml v1.1.0/go.mod h1:UJmg0vDUVViEyp3mgSv9WPwZCDxu4rQW1olrI1uml+o= +sourcegraph.com/sourcegraph/appdash v0.0.0-20190731080439-ebfcffb1b5c0/go.mod h1:hI742Nqp5OhwiqlzhgfbWU4mW4yO10fP+LoT9WOswdU= diff --git a/main.go b/main.go new file mode 100644 index 0000000..49e14ed --- /dev/null +++ b/main.go @@ -0,0 +1,31 @@ +package main + +import ( + "os" + + "cloud.o-forge.io/core/oc-catalog/routers" + "cloud.o-forge.io/core/oc-catalog/services" + + beego "github.com/beego/beego/v2/server/web" +) + +func main() { + + // If we have any parameter, we run the beego directly + if len(os.Args) > 1 { + beego.Run() + } + + routers.Init() + services.Init() + + if beego.BConfig.RunMode == "dev" { + // beego.BConfig.WebConfig.DirectoryIndex = true + beego.BConfig.WebConfig.StaticDir["/swagger"] = "swagger" + } + beego.Run() + + defer func() { + services.MongoDisconnect() + }() +} diff --git a/models/computing.go b/models/computing.go new file mode 100644 index 0000000..5169077 --- /dev/null +++ b/models/computing.go @@ -0,0 +1,164 @@ +package models + +import ( + "cloud.o-forge.io/core/oc-catalog/models/rtype" + "cloud.o-forge.io/core/oc-catalog/services" + "github.com/beego/beego/v2/core/logs" + "go.mongodb.org/mongo-driver/bson/primitive" +) + +type ExecutionRequirementsModel struct { + CPUs uint `json:"cpus" required:"true"` + GPUs uint `json:"gpus" description:"Amount of GPUs needed"` + RAM uint `json:"ram" required:"true" description:"Units in MB"` + + // We should check closely how to deal with storage, since they are independent models + // but also part of a DataCenter + // Storage uint `json:"storage" description:"Units in MB"` + + Parallel bool `json:"parallel"` + ScalingModel uint `json:"scaling_model"` + DiskIO string `json:"disk_io"` +} + +type 
RepositoryModel struct { + Credentials string `json:"credentials"` + Url string `json:"url"` +} + +type ComputingNEWModel struct { + Description string `json:"description" required:"true"` + Name string `json:"name,omitempty" required:"true" validate:"required" description:"Name of the computing"` + ShortDescription string `json:"short_description" required:"true" validate:"required"` + Logo string `json:"logo" required:"true" validate:"required"` + + Type string `json:"type,omitempty" required:"true"` + Owner string `json:"owner"` + License string `json:"license"` + Price uint `json:"price"` + + ExecutionRequirements ExecutionRequirementsModel `json:"execution_requirements"` + + Dinputs []string + Doutputs []string + + Repository RepositoryModel `json:"repository"` +} + +type ComputingModel struct { + ID string `json:"ID" bson:"_id" required:"true" example:"5099803df3f4948bd2f98391"` + ComputingNEWModel `bson:",inline"` +} + +func (model ComputingModel) getRtype() rtype.Rtype { + return rtype.COMPUTING +} + +func (model ComputingModel) getName() string { + return model.Name +} + +// A user can have multiple workload project with the same model. 
We must distinguish what is +// the model and what is the user object + +type ComputingObject struct { + ReferenceID primitive.ObjectID `json:"referenceID" description:"Computing model ID"` + + Inputs []string `json:"inputs"` + Outputs []string `json:"outputs"` + + DataCenterID string `json:"datacenterID" description:"Datacenter where the computing will be executed"` +} + +func (obj ComputingObject) getHost() *string { + return nil // Host is DC only attribute +} + +func (obj *ComputingObject) setReference(rID primitive.ObjectID) { + obj.ReferenceID = rID +} + +func (obj ComputingObject) getReference() primitive.ObjectID { + return obj.ReferenceID +} + +func (obj ComputingObject) getRtype() rtype.Rtype { + return rtype.COMPUTING +} + +func (obj ComputingObject) getModel() (ret ResourceModel, err error) { + var ret2 ComputingModel + res := services.MngoCollComputing.FindOne(services.MngoCtx, + primitive.M{"_id": obj.ReferenceID}, + ) + + if err = res.Err(); err != nil { + return + } + + err = res.Decode(&ret2) + return ret2, err +} + +func (obj ComputingObject) getName() (name *string) { + + aa, err := obj.getModel() + + if err != nil { + logs.Warn(err) + return + } + + name2 := aa.getName() + + return &name2 +} + +func (obj ComputingObject) isLinked(rObjID string) LinkingState { + if contains(obj.Inputs, rObjID) { + return INPUT + } + + if contains(obj.Outputs, rObjID) { + return OUTPUT + } + + return NO_LINK +} + +func (obj *ComputingObject) addLink(direction LinkingState, rID string) { + switch direction { + case INPUT: + obj.Inputs = append(obj.Inputs, rID) + case OUTPUT: + obj.Outputs = append(obj.Outputs, rID) + } +} + +func GetOneComputing(ID string) (object *ComputingModel, err error) { + obj, err := getOneResourceByID(ID, rtype.COMPUTING) + + if err != nil { + return object, err + } + + object = obj.(*ComputingModel) + + return object, err +} + +func GetMultipleComputing(IDs []string) (object *[]ComputingModel, err error) { + objArray, err := 
getMultipleResourceByIDs(IDs, rtype.COMPUTING) + + if err != nil { + return nil, err + } + + object = objArray.(*[]ComputingModel) + + return object, err +} + +func PostOneComputing(obj ComputingNEWModel) (ID string, err error) { + return postOneResource(obj, rtype.COMPUTING) +} diff --git a/models/data.go b/models/data.go new file mode 100644 index 0000000..eb36d25 --- /dev/null +++ b/models/data.go @@ -0,0 +1,125 @@ +package models + +import ( + "cloud.o-forge.io/core/oc-catalog/models/rtype" + "cloud.o-forge.io/core/oc-catalog/services" + "github.com/beego/beego/v2/core/logs" + "go.mongodb.org/mongo-driver/bson/primitive" +) + +// TODO: Define better the different types of Data model with model herarchy +// TODO: review why swagger are not using the metadata when we do herarchy +type DataNEWModel struct { + Name string `json:"name,omitempty" required:"true" validate:"required" description:"Name of the data"` + ShortDescription string `json:"short_description" required:"true" validate:"required"` + Logo string `json:"logo" required:"true" validate:"required"` + Description string `json:"description" required:"true" validate:"required"` + + Type string `json:"type,omitempty" required:"true" validate:"required" description:"Define type of data" example:"file"` + Example string `json:"example" required:"true" validate:"required" description:"base64 encoded data"` + Location string `json:"location" required:"true" validate:"required"` + Dtype string `json:"dtype"` + Protocol []string `json:"protocol"` //TODO Enum type +} + +type DataModel struct { + ID string `json:"ID" bson:"_id" required:"true" validate:"required"` + DataNEWModel `bson:",inline"` +} + +func (obj DataModel) getRtype() rtype.Rtype { + return rtype.DATA +} + +func (model DataModel) getName() string { + return model.Name +} + +type DataIO struct { + Counter uint `description:"Incremental number starting from 0"` +} + +type DataObject struct { + ReferenceID primitive.ObjectID `json:"referenceID" 
description:"Data model ID"` +} + +func (obj DataObject) getHost() *string { + return nil // Host is DC only attribute +} + +func (obj DataObject) getModel() (ret ResourceModel, err error) { + var ret2 DataModel + res := services.MngoCollData.FindOne(services.MngoCtx, + primitive.M{"_id": obj.ReferenceID}, + ) + + if err = res.Err(); err != nil { + return + } + + err = res.Decode(&ret2) + return ret2, err +} + +func (obj *DataObject) setReference(rID primitive.ObjectID) { + obj.ReferenceID = rID +} + +func (obj DataObject) getReference() primitive.ObjectID { + return obj.ReferenceID +} + +func (obj DataObject) getRtype() rtype.Rtype { + return rtype.DATA +} + +func (obj DataObject) getName() (name *string) { + + res := services.MngoCollData.FindOne(services.MngoCtx, primitive.M{"_id": obj.ReferenceID}) + + if res.Err() != nil { + logs.Error(res) + return + } + + var ret DataModel + res.Decode(&ret) + + return &ret.Name +} + +func (obj DataObject) isLinked(rID string) LinkingState { + return NO_LINK +} + +func (obj *DataObject) addLink(direction LinkingState, rObjID string) { + +} + +func PostOneData(obj DataNEWModel) (string, error) { + return postOneResource(obj, rtype.DATA) +} + +func GetMultipleData(IDs []string) (object *[]DataModel, err error) { + objArray, err := getMultipleResourceByIDs(IDs, rtype.DATA) + + if err != nil { + return nil, err + } + + object = objArray.(*[]DataModel) + + return object, err +} + +func GetOneData(ID string) (object *DataModel, err error) { + obj, err := getOneResourceByID(ID, rtype.DATA) + + if err != nil { + return nil, err + } + + object = obj.(*DataModel) + + return object, err +} diff --git a/models/datacenter.go b/models/datacenter.go new file mode 100644 index 0000000..3a66515 --- /dev/null +++ b/models/datacenter.go @@ -0,0 +1,212 @@ +package models + +import ( + "net" + "time" + + "cloud.o-forge.io/core/oc-catalog/models/rtype" + "cloud.o-forge.io/core/oc-catalog/services" + "github.com/beego/beego/v2/core/logs" + 
"go.mongodb.org/mongo-driver/bson/primitive" +) + +type DatacenterCpuModel struct { + Cores uint `json:"cores" required:"true"` //TODO: validate + Architecture string `json:"architecture"` //TOOD: enum + Shared bool `json:"shared"` + MinimumMemory uint `json:"minimum_memory"` + Platform string `json:"platform"` +} + +type DatacenterMemoryModel struct { + Size uint `json:"size" description:"Units in MB"` + Ecc bool `json:"ecc"` +} + +type DatacenterGpuModel struct { + CudaCores uint `json:"cuda_cores"` + Model string `json:"model"` + Memory uint `json:"memory" description:"Units in MB"` + TensorCores uint `json:"tensor_cores"` +} + +type DatacenterNEWModel struct { + Name string `json:"name" required:"true"` + Type string `json:"type,omitempty" required:"true"` + Acronym string `json:"acronym" required:"true" description:"id of the DC"` + Hosts []string `json:"hosts" required:"true" description:"list of host:port"` + Description string `json:"description" required:"true"` + ShortDescription string `json:"short_description" required:"true" validate:"required"` + Logo string `json:"logo" required:"true" validate:"required"` + + CPU DatacenterCpuModel `json:"cpu" required:"true"` + RAM DatacenterMemoryModel `json:"ram" required:"true"` + GPU []DatacenterGpuModel `json:"gpu" required:"true"` + + Owner string `json:"owner" ` + BookingPrice int `json:"bookingPrice" ` +} + +type DatacenterModel struct { + ID string `json:"ID" bson:"_id" required:"true"` + DatacenterNEWModel `bson:",inline"` +} + +func GetDatacenterFromAcronym(DC_name string) (retObj *DatacenterModel) { + // TODO: This call should get the data from the peers, since it could be a different + // host in the future + res := services.MngoCollDatacenter.FindOne(services.MngoCtx, primitive.M{"acronym": DC_name}) + + if res.Err() != nil { + logs.Error(res) + return + } + + var ret DatacenterModel + res.Decode(&ret) + + return &ret +} + +func (obj DatacenterModel) GetTotalCPUs() uint { + return obj.CPU.Cores +} + 
+func (obj DatacenterModel) GetTotalGPUs() uint { + return uint(len(obj.GPU)) +} + +func (obj DatacenterModel) GetTotalRAM() uint { + return obj.RAM.Size +} + +func (obj DatacenterModel) getRtype() rtype.Rtype { + return rtype.DATACENTER +} + +func (model DatacenterModel) getName() string { + return model.Name +} + +type DatacenterObject struct { + ReferenceID primitive.ObjectID `json:"referenceID" description:"Data model ID"` +} + +func (obj *DatacenterObject) setReference(rID primitive.ObjectID) { + obj.ReferenceID = rID +} + +func (obj DatacenterObject) getModel() (ret ResourceModel, err error) { + var ret2 DatacenterModel + res := services.MngoCollDatacenter.FindOne(services.MngoCtx, + primitive.M{"_id": obj.ReferenceID}, + ) + + if err = res.Err(); err != nil { + return + } + + err = res.Decode(&ret2) + return ret2, err +} + +func (obj DatacenterObject) getReference() primitive.ObjectID { + return obj.ReferenceID +} + +// Return a reachable host. If no one is reachable, return the first entry +func (obj DatacenterObject) getHost() (host *string) { + res := services.MngoCollDatacenter.FindOne(services.MngoCtx, primitive.M{"_id": obj.ReferenceID}) + + if res.Err() != nil { + logs.Error(res) + return nil + } + + var ret DatacenterModel + + err := res.Decode(&ret) + if err != nil { + logs.Error(res) + return nil + } + + host = GetHost(ret.Hosts) + return +} + +func GetHost(hosts []string) (host *string) { + // Return the first one if we can't reach any server + host = &hosts[0] + + for _, singleHost := range hosts { + conn, err := net.DialTimeout("tcp", singleHost, time.Duration(3)*time.Second) //FIXME: longer wait for connection in the future? 
+ + if err != nil { + continue + } + + if conn != nil { + //bingo + host = &singleHost + conn.Close() + return + } + } + return +} + +func (obj DatacenterObject) getRtype() rtype.Rtype { + return rtype.DATACENTER +} + +func (obj DatacenterObject) getName() (name *string) { + + res := services.MngoCollDatacenter.FindOne(services.MngoCtx, primitive.M{"_id": obj.ReferenceID}) + + if res.Err() != nil { + logs.Error(res) + return + } + + var ret DatacenterModel + res.Decode(&ret) + + return &ret.Name +} + +func (obj DatacenterObject) isLinked(rID string) LinkingState { + return NO_LINK +} + +func (obj *DatacenterObject) addLink(direction LinkingState, rObjID string) { + +} + +func PostOneDatacenter(obj DatacenterNEWModel) (string, error) { + return postOneResource(obj, rtype.DATACENTER) +} + +func GetMultipleDatacenter(IDs []string) (object *[]DatacenterModel, err error) { + objArray, err := getMultipleResourceByIDs(IDs, rtype.DATACENTER) + + if err != nil { + return nil, err + } + + object = objArray.(*[]DatacenterModel) + + return object, err +} + +func GetOneDatacenter(ID string) (object *DatacenterModel, err error) { + obj, err := getOneResourceByID(ID, rtype.DATACENTER) + + if err != nil { + return object, err + } + + object = obj.(*DatacenterModel) //TODO: fix a possible segfault in this model and the others + + return object, err +} diff --git a/models/generic.go b/models/generic.go new file mode 100644 index 0000000..33088d7 --- /dev/null +++ b/models/generic.go @@ -0,0 +1,118 @@ +package models + +import ( + "errors" + + "cloud.o-forge.io/core/oc-catalog/models/rtype" + "cloud.o-forge.io/core/oc-catalog/services" + "github.com/beego/beego/v2/core/logs" + "go.mongodb.org/mongo-driver/bson" + "go.mongodb.org/mongo-driver/bson/primitive" + "go.mongodb.org/mongo-driver/mongo/options" +) + +func getObjIDFromString(id string) interface{} { + objectID, err := primitive.ObjectIDFromHex(id) + if err == nil { + return objectID + } + + return id +} + +func 
getMultipleObjIDFromArray(ids []string) []interface{} { + + var ret []interface{} + + for _, val := range ids { + ret = append(ret, getObjIDFromString(val)) + } + + return ret + +} + +func getOneResourceByID(ID string, rType rtype.Rtype) (obj interface{}, err error) { + + targetDBCollection := rType.MongoCollection() + var retObj interface{} + + // asd := rType. + switch rType { + case rtype.DATA: + retObj = &DataModel{} + case rtype.COMPUTING: + retObj = &ComputingModel{} + case rtype.STORAGE: + retObj = &StorageModel{} + case rtype.DATACENTER: + retObj = &DatacenterModel{} + default: + message := "Rtype " + rType.String() + " is not implemented" + logs.Error(message) + return nil, errors.New(message) + } + + filter := bson.M{"_id": getObjIDFromString(ID)} + + res := targetDBCollection.FindOne(services.MngoCtx, filter) + res.Decode(retObj) + + if res.Err() != nil { + logs.Warn("Couldn't find resource: " + res.Err().Error()) + } + + return retObj, res.Err() +} + +func getMultipleResourceByIDs(IDs []string, rType rtype.Rtype) (interface{}, error) { + + targetDBCollection := rType.MongoCollection() + var retObj interface{} + + // asd := rType. 
+ switch rType { + case rtype.DATA: + retObj = &[]DataModel{} + case rtype.COMPUTING: + retObj = &[]ComputingModel{} + case rtype.STORAGE: + retObj = &[]StorageModel{} + case rtype.DATACENTER: + retObj = &[]DatacenterModel{} + default: + message := "Rtype " + rType.String() + " is not implemented" + logs.Error(message) + return nil, errors.New(message) + } + + filter := bson.M{"_id": bson.M{"$in": getMultipleObjIDFromArray(IDs)}} + + //FIXME: Limit of find + res, err := targetDBCollection.Find(services.MngoCtx, + filter, + options.Find().SetLimit(100), + ) + + if err != nil { + logs.Warn("Couldn't find multiple data: " + err.Error()) + return nil, err + } + res.All(services.MngoCtx, retObj) + + return retObj, res.Err() +} + +func postOneResource(retObj interface{}, rType rtype.Rtype) (ID string, err error) { + + targetDBCollection := rType.MongoCollection() + + result, err := targetDBCollection.InsertOne(services.MngoCtx, retObj) + if err != nil { + logs.Warn("Couldn't insert resource: " + err.Error()) + return "", err + } + + return result.InsertedID.(primitive.ObjectID).Hex(), nil + +} diff --git a/models/mxgraph.go b/models/mxgraph.go new file mode 100644 index 0000000..7f227fc --- /dev/null +++ b/models/mxgraph.go @@ -0,0 +1,35 @@ +package models + +import ( + "encoding/xml" +) + +type MxGraphModel struct { + XMLName xml.Name `xml:"mxGraphModel"` + + Root struct { + XMLName xml.Name `xml:"root"` + MxCell []MxCell `xml:"mxCell"` + } +} + +type MxCell struct { + XMLName xml.Name `xml:"mxCell"` + ID string `xml:"id,attr"` + Parent *string `xml:"parent,attr"` + RID *string `xml:"rID,attr"` + Source *string `xml:"source,attr"` + Target *string `xml:"target,attr"` +} + +type mxissue struct { + msg string +} + +func (m *mxissue) Error() string { + return m.msg +} + +func newMxIssue(message string) error { + return &mxissue{message} +} diff --git a/models/rtype/rtype.go b/models/rtype/rtype.go new file mode 100644 index 0000000..bd934ae --- /dev/null +++ 
b/models/rtype/rtype.go @@ -0,0 +1,73 @@ +package rtype + +import ( + "cloud.o-forge.io/core/oc-catalog/services" + "github.com/beego/beego/v2/core/logs" + "go.mongodb.org/mongo-driver/mongo" +) + +//http://www.inanzzz.com/index.php/post/wqbs/a-basic-usage-of-int-and-string-enum-types-in-golang + +type Rtype int + +const ( + INVALID Rtype = iota + DATA + COMPUTING + STORAGE + DATACENTER +) + +var extensions = [...]string{ + "INVALID", + "data", + "computing", + "storage", + "datacenter", +} + +func IsValidRtype(input string) bool { + for _, v := range extensions { + if v == input { + return true + } + } + + return false +} + +func NewRtype(rType string) Rtype { + switch rType { + case DATA.String(): + return DATA + case COMPUTING.String(): + return COMPUTING + case STORAGE.String(): + return STORAGE + case DATACENTER.String(): + return DATACENTER + default: + return INVALID + } +} + +func (e Rtype) String() string { + return extensions[e] +} + +func (e Rtype) MongoCollection() *mongo.Collection { + switch e { + case DATA: + return services.MngoCollData + case COMPUTING: + return services.MngoCollComputing + case STORAGE: + return services.MngoCollStorage + case DATACENTER: + return services.MngoCollDatacenter + default: + message := "Rtype " + e.String() + " is not implemented. 
Returning a nil" + logs.Error(message) + return nil + } +} diff --git a/models/schedule.go b/models/schedule.go new file mode 100644 index 0000000..d157166 --- /dev/null +++ b/models/schedule.go @@ -0,0 +1,322 @@ +package models + +import ( + "errors" + "time" + + "cloud.o-forge.io/core/oc-catalog/services" + "github.com/beego/beego/v2/core/logs" + "github.com/vk496/cron" + "go.mongodb.org/mongo-driver/bson/primitive" + "go.mongodb.org/mongo-driver/mongo" + "go.mongodb.org/mongo-driver/mongo/options" +) + +const MAX_DATES uint = 1000 // Max number of dates to retrieve +const MAX_SCHEDULES int = 200 + +type ScheduleDB struct { + StartDate time.Time + StopDate time.Time + Workflow string + ResourceQty ExecutionRequirementsModel +} + +type ScheduleInfo struct { + Total int + NextExecutions [5]string +} + +type WorkflowSchedule struct { + IsService bool `description:"Service: true, Task: false"` + StartDate time.Time + StopDate time.Time + Cron string `json:"cron"` + Duration uint `json:"duration" description:"Durantion in seconds" example:"7200"` + Events string `json:"events"` + IsBooked bool `json:"isBooked"` +} + +func timeBetween(base, t1, t2 time.Time) bool { + if t2.Before(t1) { + return false + } + + if (t1.Before(base) || t1.Equal(base)) && (t2.After(base) || t2.Equal(base)) { + return true + } else { + return false + } +} + +func CheckSchedule(cronString string, duration uint, cronFirstDate, cronLastDate time.Time, desiredToSchedule ExecutionRequirementsModel) (err error) { + // ######################################## + // TODO: Redesign the whole flow for all possible cases. Meanwhile, use it like this + // ######################################## + // + // Here we have already a timeslot filled by other cron Task. 
So we must check if there + // is still resources available + + // However, we could have 3 possibilities here: + // + // + // ## Task with start and stop Date in the DB ## + // + // startDate stopDate + // .-----------------------------· + // | | + // ·-----------------------------· + // + // + // New tasks that have conflicts with what we have in the DB + // + // CASE1 (beggining): + // .--------· + // |########W| + // ·--------· + // + // CASE2 (end): + // .--------· + // |########| + // ·--------· + // + // CASE3 (middle): + // .--------· + // |########| + // ·--------· + // + // CASE4 (multiple): + // .-----· .-· .-----· .----------------· + // |#####| |#| |#####| |################| + // ·-----· ·-· ·-----· ·----------------· + // + // + // The first 3 cases are trivial. But in the 4th case, we must get the sum of the resources + // The same could happen in the opposite, where cases are DB entries + + cron, err := cron.Parse(cronString) + if err != nil { + return errors.New("Bad cron message: " + err.Error()) + } + + dcModel := GetDatacenterFromAcronym(services.DC_NAME) + if dcModel == nil { + return errors.New("The DC " + services.DC_NAME + " doesn't have any DC model with that acronym") + } + + if desiredToSchedule.CPUs > dcModel.GetTotalCPUs() { + return errors.New("Requested more CPUs than DC have") + } + if desiredToSchedule.GPUs > dcModel.GetTotalGPUs() { + return errors.New("Requested more GPUs than DC have") + } + if desiredToSchedule.RAM > dcModel.GetTotalRAM() { + return errors.New("Requested more RAM than DC have") + } + + var lastMongoDBdate ScheduleDB + dberr := services.MngoCollSchedule.FindOne(services.MngoCtx, + primitive.M{}, + options.FindOne().SetSort(primitive.D{{"stopdate", -1}}), + ).Decode(&lastMongoDBdate) + + if dberr != nil { + if dberr == mongo.ErrNoDocuments { + // The database is empty. 
We can book without problems + return + } + return dberr + } + + // for cursor.Next(services.MngoCtx) { + // var item Workspace + // if err = cursor.Decode(&item); err != nil { + // logs.Error(err) + // close(ch) + // } + // } + // var cronScheduleStart, cronScheduleStop time.Time + + // cronScheduleStart = cron.Next(cronFirstDate) // Get the first execution + + for cronScheduleStart := cron.Next(cronFirstDate); !cronScheduleStart.IsZero() && cronScheduleStart.Before(lastMongoDBdate.StopDate); cronScheduleStart = cron.Next(cronScheduleStart) { + cronScheduleStop := cronScheduleStart.Add(time.Second * time.Duration(duration)) + if cronScheduleStop.After(cronLastDate) || cronScheduleStart.Before(cronFirstDate) { + // We skip values that are in the middle of the limits + continue + } + // ########################### + + cursor, err := services.MngoCollSchedule.Find(services.MngoCtx, + primitive.M{"$or": primitive.A{ + primitive.M{"startdate": primitive.M{ + "$gte": cronScheduleStart, + "$lte": cronScheduleStop, + }}, + primitive.M{"stopdate": primitive.M{ + "$gte": cronScheduleStart, + "$lte": cronScheduleStop, + }}, + primitive.M{"$and": primitive.A{ + primitive.M{"startdate": primitive.M{ + "$lte": cronScheduleStart, + }}, + primitive.M{"stopdate": primitive.M{ + "$gte": cronScheduleStop, + }}, + }}, + }}, + // options.Find().SetSort(primitive.D{{"startdate", 1}}), + ) + if err != nil { + return err + } + + var items []ScheduleDB + cursor.All(services.MngoCtx, &items) + + if len(items) == 0 { + // A empty time slot. Available + continue + } + + // There is some workflows booked here. 
We must check if there is remaining resources + var alreadScheduled ExecutionRequirementsModel + + for _, scheduled := range items { + alreadScheduled.CPUs += scheduled.ResourceQty.CPUs + alreadScheduled.GPUs += scheduled.ResourceQty.GPUs + alreadScheduled.RAM += scheduled.ResourceQty.RAM + } + + if alreadScheduled.CPUs+desiredToSchedule.CPUs > dcModel.GetTotalCPUs() { + return errors.New("Not enough CPU capacity from date " + cronScheduleStart.UTC().String() + " to " + cronScheduleStop.UTC().String()) + } + + if alreadScheduled.GPUs+desiredToSchedule.GPUs > dcModel.GetTotalGPUs() { + return errors.New("Not enough GPU capacity from date " + cronScheduleStart.UTC().String() + " to " + cronScheduleStop.UTC().String()) + } + + if alreadScheduled.RAM+desiredToSchedule.RAM > dcModel.GetTotalRAM() { + return errors.New("Not enough RAM capacity from date " + cronScheduleStart.UTC().String() + " to " + cronScheduleStop.UTC().String()) + } + + } + + return +} + +func CreateScheduleWorkflow(dcName, userID, workflowName, cronString string, duration uint, startDate, stopDate time.Time, requirements ExecutionRequirementsModel) (ret ScheduleInfo, err error) { + + //TODO: Check that dcName is correct + + err = CheckSchedule(cronString, duration, startDate, stopDate, requirements) + if err != nil { + return ret, err + } + + // Already checked possible errors + myCron, _ := cron.Parse(cronString) + + scheduledTimeStart := myCron.Next(time.Now().UTC()) // Get the first execution + + counter := 0 + + var uploadSchedule []interface{} + + for !scheduledTimeStart.IsZero() && counter < MAX_SCHEDULES { + scheduledTimeStop := scheduledTimeStart.Add(time.Second * time.Duration(duration)) + if scheduledTimeStop.After(stopDate) || scheduledTimeStart.Before(startDate) { + // We skip values that are in the middle of the limits + scheduledTimeStart = myCron.Next(scheduledTimeStart) + counter++ + continue + } + + uploadSchedule = append(uploadSchedule, ScheduleDB{ + StartDate: scheduledTimeStart, 
+ StopDate: scheduledTimeStop, + Workflow: dcName + "." + userID + "." + workflowName, + ResourceQty: requirements, //stub + }) + + scheduledTimeStart = myCron.Next(scheduledTimeStart) + counter++ + } + + //FIXME: Consider doing something with the inserting result + _, err = services.MngoCollSchedule.InsertMany(services.MngoCtx, uploadSchedule) + if err != nil { + logs.Error(err) + } + + ret.Total = len(uploadSchedule) + for i := 0; i < 5 && len(uploadSchedule) > i; i++ { + elem := uploadSchedule[i].(ScheduleDB) + ret.NextExecutions[i] = elem.StartDate.String() + } + + return + +} + +func GetFarSchedules(baseDate time.Time, isNext bool) *time.Time { + + operator := "$gt" + if !isNext { + // Previous to this date + operator = "$lt" + } + + var res *ScheduleDB + dberr := services.MngoCollSchedule.FindOne(services.MngoCtx, + primitive.M{"startdate": primitive.M{ + operator: baseDate, + }}, + options.FindOne().SetSort(primitive.D{{"startdate", 1}})).Decode(&res) + + if dberr != nil { + logs.Error(dberr) + return nil + } + + return &res.StartDate +} + +func GetSchedules(startDate, stopDate time.Time) (data []ScheduleDB, maxLimit bool, err error) { + + if startDate.After(stopDate) { + return nil, false, errors.New("stopDate must be after startDate") + } + + // Range of 35 days as max + if startDate.Add(24 * time.Hour * time.Duration(35)).Before(stopDate) { + return nil, false, errors.New("Must be less than 35 days between startDate and stopDate") + } + + //FIXME: Discuss if we should check old schedules + // if startDate.Before(time.Now().UTC()) { + // return nil, false, errors.New("TimeServer is " + time.Now().UTC().String() + " but your startDate is " + startDate.String()) + // } + + firstDateCur, err := services.MngoCollSchedule.Find(services.MngoCtx, + primitive.M{"startdate": primitive.M{ + "$gte": startDate, + "$lte": stopDate, + }}, + options.Find().SetLimit(int64(MAX_DATES)), + ) + + if err != nil { + return + } + + firstDateCur.All(services.MngoCtx, &data) + + 
if len(data) == int(MAX_DATES) { + maxLimit = true + } + + return + +} diff --git a/models/search.go b/models/search.go new file mode 100644 index 0000000..6e96eab --- /dev/null +++ b/models/search.go @@ -0,0 +1,101 @@ +package models + +import ( + "strings" + + "cloud.o-forge.io/core/oc-catalog/services" + "github.com/beego/beego/v2/core/logs" + + "go.mongodb.org/mongo-driver/bson" + "go.mongodb.org/mongo-driver/mongo" + "go.mongodb.org/mongo-driver/mongo/options" +) + +type SearchResult struct { + Computing []ComputingModel `json:"computing" required:"true"` + Datacenter []DatacenterModel `json:"datacenter"` + Storage []StorageModel `json:"storage"` + Data []DataModel `json:"data"` +} + +func FindByWord(word string) (object *SearchResult, err error) { + + returnObject := SearchResult{} + opts := options.Find() + opts.SetLimit(100) //FIXME: Decide if limit and how + + var cursor *mongo.Cursor + if strings.TrimSpace(word) == "*" { + word = "" + } else { + word = `(?i).*` + word + `.*` + } + // filter := bson.M{"$text": bson.M{"$search": word}} + + if cursor, err = services.MngoCollComputing.Find( + services.MngoCtx, + bson.M{"$or": []bson.M{ + {"description": bson.M{"$regex": word}}, + {"owner": bson.M{"$regex": word}}, + {"license": bson.M{"$regex": word}}, + }}, + opts, + ); err != nil { + logs.Error(err) + return nil, err + } + if err = cursor.All(services.MngoCtx, &returnObject.Computing); err != nil { + logs.Error(err) + return nil, err + } + + if cursor, err = services.MngoCollDatacenter.Find( + services.MngoCtx, + bson.M{"$or": []bson.M{ + {"name": bson.M{"$regex": word}}, + {"description": bson.M{"$regex": word}}, + {"owner": bson.M{"$regex": word}}, + }}, opts, + ); err != nil { + logs.Error(err) + return nil, err + } + if err = cursor.All(services.MngoCtx, &returnObject.Datacenter); err != nil { + logs.Error(err) + return nil, err + } + + if cursor, err = services.MngoCollStorage.Find( + services.MngoCtx, + bson.M{"$or": []bson.M{ + {"name": 
bson.M{"$regex": word}}, + {"description": bson.M{"$regex": word}}, + }}, + opts, + ); err != nil { + logs.Error(err) + return nil, err + } + if err = cursor.All(services.MngoCtx, &returnObject.Storage); err != nil { + logs.Error(err) + return nil, err + } + + if cursor, err = services.MngoCollData.Find( + services.MngoCtx, + bson.M{"$or": []bson.M{ + {"description": bson.M{"$regex": word}}, + {"example": bson.M{"$regex": word}}, + }}, + opts, + ); err != nil { + logs.Error(err) + return nil, err + } + if err = cursor.All(services.MngoCtx, &returnObject.Data); err != nil { + logs.Error(err) + return nil, err + } + + return &returnObject, nil +} diff --git a/models/storage.go b/models/storage.go new file mode 100644 index 0000000..1f04624 --- /dev/null +++ b/models/storage.go @@ -0,0 +1,138 @@ +package models + +import ( + "cloud.o-forge.io/core/oc-catalog/models/rtype" + "cloud.o-forge.io/core/oc-catalog/services" + "github.com/beego/beego/v2/core/logs" + "go.mongodb.org/mongo-driver/bson/primitive" +) + +type StorageNEWModel struct { + Name string `json:"name" required:"true"` + Description string `json:"description" required:"true"` + ShortDescription string `json:"short_description" required:"true" validate:"required"` + Logo string `json:"logo" required:"true" validate:"required"` + Type string `json:"type,omitempty" required:"true"` + + DCacronym string `json:"DCacronym" required:"true" description:"Unique ID of the DC where it is the storage"` + + Size uint `json:"size" required:"true"` + Encryption bool `json:"encryption" ` + Redundancy string `json:"redundancy" ` + Throughput string `json:"throughput" ` + BookingPrice uint `json:"bookingPrice" ` +} + +type StorageModel struct { + ID string `json:"ID" bson:"_id" required:"true"` + StorageNEWModel `bson:",inline"` +} + +func (obj StorageModel) getRtype() rtype.Rtype { + return rtype.STORAGE +} + +func (model StorageModel) getName() string { + return model.Name +} + +type StorageObject struct { + ReferenceID 
primitive.ObjectID `json:"referenceID" description:"Storage model ID"` + + Inputs []string `json:"inputs" ` + Outputs []string `json:"outputs" ` +} + +func (obj StorageObject) getHost() *string { + return nil // Host is DC only attribute +} + +func (obj StorageObject) getModel() (ret ResourceModel, err error) { + var ret2 StorageModel + res := services.MngoCollStorage.FindOne(services.MngoCtx, + primitive.M{"_id": obj.ReferenceID}, + ) + + if err = res.Err(); err != nil { + return + } + + err = res.Decode(&ret2) + return ret2, err +} + +func (obj *StorageObject) setReference(rID primitive.ObjectID) { + obj.ReferenceID = rID +} + +func (obj StorageObject) getReference() primitive.ObjectID { + return obj.ReferenceID +} + +func (obj StorageObject) getRtype() rtype.Rtype { + return rtype.STORAGE +} + +func (obj StorageObject) getName() (name *string) { + + res := services.MngoCollStorage.FindOne(services.MngoCtx, primitive.M{"_id": obj.ReferenceID}) + + if res.Err() != nil { + logs.Error(res) + return + } + + var ret StorageModel + res.Decode(&ret) + + return &ret.Name +} + +func (obj StorageObject) isLinked(rObjID string) LinkingState { + if contains(obj.Inputs, rObjID) { + return INPUT + } + + if contains(obj.Outputs, rObjID) { + return OUTPUT + } + + return NO_LINK +} + +func (obj *StorageObject) addLink(direction LinkingState, rObjID string) { + switch direction { + case INPUT: + obj.Inputs = append(obj.Inputs, rObjID) + case OUTPUT: + obj.Outputs = append(obj.Outputs, rObjID) + } +} + +func PostOneStorage(obj StorageNEWModel) (string, error) { + return postOneResource(obj, rtype.STORAGE) +} + +func GetOneStorage(ID string) (object *StorageModel, err error) { + obj, err := getOneResourceByID(ID, rtype.STORAGE) + + if err != nil { + return object, err + } + + object = obj.(*StorageModel) + + return object, err +} + +func GetMultipleStorage(IDs []string) (object *[]StorageModel, err error) { + objArray, err := getMultipleResourceByIDs(IDs, rtype.STORAGE) + + if err 
!= nil { + return nil, err + } + + object = objArray.(*[]StorageModel) + + return object, err +} diff --git a/models/user.go b/models/user.go new file mode 100644 index 0000000..6d8e595 --- /dev/null +++ b/models/user.go @@ -0,0 +1,12 @@ +package models + +type UserModel struct { + ID string `json:"id,omitempty",bson:"_id"` + Username string `json:"username,omitempty"` + Password string `json:"password,omitempty"` + Email string `json:"email,omitempty"` +} + +func Login(username, password string) bool { + return true +} diff --git a/models/workflow.go b/models/workflow.go new file mode 100644 index 0000000..526ee2b --- /dev/null +++ b/models/workflow.go @@ -0,0 +1,997 @@ +package models + +import ( + "context" + "encoding/xml" + "errors" + "sort" + "time" + + "cloud.o-forge.io/core/oc-catalog/models/rtype" + swagger "cloud.o-forge.io/core/oc-catalog/selfapi" + "cloud.o-forge.io/core/oc-catalog/services" + "github.com/beego/beego/v2/core/logs" + "github.com/vk496/cron" + "go.mongodb.org/mongo-driver/bson/primitive" +) + +type LinkingState uint + +// When we check the schedule of a workflow, we report with this +type DCstatus struct { + DCname string + DCobjID string //FIXME: Probably should be model ID + IsReachable bool + + IsAvailable bool + Booked *ScheduleInfo + + ErrorMessage string +} + +const ( + NO_LINK LinkingState = iota + INPUT + OUTPUT +) + +func boolToLinkingState(boolState bool) LinkingState { + switch boolState { + case true: + return INPUT + case false: + return OUTPUT + default: + return NO_LINK + } +} + +const SchedulesDB = "schedules" + +type Workflow struct { + // The key of the map is the ID of the object itself + Data map[string]DataObject `json:"data"` + Computing map[string]ComputingObject `json:"computing"` + Storage map[string]StorageObject `json:"storage"` + Datacenter map[string]DatacenterObject `json:"datacenter"` //TODO: Decide if there should be multiple objects of a datacenter + + Schedules WorkflowSchedule `json:"schedules"` + + 
MxgraphXML string `description:"State of the mxgraph"` +} + +type ResourceObject interface { + getHost() *string + getName() *string + getModel() (ResourceModel, error) + getRtype() rtype.Rtype + setReference(rObjID primitive.ObjectID) + getReference() primitive.ObjectID + isLinked(rObjID string) LinkingState + addLink(direction LinkingState, rObjID string) +} + +// Get a sum of all execution requirements attached to a DC obj +func (w Workflow) GetExecutionRequirements(dcIDobj string) (ret ExecutionRequirementsModel, err error) { + + // Find the id of the DC obj + + if _, ok := w.Datacenter[dcIDobj]; !ok { + return ExecutionRequirementsModel{}, errors.New("DC obj" + dcIDobj + " doesn't exist in the Workflow") + } + + // Get all elements that are attached to the DC + + for _, computingObj := range w.Computing { + if computingObj.DataCenterID == dcIDobj { + mymodel, err := computingObj.getModel() + if err != nil { + return ExecutionRequirementsModel{}, err + } + + compModel := mymodel.(ComputingModel) + + //TODO a generic way to concatenate execution requirements + ret.CPUs += compModel.ExecutionRequirements.CPUs + ret.GPUs += compModel.ExecutionRequirements.GPUs + ret.RAM += compModel.ExecutionRequirements.RAM + } + } + + return +} + +func (w *Workflow) GetResource(rObjID *string) (retObj ResourceObject) { + + if rObjID == nil { + return nil + } + + if storVal, ok := w.Data[*rObjID]; ok { + retObj = &storVal + return + } + + if storVal, ok := w.Computing[*rObjID]; ok { + retObj = &storVal + return + } + + if storVal, ok := w.Storage[*rObjID]; ok { + retObj = &storVal + return + } + + if storVal, ok := w.Datacenter[*rObjID]; ok { + retObj = &storVal + return + } + + return nil +} + +func (w *Workflow) GetResourceMapByRtype(rt rtype.Rtype) interface{} { + switch rt { + case rtype.DATA: + return w.Data + case rtype.COMPUTING: + return w.Computing + case rtype.STORAGE: + return w.Storage + case rtype.DATACENTER: + return w.Datacenter + default: + return nil + } +} + 
+func (w *Workflow) CreateResourceObject(rt rtype.Rtype) ResourceObject { + var res ResourceObject + switch rt { + case rtype.DATA: + res = &DataObject{} + case rtype.COMPUTING: + res = &ComputingObject{} + case rtype.STORAGE: + res = &StorageObject{} + case rtype.DATACENTER: + res = &DatacenterObject{} + default: + res = nil + } + return res +} + +func (w *Workflow) AddObj(robj ResourceObject) *primitive.ObjectID { + outputID := primitive.NewObjectID() + w.UpdateObj(robj, outputID.Hex()) + + return &outputID +} + +func (w *Workflow) UpdateDB(userID, workflowName string) error { + + _, err := services.MngoCollWorkspace.UpdateOne(services.MngoCtx, + primitive.M{"_id": userID}, + primitive.M{"$set": primitive.M{WorkflowDB + "." + workflowName: w}}, + ) + + return err +} + +func (w *Workflow) UpdateObj(robj ResourceObject, objID string) { + switch robj.getRtype() { + case rtype.DATA: + var target DataObject + if w.Data == nil { + //init + w.Data = make(map[string]DataObject) + } + target = *robj.(*DataObject) + w.Data[objID] = target + + case rtype.COMPUTING: + var target ComputingObject + if w.Computing == nil { + //init + w.Computing = make(map[string]ComputingObject) + } + target = *robj.(*ComputingObject) + w.Computing[objID] = target + + case rtype.STORAGE: + var target StorageObject + if w.Storage == nil { + //init + w.Storage = make(map[string]StorageObject) + } + target = *robj.(*StorageObject) + w.Storage[objID] = target + case rtype.DATACENTER: + var target DatacenterObject + if w.Datacenter == nil { + //init + w.Datacenter = make(map[string]DatacenterObject) + } + target = *robj.(*DatacenterObject) + w.Datacenter[objID] = target + } + +} + +func GetWorkflow(userID, workflowName string) (workflow *Workflow, err error) { + + userWorkspace := GetWorkspace(userID) + + if userWorkspace != nil { + if theWorkflow, ok := userWorkspace.Workflows[workflowName]; ok { + return &theWorkflow, nil + } + logs.Debug("No workflow name") + return nil, errors.New("No workflow 
name") + } + + logs.Debug("No workspace") + return nil, errors.New("No workspace") +} + +func CreateWorkflow(userID, workflowName string) (err error) { + + //TODO: Maybe operate directly in the DB instead retriving the full object? + userWorkspace := GetWorkspace(userID) + + // Exist in the DB + if userWorkspace != nil { + + if _, ok := userWorkspace.Workflows[workflowName]; ok { + message := "Workspace workflow " + workflowName + + " is already created for user " + userID + logs.Debug(message) + return errors.New(message) + } + + userWP := &Workflow{} + + // New element + addElem := primitive.M{WorkflowDB + "." + workflowName: userWP} + + // If user doesn't have workflows, we must init at least one + if userWorkspace.Workflows == nil { + addElem = primitive.M{WorkflowDB: map[string]*Workflow{ + workflowName: userWP, + }} + } + + _, err := services.MngoCollWorkspace.UpdateOne(services.MngoCtx, + primitive.M{"_id": userID}, + primitive.M{"$set": addElem}, + ) + + if err != nil { + message := "Internal error when updating in DB" + logs.Debug(message + "; " + err.Error()) + return errors.New(message) + } + return nil + } + + return errors.New("Can't create a workflow without a workspace") +} + +func CreateObjectInWorkflow(userID, workflowName, rID string) (rObjID2 *string, err error) { + + userWorkspace := GetWorkspace(userID) + + if userWorkspace == nil { + return nil, errors.New("No workspace for user " + userID) + } + + if _, ok := userWorkspace.Workflows[workflowName]; !ok { + return nil, errors.New("Workspace workflow " + workflowName + " doesn't exist for user " + userID) + } + + rIDObj, err := primitive.ObjectIDFromHex(rID) + + if err != nil { + return nil, errors.New("ID " + rID + " is not valid") + } + + //TODO: We are replacing the entire array instead of pushing + // a new element. 
Probably will have problems with multithread/async + // operations and consistency in the future + + for rtyp, resource := range userWorkspace.GetResources() { + if contains(resource, rID) { + wWorkflow := userWorkspace.Workflows[workflowName] + + newObj := wWorkflow.CreateResourceObject(rtyp) + newObj.setReference(rIDObj) + + outputID := wWorkflow.AddObj(newObj) + + _, err := services.MngoCollWorkspace.UpdateOne(services.MngoCtx, + primitive.M{"_id": userID}, + primitive.M{"$set": primitive.M{WorkflowDB + "." + workflowName + "." + rtyp.String(): wWorkflow.GetResourceMapByRtype(rtyp)}}, + ) + + if err != nil { + return nil, errors.New("Internal error when updating in DB: " + err.Error()) + } + + outStr := outputID.Hex() + + return &outStr, nil + } + } + + return nil, errors.New("rID " + rID + " doesn't exist in the user workspace") +} + +func LinkObjectsInWorkspace(userID, workflowName, rObjIDsource string, isInput bool, rObjIDtarger string) (err error) { + userWorkspace := GetWorkspace(userID) + + if userWorkspace == nil { + return errors.New("No workspace for user " + userID) + } + + if _, ok := userWorkspace.Workflows[workflowName]; !ok { + return errors.New("Workspace workflow " + workflowName + " doesn't exist for user " + userID) + } + + // Check rObjIDsource + if _, ok := userWorkspace.Workflows[workflowName].Data[rObjIDsource]; !ok { + return errors.New("rObjIDsource must be of type DATA for now") + } + + // Check rObjIDtarger + + wWorkflow := userWorkspace.Workflows[workflowName] + + resObjTarget := wWorkflow.GetResource(&rObjIDtarger) + + if resObjTarget == nil { + return errors.New("rObjIDtarger doesn't exist") + } else if resObjTarget.getRtype() == rtype.DATA { + return errors.New("rObjIDtarger of type Data doesn't have inputs/outputs") + } + + if resObjTarget.isLinked(rObjIDsource) != NO_LINK { + return errors.New("rObjIDsource already exists in the inputs or outputs") + } + + resObjTarget.addLink(boolToLinkingState(isInput), rObjIDsource) + + _, err 
= services.MngoCollWorkspace.UpdateOne(services.MngoCtx, + primitive.M{"_id": userID}, + primitive.M{"$set": primitive.M{ + WorkflowDB + "." + + workflowName + "." + + resObjTarget.getRtype().String() + "." + + rObjIDtarger: resObjTarget}}, + ) + + if err != nil { + return errors.New("Internal error when updating in DB: " + err.Error()) + } + + return nil +} + +func GetWorkflowSchedule(username, workflowName string) (Currentschedule *WorkflowSchedule, err error) { + + userWorkspace := GetWorkspace(username) + + if userWorkspace == nil { + return nil, errors.New("No workspace for user " + username) + } + + if workflow, ok := userWorkspace.Workflows[workflowName]; ok { + Currentschedule = &workflow.Schedules + } + + return +} + +func SetWorkflowSchedule(username, workflowName, cronString, events string, isService bool, startDate, stopDate time.Time, duration uint) (NextSchedules *ScheduleInfo, err error) { + + userWorkspace := GetWorkspace(username) + + if userWorkspace == nil { + return nil, errors.New("No workspace for user " + username) + } + + // Check if workflow exist + if _, ok := userWorkspace.Workflows[workflowName]; !ok { + return + } + + // We mustn't modify a booked schedule + if userWorkspace.Workflows[workflowName].Schedules.IsBooked { + return nil, errors.New("A booked schedule can't be modified") + } + + sch := WorkflowSchedule{} + NextSchedules = &ScheduleInfo{} + + sch.StartDate = startDate + sch.StopDate = stopDate + sch.IsService = isService + + if isService { // Service + NextSchedules.NextExecutions[0] = startDate.String() + NextSchedules.Total = 1 + } + + if !isService { // Task + sch.Cron = cronString + sch.Duration = duration + sch.Events = events + + // Obtain next executions + counter := 0 + myCron, _ := cron.Parse(cronString) // NOTE: already checked in the controller + scheduledStart := myCron.Next(startDate) // Get the first execution starting from startDate + + for !scheduledStart.IsZero() && counter < MAX_SCHEDULES { + scheduleStop := 
scheduledStart.Add(time.Second * time.Duration(duration)) + if scheduleStop.After(stopDate) || scheduledStart.Before(startDate) { + // If a task is longer than last possible date, we ignore it + scheduledStart = myCron.Next(scheduledStart) + counter++ + continue + } + + if counter < len(NextSchedules.NextExecutions) { + NextSchedules.NextExecutions[counter] = scheduledStart.String() + } + + scheduledStart = myCron.Next(scheduledStart) + counter++ + } + + NextSchedules.Total = counter + + if NextSchedules.Total == 0 { + return nil, errors.New("Current Task configuration will have 0 executions") + } + } + + _, err = services.MngoCollWorkspace.UpdateOne(services.MngoCtx, + primitive.M{"_id": username}, + primitive.M{"$set": primitive.M{ + WorkflowDB + "." + + workflowName + "." + + SchedulesDB: &sch}}, + ) + + if err != nil { + return nil, errors.New("Internal error when updating in DB: " + err.Error()) + } + + return +} + +func GetMxGraph(username, workflowName string) (xmlData *string, err error) { + userWorkspace := GetWorkspace(username) + + if userWorkspace == nil { + return nil, errors.New("No workspace for user " + username) + } + + if _, ok := userWorkspace.Workflows[workflowName]; !ok { + return nil, errors.New("Workspace workflow " + workflowName + " doesn't exist for user " + username) + } + + var data string = userWorkspace.Workflows[workflowName].MxgraphXML + + if data == "" { + xmlData = nil + } else { + xmlData = &data + } + return +} + +func ParseMxGraph(username, workflowName, xmlData string) (err error, mxissues []error) { + + userWorkspace := GetWorkspace(username) + + if userWorkspace == nil { + return errors.New("No workspace for user " + username), nil + } + + currentWorkflow, ok := userWorkspace.Workflows[workflowName] + if !ok { + return errors.New("No workflow " + workflowName), nil + } + + if currentWorkflow.Schedules.IsBooked { + return errors.New("Can't modify a booked workflow"), nil + } + + var xmlModel MxGraphModel + + // 
logs.Debug(xmlData) + err = xml.Unmarshal([]byte(xmlData), &xmlModel) + if err != nil { + return err, nil + } + + targetWorkspaceWorkflow, err, mxissues := userWorkspace.ConsumeMxGraphModel(xmlModel) + if err != nil { + return err, nil + } + + targetWorkspaceWorkflow.MxgraphXML = xmlData + targetWorkspaceWorkflow.Schedules = currentWorkflow.Schedules //TODO: Probably we should move schudles outside the workflow + + _, err = services.MngoCollWorkspace.UpdateOne(services.MngoCtx, + primitive.M{"_id": username}, + primitive.M{"$set": primitive.M{ + WorkflowDB + "." + + workflowName: targetWorkspaceWorkflow}}, + ) + + if err != nil { + return errors.New("Internal error when updating in DB: " + err.Error()), nil + } + + return nil, mxissues +} + +// FindInSlice takes a slice and looks for an element in it. If found it will +// return it's key, otherwise it will return -1 and a bool of false. +func FindInSlice(slice []string, val string) (int, bool) { + for i, item := range slice { + if item == val { + return i, true + } + } + return -1, false +} + +// At least one element exist in both slices +// Return index1, index2 and if exist +func FindSliceInSlice(slice1 []string, slice2 []string) (int, int, bool) { + for i1, item1 := range slice1 { + for i2, item2 := range slice2 { + if item1 == item2 { + return i1, i2, true + } + } + } + return -1, -1, false +} + +func (w Workspace) ConsumeMxGraphModel(xmlmodel MxGraphModel) (ret *Workflow, err error, issues []error) { + + ret = &Workflow{} + + // When we will iterate over the full array of cells, we first will register the resources + // and after the linkage between them + sort.Slice(xmlmodel.Root.MxCell, func(i, j int) bool { + return xmlmodel.Root.MxCell[i].RID != nil + }) + + for _, cell := range xmlmodel.Root.MxCell { + + switch { + case cell.RID != nil: + // Case of a Resource + rType := w.getRtype(*cell.RID) + + if rType == rtype.INVALID { + return nil, + errors.New("Refering to a rID that is not in the workflow"), + nil 
+ } + + // Generate ObjectID for the reference ID + rIDObj, err := primitive.ObjectIDFromHex(*cell.RID) + if err != nil { + return nil, + errors.New("Bad ID format: " + *cell.RID), + nil + } + + resObj := ret.CreateResourceObject(rType) + resObj.setReference(rIDObj) + + ret.UpdateObj(resObj, cell.ID) + case cell.ID == "0" || cell.ID == "1": + // ID 0 and 1 are special cases of mxeditor + continue + // issues = append(issues, errors.New("MxCell with ID "+cell.ID+" doesn't have a valid link")) + + default: + // Not root nor resource. Should be only links + sourceObj := ret.GetResource(cell.Source) + targetObj := ret.GetResource(cell.Target) + + if sourceObj == nil || targetObj == nil { + if sourceObj == nil && targetObj == nil { + issues = append(issues, errors.New("Arrow "+cell.ID+" is alone")) + } else if sourceObj == nil { + issues = append(issues, errors.New("Arrow ("+cell.ID+") to "+*targetObj.getName()+" without parent")) + } else { + issues = append(issues, errors.New("Arrow "+cell.ID+" from "+*sourceObj.getName()+" without target")) + } + + // If is a invalid link, we can't save it in the DB + continue + } + + if sourceObj.getRtype() == rtype.DATACENTER || targetObj.getRtype() == rtype.DATACENTER { + var datacenter, datacenterLinked *string + + if sourceObj.getRtype() == rtype.DATACENTER { + datacenter = cell.Source + datacenterLinked = cell.Target + } else { + datacenter = cell.Target + datacenterLinked = cell.Source + } + + switch ret.GetResource(datacenterLinked).getRtype() { + case rtype.COMPUTING: + computingObj := ret.GetResource(datacenterLinked).(*ComputingObject) + + // We should always get a ID because we already registered resources and discarded which doesn't correspond to existent models + computingObj.DataCenterID = *datacenter + ret.UpdateObj(computingObj, *datacenterLinked) + } + + } else { + targetObj.addLink(INPUT, *cell.Source) + ret.UpdateObj(targetObj, *cell.Target) // save back + + // If we have a relationship of: + // Source ----> 
Target + // + // The Source will be in the INPUTs of the Target. + // But we also must make sure that the Target will be in the OUTPUTs of the Source + + sourceObj.addLink(OUTPUT, *cell.Target) + ret.UpdateObj(sourceObj, *cell.Source) + } + + } + } + + dcslist := make(map[string]bool) + dataslist := make(map[string]bool) + // datalist := make(map[string]bool) + + for _, comp := range ret.Computing { + if comp.DataCenterID == "" { + issues = append(issues, errors.New("Computing "+*comp.getName()+" without a Datacenter")) + } else { + // If doesn't exist in the list, means is new element to register as used + dcslist[comp.DataCenterID] = true + + } + + for _, dcin := range comp.Inputs { + switch ret.GetResource(&dcin).getRtype() { + case rtype.DATA: + dataslist[dcin] = true + } + } + + for _, dcout := range comp.Outputs { + switch ret.GetResource(&dcout).getRtype() { + case rtype.DATA: + dataslist[dcout] = true + } + } + + } + + for _, va := range ret.Storage { + if va.Inputs == nil && va.Outputs == nil { + issues = append(issues, errors.New("Storage "+*va.getName()+" without compatible inputs and outputs")) + } + } + + for dcID, va := range ret.Datacenter { + // if rID doesn't exist in the list, it means that it's not used + if _, ok := dcslist[dcID]; !ok { + issues = append(issues, errors.New("DC "+*va.getName()+" not atached to any Computing")) + } + } + + for dcID, va := range ret.Data { + // if rID doesn't exist in the list, it means that it's not used + if _, ok := dataslist[dcID]; !ok { + issues = append(issues, errors.New("Data "+*va.getName()+" not atached to any Computing")) + } + } + + ////////////////////////////////////////////////////////// + // // + // Starting from here, we check the type of resources // + // // + ////////////////////////////////////////////////////////// + + // FIXME: Avoid checking twice the same cases (cycles). 
Ex: + // + // Comp1 ----> Comp2 + // + // In this case, we will check Comp1 outputs with Comp2 + // inputs AND Comp2 inputs with Comp1 outputs, since we are + // iterating over all existent Computing models in the Graph + + for _, comp := range ret.Computing { + + compModel, err2 := comp.getModel() + if err = err2; err != nil { + return + } + + currentCompModel := compModel.(ComputingModel) + + // Come computings may not allow inputs or outputs + if len(currentCompModel.Dinputs) == 0 && len(comp.Inputs) > 0 { + issues = append(issues, errors.New("Computing "+compModel.getName()+" must not have any input")) + continue + } + + if len(currentCompModel.Doutputs) == 0 && len(comp.Outputs) > 0 { + issues = append(issues, errors.New("Computing "+compModel.getName()+" must not have any output")) + continue + } + + //TODO: We should allow heterogenous inputs? + for _, objIn := range comp.Inputs { + resIn := ret.GetResource(&objIn) + resInType := resIn.getRtype() + switch resInType { + case rtype.DATA: + + dataModel, err2 := resIn.getModel() + if err = err2; err != nil { + return + } + myDataModel := dataModel.(DataModel) + + if _, ok := FindInSlice(currentCompModel.Dinputs, myDataModel.Dtype); !ok { + issues = append(issues, errors.New("Computing "+compModel.getName()+" can't handle inputs of type "+myDataModel.Dtype+" from Data "+dataModel.getName())) + } + + case rtype.COMPUTING: + inCompModel, err2 := resIn.getModel() + if err = err2; err != nil { + return + } + + myInComputingModel := inCompModel.(ComputingModel) + + if _, _, ok := FindSliceInSlice(myInComputingModel.Doutputs, currentCompModel.Dinputs); !ok { + issues = append(issues, errors.New("Computing "+compModel.getName()+" can't handle any input from "+inCompModel.getName())) + } + case rtype.STORAGE: + // Storage can give use anything, so we always accept it's input for now + continue + + default: + issues = append(issues, errors.New("Computing "+currentCompModel.getName()+" can't have any resource of type 
"+resInType.String()+" (behaviour not defined)")) + } + } + + //TODO: We should allow heterogenous outputs? + for _, objOut := range comp.Outputs { + resOut := ret.GetResource(&objOut) + resOutType := resOut.getRtype() + switch resOutType { + case rtype.COMPUTING: + outCompModel, err2 := resOut.getModel() + if err = err2; err != nil { + return + } + + myOutComputingModel := outCompModel.(ComputingModel) + + if _, _, ok := FindSliceInSlice(currentCompModel.Doutputs, myOutComputingModel.Dinputs); !ok { + issues = append(issues, errors.New("Computing "+compModel.getName()+" doesn't have output data compatible with "+outCompModel.getName())) + } + case rtype.STORAGE: + // Storage can save anything, so we always accept store it for now + continue + + default: + issues = append(issues, errors.New("Computing "+currentCompModel.getName()+" can't have any resource of type "+resOutType.String()+" (behaviour not defined)")) + } + } + + } + + return +} + +func sumExecutionReqs(exeqReq ...ExecutionRequirementsModel) (ret ExecutionRequirementsModel) { + for _, v := range exeqReq { + ret.CPUs += v.CPUs + ret.GPUs += v.GPUs + ret.RAM += v.RAM + } + + return +} + +func CheckAndBookWorkflowSchedule(username, workflowName string, book bool) (myRet []DCstatus, err error) { + + userWorkspace := GetWorkspace(username) + + if userWorkspace == nil { + return nil, errors.New("No workspace for user " + username) + } + + currentWorkflow, ok := userWorkspace.Workflows[workflowName] + if !ok { + return nil, errors.New("No workflow " + workflowName + " for user " + username) + } + + if currentWorkflow.Schedules.IsBooked { + return nil, errors.New("Can't operate DCs for a already booked schedule") + } + + // dd := ¤tWorkflow + + // We can have multiple DCobjs pointing to the same DCmodel. 
We must sum all req of the same DCmodel + totalDCs := make(map[primitive.ObjectID]ExecutionRequirementsModel) + + for dcIDobj, dcObj := range currentWorkflow.Datacenter { + modelID := dcObj.getReference() + + var totalsModel ExecutionRequirementsModel + totalsModel, err = currentWorkflow.GetExecutionRequirements(dcIDobj) + if err != nil { + return + } + + if _, ok := totalDCs[modelID]; ok { + totalDCs[modelID] = sumExecutionReqs(totalDCs[modelID], totalsModel) + } else { + totalDCs[modelID] = totalsModel + } + } + + myRet = make([]DCstatus, len(totalDCs)) + var i int + i = -1 + for modelID, execReq := range totalDCs { + i++ + var dcModel *DatacenterModel + dcModel, err = GetOneDatacenter(modelID.Hex()) + + if err != nil { + return + } + + myRet[i].DCname = dcModel.Name + myRet[i].DCobjID = modelID.Hex() + + // retrieve the host of the DC + host := GetHost(dcModel.Hosts) + if host == nil { + myRet[i].ErrorMessage = "Datacenter " + myRet[i].DCname + " doesn't have a host property" + continue + } + + cli := services.GetSelfAPI(*host) + data, err := cli.ScheduleApi.ScheduleControllerCheckIfScheduleCanBeCreatedInThisDC(context.Background(), + currentWorkflow.Schedules.Cron, + int32(currentWorkflow.Schedules.Duration), + currentWorkflow.Schedules.StartDate.Format(time.RFC3339), + currentWorkflow.Schedules.StopDate.Format(time.RFC3339), + swagger.ModelsExecutionRequirementsModel{ + Cpus: int32(execReq.CPUs), + Gpus: int32(execReq.GPUs), + Ram: int32(execReq.RAM), + }, + ) + + if err != nil { + myRet[i].ErrorMessage = err.Error() + swErr, ok := err.(swagger.GenericSwaggerError) + if ok { + myRet[i].IsReachable = true + myRet[i].ErrorMessage += ": " + string(swErr.Body()) + } + continue + } + + myRet[i].IsReachable = true + + if data.StatusCode == 200 { + myRet[i].IsAvailable = true + } + + } + + // If we only check, we should exit here + if !book { + return + } + + for _, v := range myRet { + if !v.IsAvailable { + return + } + } + + i = -1 + allBooked := true + for 
modelID, execReq := range totalDCs { + i++ + // _ = v + var dcModel *DatacenterModel + dcModel, err = GetOneDatacenter(modelID.Hex()) + + if err != nil { + myRet[i].ErrorMessage = err.Error() + continue + } + + cli := services.GetSelfAPI(*GetHost(dcModel.Hosts)) // If we are here, we already check that host exists and is reachable + data, resp, err := cli.ScheduleApi.ScheduleControllerCreateSchedule(context.Background(), + services.DC_NAME, + workflowName, + currentWorkflow.Schedules.Cron, + int32(currentWorkflow.Schedules.Duration), + currentWorkflow.Schedules.StartDate.Format(time.RFC3339), + currentWorkflow.Schedules.StopDate.Format(time.RFC3339), + swagger.ModelsExecutionRequirementsModel{ + Cpus: int32(execReq.CPUs), + Gpus: int32(execReq.GPUs), + Ram: int32(execReq.RAM), + }, + ) + + if err != nil { + allBooked = false + myRet[i].ErrorMessage = err.Error() + swErr, ok := err.(swagger.GenericSwaggerError) + if ok { + myRet[i].IsReachable = true + myRet[i].ErrorMessage += ": " + string(swErr.Body()) + } + continue + } + + if resp.StatusCode == 200 { + //FIXME: Maybe some better way of casting? + + var nextExec [5]string + for counter := 0; counter < 5; counter++ { + nextExec[counter] = data.NextExecutions[counter] + } + + myRet[i].Booked = &ScheduleInfo{ + Total: int(data.Total), + NextExecutions: nextExec, + } + } + + } + + // If some DC fail, we must not mark the workflow as booked + if !allBooked { + return + } + + currentWorkflow.Schedules.IsBooked = true + _, err = services.MngoCollWorkspace.UpdateOne(services.MngoCtx, + primitive.M{"_id": username}, + primitive.M{"$set": primitive.M{ + WorkflowDB + "." + + workflowName + "." 
+ + SchedulesDB: ¤tWorkflow.Schedules}}, + ) + + if err != nil { + logs.Critical("Internal error when updating in DB: " + err.Error()) + } + + return myRet, nil +} diff --git a/models/workspace.go b/models/workspace.go new file mode 100644 index 0000000..c17964b --- /dev/null +++ b/models/workspace.go @@ -0,0 +1,418 @@ +package models + +import ( + "context" + "errors" + + "cloud.o-forge.io/core/oc-catalog/models/rtype" + "cloud.o-forge.io/core/oc-catalog/services" + "github.com/beego/beego/v2/core/logs" + "go.mongodb.org/mongo-driver/bson/primitive" + "go.mongodb.org/mongo-driver/mongo" +) + +// Assure consistency by using a const which refers to the MongoDB entry name +// Workspace.Projects +const WorkflowDB = "workflows" + +type Workspace struct { + UserID string `bson:"_id" json:"user_id"` + + Workflows map[string]Workflow //WorkflowDB + + // ID: rtype + Data []string `json:"data"` + Computing []string `json:"computing"` + Datacenter []string `json:"datacenter"` + Storage []string `json:"storage"` +} + +type ResourceModel interface { + getRtype() rtype.Rtype + getName() string +} + +func (w Workspace) getRtype(rID string) (resModel rtype.Rtype) { + + for _, compVal := range w.Computing { + if compVal == rID { + return rtype.COMPUTING + } + } + + for _, datVal := range w.Data { + if datVal == rID { + return rtype.DATA + } + } + + for _, storVal := range w.Storage { + if storVal == rID { + return rtype.STORAGE + } + } + + for _, datcentVal := range w.Datacenter { + if datcentVal == rID { + return rtype.DATACENTER + } + } + + return rtype.INVALID +} + +func (w *Workspace) GetResources() map[rtype.Rtype][]string { + return map[rtype.Rtype][]string{ + rtype.DATA: w.Data, + rtype.COMPUTING: w.Computing, + rtype.STORAGE: w.Storage, + rtype.DATACENTER: w.Datacenter, + } +} + +func (w *Workspace) GetWorkflow(workflowName string) *Workflow { + + var proj Workflow + + proj = w.Workflows[workflowName] + return &proj +} + +func (w *Workspace) GetWorkflows() []string { + + 
if len(w.Workflows) == 0 { + return nil + } + + workflowNames := make([]string, len(w.Workflows)) + + i := 0 + for k := range w.Workflows { + workflowNames[i] = k + i++ + } + + return workflowNames +} + +type WorkspaceModel struct { + UserID string `bson:"_id" json:"user_id"` + + Data []DataModel `json:"data"` + Computing []ComputingModel `json:"computing"` + Datacenter []DatacenterModel `json:"datacenter"` + Storage []StorageModel `json:"storage"` +} + +func ListFullWorkspace(userID string) (*WorkspaceModel, error) { + ws := GetWorkspace(userID) + + if ws == nil { + return nil, errors.New("Internal error") + } + + fws := &WorkspaceModel{ + UserID: ws.UserID, + + Data: []DataModel{}, + Computing: []ComputingModel{}, + Datacenter: []DatacenterModel{}, + Storage: []StorageModel{}, + } + + pipeline := []primitive.M{ + {"$match": primitive.M{"_id": userID}}, + {"$lookup": primitive.M{ + "localField": "data", + "from": services.MngoNamesCollection.DATA, + "foreignField": "_id", + "as": "data", + }}, + {"$lookup": primitive.M{ + "localField": "computing", + "from": services.MngoNamesCollection.COMPUTING, + "foreignField": "_id", + "as": "computing", + }}, + {"$lookup": primitive.M{ + "localField": "datacenter", + "from": services.MngoNamesCollection.DATACENTER, + "foreignField": "_id", + "as": "datacenter", + }}, + {"$lookup": primitive.M{ + "localField": "storage", + "from": services.MngoNamesCollection.STORAGE, + "foreignField": "_id", + "as": "storage", + }}, + } + + ret, err := services.MngoCollWorkspace.Aggregate(services.MngoCtx, pipeline) + + if err != nil { + message := "Couldn't obtain subobjects" + logs.Debug(message + "; " + err.Error()) + return nil, errors.New(message) + } + + if ret.RemainingBatchLength() == 1 { + ret.Next(context.Background()) + ret.Decode(&fws) + } + + return fws, nil +} + +// Contains tells whether a contains x. 
+func contains(a []string, x string) bool { + for _, n := range a { + if x == n { + return true + } + } + return false +} + +func RemoveResource(userID, rID, rType string) error { + + rIDObj, err := primitive.ObjectIDFromHex(rID) + + if err != nil { + message := "ID " + rID + " is not valid" + logs.Debug(message + "; " + err.Error()) + return errors.New(message) + } + + result, err := services.MngoCollWorkspace.UpdateOne(services.MngoCtx, + primitive.M{"_id": userID}, + primitive.M{"$pull": primitive.M{rType: rIDObj}}, + ) + + if err != nil { + message := err.Error() + logs.Debug(message) + return errors.New(message) + } + + if result.MatchedCount == 0 { + message := "No user " + userID + " in workspace" + logs.Debug(message) + return errors.New(message) + } + + if result.ModifiedCount == 0 { + message := "No rID " + rID + " in rtype " + rType + logs.Debug(message) + return errors.New(message) + } + + return nil + +} + +func (w *Workspace) updateDB() (err error) { + + _, err = services.MngoCollWorkspace.ReplaceOne(services.MngoCtx, + primitive.M{"_id": w.UserID}, + w, + ) + + return +} + +func (w *Workspace) NewResource(rID string, rType string) (err error) { + + var targetArray *[]string + + switch rType { + case rtype.DATA.String(): + targetArray = &w.Data + case rtype.COMPUTING.String(): + targetArray = &w.Computing + case rtype.STORAGE.String(): + targetArray = &w.Storage + case rtype.DATACENTER.String(): + targetArray = &w.Datacenter + default: + return errors.New("Rtype " + rType + " is not valid") + } + + for _, models := range *targetArray { + if models == rID { + return errors.New("Resource " + rID + " of type " + rType + + " is already registered for user " + w.UserID) + } + } + + *targetArray = append(*targetArray, rID) + + w.updateDB() + + return +} + +func AddResource(userID, rID, rType string) (err error) { + + var rIDObj *primitive.ObjectID + + if rIDObj, err = IsValidResource(rID, rType); err != nil { + return err + } + + //TODO: Maybe operate 
directly in the DB instead retriving the full object? + userWorkspace := GetWorkspace(userID) + + // Exist in the DB + if userWorkspace != nil { + var targetArray []string + + switch rType { + case rtype.DATA.String(): + targetArray = userWorkspace.Data + case rtype.COMPUTING.String(): + targetArray = userWorkspace.Computing + case rtype.STORAGE.String(): + targetArray = userWorkspace.Storage + case rtype.DATACENTER.String(): + targetArray = userWorkspace.Datacenter + default: + message := "Rtype " + rType + " is not valid" + logs.Debug(message) + return errors.New(message) + } + + if ok := contains(targetArray, rID); ok { + // Element already registered + message := "Resource " + rID + " of type " + rType + + " is already registered for user " + userID + logs.Debug(message) + return errors.New(message) + } + + // New element + // userWorkspace.ResourceList[rID] = rType + _, err := services.MngoCollWorkspace.UpdateOne(services.MngoCtx, + primitive.M{"_id": userID}, + primitive.M{"$push": primitive.M{rType: rIDObj}}, + ) + + if err != nil { + message := "Internal error when updating in DB" + logs.Debug(message + "; " + err.Error()) + return errors.New(message) + } + + return nil + + } + return errors.New("Internal error") +} + +func rTypeToCollection(rType string) (*mongo.Collection, error) { + switch rType { + case rtype.DATA.String(): + return services.MngoCollData, nil + case rtype.COMPUTING.String(): + return services.MngoCollComputing, nil + case rtype.DATACENTER.String(): + return services.MngoCollDatacenter, nil + case rtype.STORAGE.String(): + return services.MngoCollStorage, nil + } + + message := rType + " is not a valid resource type" + logs.Debug(message) + + return nil, errors.New(message) +} + +func IsValidResource(rID, rType string) (*primitive.ObjectID, error) { + + targetColl, err := rTypeToCollection(rType) + + if err != nil { + return nil, err + } + + rIDObj, err := primitive.ObjectIDFromHex(rID) + + if err != nil { + message := "ID " + rID + " is 
not valid"
+		logs.Debug(message + "; " + err.Error())
+		return nil, errors.New(message)
+	}
+
+	result := targetColl.FindOne(services.MngoCtx, primitive.M{"_id": rIDObj})
+
+	if result.Err() != nil {
+		message := "ID " + rID + " doesn't exist for resource type " + rType
+		logs.Debug(message + "; " + result.Err().Error())
+		return nil, errors.New(message)
+	}
+
+	return &rIDObj, nil
+}
+
+// GetAllWorkspaces streams every workspace document through a channel.
+// The channel is always closed, whether the cursor finishes normally or
+// an error aborts the iteration.
+func GetAllWorkspaces() <-chan *Workspace {
+	ch := make(chan *Workspace)
+	go func() {
+		defer close(ch) // Remember to close or the loop never ends!
+
+		cursor, err := services.MngoCollWorkspace.Find(services.MngoCtx, primitive.M{})
+		if err != nil {
+			// BUGFIX: on a Find error the cursor is nil; the old code
+			// dereferenced it via cursor.Err() and then fell through into
+			// the iteration loop after already closing the channel.
+			logs.Error(err)
+			return
+		}
+		defer cursor.Close(services.MngoCtx)
+
+		for cursor.Next(services.MngoCtx) {
+			var item Workspace
+			if err = cursor.Decode(&item); err != nil {
+				// BUGFIX: must stop here; the old code closed the channel
+				// and kept looping, which panics on the next send.
+				logs.Error(err)
+				return
+			}
+			ch <- &item
+		}
+	}()
+	return ch
+}
+
+// GetAllWorkspacesProjects streams the workflows of this workspace.
+func (w *Workspace) GetAllWorkspacesProjects() <-chan *Workflow {
+	ch := make(chan *Workflow)
+	go func() {
+		for _, wproj := range w.Workflows {
+			// BUGFIX: shadow the loop variable. Before Go 1.22 every
+			// iteration reuses the same variable, so all receivers would
+			// observe a pointer to the last workflow.
+			wproj := wproj
+			ch <- &wproj
+		}
+		close(ch)
+	}()
+	return ch
+}
+
+// GetWorkspace fetches the workspace document of userID, or nil when the
+// user has no workspace (or the lookup fails for any other reason).
+func GetWorkspace(userID string) (retObj *Workspace) {
+
+	if err := services.MngoCollWorkspace.FindOne(services.MngoCtx, primitive.M{"_id": userID}).Decode(&retObj); err != nil {
+		logs.Error(err.Error())
+		return nil
+	}
+
+	return
+}
+
+// NewWorkspace creates and stores an empty workspace for userID.
+func NewWorkspace(userID string) (*Workspace, error) {
+
+	newWsp := &Workspace{
+		UserID:     userID,
+		Data:       []string{},
+		Computing:  []string{},
+		Datacenter: []string{},
+		Storage:    []string{},
+	}
+
+	_, err := services.MngoCollWorkspace.InsertOne(services.MngoCtx, newWsp)
+	if err != nil {
+		logs.Warning(err.Error())
+		return nil, err
+	}
+
+	return newWsp, nil
+
+}
diff --git a/routers/auth.go b/routers/auth.go
new file mode 100644
index 0000000..d08278c
--- /dev/null
+++ b/routers/auth.go
@@ -0,0 +1,118 @@
+package routers
+
+import (
+	"reflect"
+	"strings"
+
+	"cloud.o-forge.io/core/oc-catalog/controllers"
+	"cloud.o-forge.io/core/oc-catalog/models"
+	
"github.com/beego/beego/v2/core/logs" + beego "github.com/beego/beego/v2/server/web" + "github.com/beego/beego/v2/server/web/context" + "github.com/beego/beego/v2/server/web/swagger" +) + +func setStatus403(ctx *context.Context) { + ctx.Output.SetStatus(403) + ctx.Output.Body([]byte("")) +} + +// TODO: Force swagger regeneration during startup to ensure consistency +func initAuthMiddleware() { + var FilterAuthTags = func(ctx *context.Context) { + patternMatch := ctx.Input.GetData("RouterPattern").(string) + + patternMatchPath := strings.Replace(patternMatch, rootapi.BasePath, "", 1) + + //Discovery path + if patternMatch == "/" { + return + } + + // First letter Uppercase and the rest lowercase + reqMethod := strings.ToUpper(string(ctx.Request.Method[0])) + strings.ToLower(string(ctx.Request.Method[1:])) + + val := reflect.ValueOf(rootapi.Paths[patternMatchPath]). + Elem().FieldByName(reqMethod). + Elem().Interface().(swagger.Operation) + + // Make sure never omit a security declaration + canaryVar := false + + for _, securityItem := range val.Security { + canaryVar = true + + for securityItemName := range securityItem { + switch t := rootapi.SecurityDefinitions[securityItemName]; t.Type { + case "basic": + user, pass, containBasic := ctx.Request.BasicAuth() + if containBasic { + if models.Login(user, pass) { + //TODO: Decide behaviour with multiple security + return + } + + ctx.Output.SetStatus(403) + ctx.Output.Body([]byte("")) //We must create some kind of output to force beego abort the request + return + + } + + ctx.Output.Header("WWW-Authenticate", `Basic realm="Restricted"`) + ctx.Output.SetStatus(401) + ctx.Output.Body([]byte("")) //We must create some kind of output to force beego abort the request + return + + case "apiKey": + var jwtTokenString string + + switch t.In { + case "header": + jwtTokenString = ctx.Request.Header.Get(t.Name) + case "query": + jwtTokenString = ctx.Request.URL.Query().Get(t.Name) + default: + logs.Warn("BUG: API auth with token 
of type " + t.In + " not implemented.") + setStatus403(ctx) + return + } + + if jwtTokenString != "" { + // We have a token + + _, err := controllers.IsValidToken(jwtTokenString) + if err != nil { + //Bad token + ctx.Output.SetStatus(401) + ctx.Output.Body([]byte(err.Error())) + return + } + + ctx.Input.SetData("jwtAPIToken", jwtTokenString) + return //Go to Controller + } + + // No token at all + logs.Debug("No access token provided") + setStatus403(ctx) + return + default: + ctx.Output.SetStatus(501) + ctx.Output.Body([]byte("Authentication with " + t.Type + " not implemented")) + + return + } + } + } + + if canaryVar { + // If we are here, that means some Security declaration exist and was skipped. + // Must avoid giving access, but should inform about it + + logs.Critical("Probably a BUG related to authentication process") + setStatus403(ctx) + } + } + + beego.InsertFilter("/*", beego.BeforeExec, FilterAuthTags) +} diff --git a/routers/router.go b/routers/router.go new file mode 100644 index 0000000..3d04e6f --- /dev/null +++ b/routers/router.go @@ -0,0 +1,129 @@ +// @APIVersion 1.0.0 +// @Title oc-catalog API +// @Description Backend of the oc-search project +// @Contact opencloud@irt-saintexupery.com +//// @SecurityDefinition jwtAPIToken apiKey Authorization header "API authentication with JWT tokens" + +package routers + +import ( + "encoding/json" + "io/ioutil" + "os" + "strings" + + "cloud.o-forge.io/core/oc-catalog/controllers" + "cloud.o-forge.io/core/oc-catalog/services" + + "github.com/beego/beego/v2/core/logs" + + bee "github.com/beego/bee/v2/generate/swaggergen" + + "github.com/beego/beego/v2/adapter/swagger" + beego "github.com/beego/beego/v2/server/web" + "github.com/beego/beego/v2/server/web/context" +) + +var rootapi swagger.Swagger + +func Init() { + // Remove old swagger comments + err := os.Remove("routers/commentsRouter_controllers.go") + if err != nil { + logs.Warning("Couldn't remove comments file: " + err.Error()) + } + + ns := 
beego.NewNamespace("/v1", + beego.NSNamespace("/user", + beego.NSInclude( + &controllers.UserController{}, + ), + ), + beego.NSNamespace("/data", + beego.NSInclude( + &controllers.DataController{}, + ), + ), + beego.NSNamespace("/computing", + beego.NSInclude( + &controllers.ComputingController{}, + ), + ), + beego.NSNamespace("/datacenter", + beego.NSInclude( + &controllers.DatacenterController{}, + ), + ), + beego.NSNamespace("/storage", + beego.NSInclude( + &controllers.StorageController{}, + ), + ), + beego.NSNamespace("/search", + beego.NSInclude( + &controllers.SearchController{}, + ), + ), + beego.NSNamespace("/workspace", + beego.NSInclude( + &controllers.WorkspaceController{}, + ), + ), + beego.NSNamespace("/workflow", + beego.NSInclude( + &controllers.WorkflowController{}, + ), + ), + beego.NSNamespace("/schedule", + beego.NSInclude( + &controllers.ScheduleController{}, + ), + ), + ) + + beego.AddNamespace(ns) + + beego.Get("/", func(ctx *context.Context) { + ctx.Output.Body([]byte(services.DC_NAME)) + }) + + // Force regenerate swagger before consuming the data + bee.GenerateDocs(".") + + // Open our jsonFile + swaggerSchemaPath := "swagger/swagger.json" + swaggerFile, err := os.Open(swaggerSchemaPath) + // if we os.Open returns an error then handle it + if err != nil { + logs.Critical("Error opening %v: %v", swaggerSchemaPath, err) + panic(err) + } + + // defer the closing of our jsonFile so that we can parse it later on + defer swaggerFile.Close() + + byteValue, err := ioutil.ReadAll(swaggerFile) + if err != nil { + logs.Critical("Error reading %v: %v", swaggerSchemaPath, err) + panic(err) + } + + json.Unmarshal(byteValue, &rootapi) + + // To simplify match of Paths, we must adapt swagger path representation to beego router + // For example: /path/{myID} will be /path/:myID + for k, v := range rootapi.Paths { + if strings.ContainsAny(k, "{}") { + newKey := strings.Replace(k, "{", ":", -1) + newKey = strings.Replace(newKey, "}", "", -1) + + 
rootapi.Paths[newKey] = v + delete(rootapi.Paths, k) + + } + } + + // Init some extra stuff + initAuthMiddleware() + initUglyFixes() +} diff --git a/routers/uglyfix.go b/routers/uglyfix.go new file mode 100644 index 0000000..f55d4e9 --- /dev/null +++ b/routers/uglyfix.go @@ -0,0 +1,42 @@ +package routers + +import ( + "strings" + + "github.com/beego/beego/v2/server/web/filter/cors" + + "github.com/beego/beego/v2/core/logs" + beego "github.com/beego/beego/v2/server/web" + "github.com/beego/beego/v2/server/web/context" +) + +//TODO: This file should never exist. Solve the bugs and avoid using this +// [ id id2 id3 ] +// id,id2,id3 +// Some Beego bugs? that we want to fix +func initUglyFixes() { + var FixParameterArrays = func(ctx *context.Context) { + + for k, v := range ctx.Input.Params() { + if strings.HasPrefix(v, "[") && strings.HasSuffix(v, "]") { + logs.Warn("BUGFIX: Fixing array interpretation. Should not be done like this") + newParam := strings.NewReplacer("[", "", " ", ",", "]", "").Replace(v) + ctx.Input.SetParam(k, newParam) + } + } + + } + beego.InsertFilter("/*/multi/:IDs", beego.BeforeExec, FixParameterArrays, beego.WithReturnOnOutput(true)) + + // FIXME: We should define CORS properly (maybe more permissive in dev mode?) 
+ beego.InsertFilter("*", beego.BeforeStatic, cors.Allow(&cors.Options{ + AllowAllOrigins: true, + AllowMethods: []string{ + "HEAD", + "GET", + "POST", + "PUT", + "DELETE", + }, + })) +} diff --git a/scripts/demo.json b/scripts/demo.json new file mode 100644 index 0000000..e41ecca --- /dev/null +++ b/scripts/demo.json @@ -0,0 +1,322 @@ + [ + { + "api": "/v1/data/", + "content": [ + { + "name": "Mundi Sentienl 3 SRAL Images", + "short_description": "Mundi Sentinels 3 SAR Altiemter image", + "logo": "./local_imgs/Mundi Sentienl 3 SRAL Images.png", + "description": "A very long description of what this data is", + "example": "string", + "ftype": "string", + "location": "string", + "type": "data" + }, + { + "name": "Mundi Sentienl 3 OLCI Images", + "short_description": "Mundi Sentinels 3 Ocean and land color Altiemter image", + "logo": "./local_imgs/Mundi Sentienl 3 OLCI Images.png", + "description": "A very long description of what this data is", + "example": "string", + "ftype": "string", + "location": "string", + "type": "data" + }, + { + "name": "Meteo-France forecasts", + "short_description": "Meteo France weather forecasts", + "logo": "./local_imgs/Meteo-France forecasts.png", + "description": "A very long description of what this data is", + "example": "string", + "ftype": "string", + "location": "string", + "type": "data" + }, + { + "name": "Meteo-France wind archive", + "short_description": "Meteo France wind archive", + "logo": "./local_imgs/Meteo-France wind archive.png", + "description": "A very long description of what this data is", + "example": "string", + "ftype": "string", + "location": "string", + "type": "data" + } + ] + }, + { + "api": "/v1/computing/", + "content": [ + { + "name": "SAR High points", + "short_description": "SAR Altimeter High points extraction Software", + "logo": "./local_imgs/SAR High points.png", + "description": "A very long description of what this data is", + "type": "computing", + "owner": "IRT", + "price": 300, + "license": 
"GPLv2", + "execution_requirements": { + "cpus": 1, + "ram": 1024, + "storage": 300, + "gpus": 1, + "disk_io": "30 MB/s", + "parallel": true, + "scaling_model": 2 + }, + "inputs": [], + "outputs": [] + }, + { + "name": "Flammable vegetation slicer", + "short_description": "Analyze land cover and define optimum vegetation slices to prevent fire propagation", + "logo": "./local_imgs/Flammable vegetation slicer.png", + "description": "A very long description of what this data is", + "type": "computing", + "execution_requirements": { + "cpus": 3, + "ram": 4096, + "storage": 30000, + "disk_io": "30 MB/s", + "parallel": true, + "scaling_model": 2 + }, + "owner": "Gob.fr", + "price": 330, + "license": "Copyright", + "inputs": [], + "outputs": [] + }, + { + "name": "Long term fire risk mitigation planner", + "short_description": "Long term fire risk mitigation planner : provides list of actions to be performed to mitigate fire propagation", + "logo": "./local_imgs/Long term fire risk mitigation planner.png", + "description": "A very long description of what this data is", + "type": "computing", + "execution_requirements": { + "cpus": 2, + "ram": 1024, + "disk_io": "30 MB/s", + "parallel": false, + "scaling_model": 2 + }, + "owner": "Gob.fr", + "price": 30, + "license": "GPLv3", + "inputs": [], + "outputs": [] + }, + { + "name": "Fire propagation simulator", + "short_description": "Fire propagation simulator", + "logo": "./local_imgs/Fire propagation simulator.png", + "description": "A very long description of what this data is", + "type": "computing", + "execution_requirements": { + "cpus": 4, + "ram": 8192, + "storage": 30000, + "gpus": 1, + "disk_io": "30 MB/s", + "parallel": true, + "scaling_model": 2 + }, + "owner": "Gob.fr", + "price": 39, + "license": "GPLv3", + "inputs": [], + "outputs": [] + }, + { + "name": "Environment builder", + "short_description": "build simulated environment from real environmental data and fire mitigation rules ", + "logo": 
"./local_imgs/Environment builder.png", + "description": "A very long description of what this data is", + "type": "computing", + "execution_requirements": { + "cpus": 1, + "ram": 2049, + "storage": 500, + "gpus": 4, + "disk_io": "30 MB/s", + "parallel": true, + "scaling_model": 2 + }, + "owner": "Gob.fr", + "price": 39, + "license": "GPLv3", + "inputs": [], + "outputs": [] + } + ] + }, + { + "api": "/v1/storage/", + "content": [ + { + "name": "IRT risk database", + "short_description": "IRT Database instance", + "logo": "./local_imgs/IRT risk database.png", + "description": "A very long description of what this data is", + "type": "database", + "DCacronym": "DC_myDC", + "size": 4000, + "encryption": false, + "redundancy": "RAID5", + "throughput": "r:200,w:150", + "bookingPrice": 60, + "inputs": [], + "outputs": [] + }, + { + "name": "IRT local file storage", + "short_description": "S3 compliant IRT file storage", + "logo": "./local_imgs/IRT local file storage.png", + "description": "A very long description of what this data is", + "type": "storage", + "DCacronym": "DC_myDC", + "size": 40000, + "encryption": false, + "redundancy": "RAID5S", + "throughput": "r:300,w:350", + "bookingPrice": 90, + "inputs": [], + "outputs": [] + } + ] + }, + { + "api": "/v1/datacenter/", + "content": [ + { + "name": "Mundi datacenter", + "acronym": "DC_myDC", + "hosts": [ + "localhost:49618", + "oc-catalog:49618" + ], + "short_description": "Mundi Opencloud Instance", + "logo": "./local_imgs/Mundi datacenter.png", + "description": "A very long description of what this data is", + "type": "datacenter", + "bookingPrice": 650, + "owner": "IRT", + "cpu": { + "cores": 8, + "architecture": "x86", + "shared": false + }, + "ram": { + "size": 16384, + "ecc": false + }, + "gpu": [ + { + "cuda_cores": 10496, + "model": "RTX 3090 FE", + "memory": 24000, + "tensor_cores": 328 + } + ] + }, + { + "name": "CNES datacenter", + "acronym": "DC_superDC1", + "hosts": [ + "localhost:49619", + "dc1:49618" + 
], + "short_description": "CNES Opencloud Instance", + "logo": "./local_imgs/CNES datacenter.png", + "description": "A very long description of what this data is", + "type": "datacenter", + "bookingPrice": 650, + "owner": "IRT", + "cpu": { + "cores": 32, + "architecture": "x86", + "shared": false + }, + "ram": { + "size": 100000, + "ecc": false + }, + "gpu": [] + }, + { + "name": "Meteo France datacenter", + "acronym": "DC_superDC2", + "hosts": [ + "localhost:49620", + "dc2:49618" + ], + "short_description": "Meteo France Opencloud Instance", + "logo": "./local_imgs/Meteo France datacenter.png", + "description": "A very long description of what this data is", + "type": "datacenter", + "bookingPrice": 650, + "owner": "Meteo France", + "cpu": { + "cores": 16, + "architecture": "x86", + "shared": false + }, + "ram": { + "size": 32786, + "ecc": false + }, + "gpu": [ + { + "cuda_cores": 10496, + "model": "RTX 3090 FE", + "memory": 24000, + "tensor_cores": 328 + }, + { + "cuda_cores": 10496, + "model": "RTX 3090 FE", + "memory": 24000, + "tensor_cores": 328 + }, + { + "cuda_cores": 10496, + "model": "RTX 3090 FE", + "memory": 24000, + "tensor_cores": 328 + }, + { + "cuda_cores": 10496, + "model": "RTX 3090 FE", + "memory": 24000, + "tensor_cores": 328 + }, + { + "cuda_cores": 10496, + "model": "RTX 3090 FE", + "memory": 24000, + "tensor_cores": 328 + }, + { + "cuda_cores": 10496, + "model": "RTX 3090 FE", + "memory": 24000, + "tensor_cores": 328 + }, + { + "cuda_cores": 10496, + "model": "RTX 3090 FE", + "memory": 24000, + "tensor_cores": 328 + }, + { + "cuda_cores": 10496, + "model": "RTX 3090 FE", + "memory": 24000, + "tensor_cores": 328 + } + ] + } + ] + } + ] \ No newline at end of file diff --git a/scripts/demo_dc2.json b/scripts/demo_dc2.json new file mode 100644 index 0000000..3d3530c --- /dev/null +++ b/scripts/demo_dc2.json @@ -0,0 +1,135 @@ +[ + { + "api": "/v1/datacenter/", + "content": [ + { + "name": "Mundi datacenter", + "acronym": "DC_myDC", + "hosts": [ + 
"localhost:49618", + "oc-catalog:49618" + ], + "short_description": "Mundi Opencloud Instance", + "logo": "./local_imgs/Mundi datacenter.png", + "description": "A very long description of what this data is", + "type": "datacenter", + "bookingPrice": 650, + "owner": "IRT", + "cpu": { + "cores": 8, + "architecture": "x86", + "shared": false + }, + "ram": { + "size": 16384, + "ecc": false + }, + "gpu": [ + { + "cuda_cores": 10496, + "model": "RTX 3090 FE", + "memory": 24000, + "tensor_cores": 328 + } + ] + }, + { + "name": "CNES datacenter", + "acronym": "DC_superDC1", + "hosts": [ + "localhost:49619", + "dc1:49618" + ], + "short_description": "CNES Opencloud Instance", + "logo": "./local_imgs/CNES datacenter.png", + "description": "A very long description of what this data is", + "type": "datacenter", + "bookingPrice": 650, + "owner": "IRT", + "cpu": { + "cores": 32, + "architecture": "x86", + "shared": false + }, + "ram": { + "size": 100000, + "ecc": false + }, + "gpu": [] + }, + { + "name": "Meteo France datacenter", + "acronym": "DC_superDC2", + "hosts": [ + "localhost:49620", + "dc2:49618" + ], + "short_description": "Meteo France Opencloud Instance", + "logo": "./local_imgs/Meteo France datacenter.png", + "description": "A very long description of what this data is", + "type": "datacenter", + "bookingPrice": 650, + "owner": "Meteo France", + "cpu": { + "cores": 16, + "architecture": "x86", + "shared": false + }, + "ram": { + "size": 32786, + "ecc": false + }, + "gpu": [ + { + "cuda_cores": 10496, + "model": "RTX 3090 FE", + "memory": 24000, + "tensor_cores": 328 + }, + { + "cuda_cores": 10496, + "model": "RTX 3090 FE", + "memory": 24000, + "tensor_cores": 328 + }, + { + "cuda_cores": 10496, + "model": "RTX 3090 FE", + "memory": 24000, + "tensor_cores": 328 + }, + { + "cuda_cores": 10496, + "model": "RTX 3090 FE", + "memory": 24000, + "tensor_cores": 328 + }, + { + "cuda_cores": 10496, + "model": "RTX 3090 FE", + "memory": 24000, + "tensor_cores": 328 + }, + { + 
"cuda_cores": 10496, + "model": "RTX 3090 FE", + "memory": 24000, + "tensor_cores": 328 + }, + { + "cuda_cores": 10496, + "model": "RTX 3090 FE", + "memory": 24000, + "tensor_cores": 328 + }, + { + "cuda_cores": 10496, + "model": "RTX 3090 FE", + "memory": 24000, + "tensor_cores": 328 + } + ] + } + ] + } +] \ No newline at end of file diff --git a/scripts/demo_dc3.json b/scripts/demo_dc3.json new file mode 100644 index 0000000..3d3530c --- /dev/null +++ b/scripts/demo_dc3.json @@ -0,0 +1,135 @@ +[ + { + "api": "/v1/datacenter/", + "content": [ + { + "name": "Mundi datacenter", + "acronym": "DC_myDC", + "hosts": [ + "localhost:49618", + "oc-catalog:49618" + ], + "short_description": "Mundi Opencloud Instance", + "logo": "./local_imgs/Mundi datacenter.png", + "description": "A very long description of what this data is", + "type": "datacenter", + "bookingPrice": 650, + "owner": "IRT", + "cpu": { + "cores": 8, + "architecture": "x86", + "shared": false + }, + "ram": { + "size": 16384, + "ecc": false + }, + "gpu": [ + { + "cuda_cores": 10496, + "model": "RTX 3090 FE", + "memory": 24000, + "tensor_cores": 328 + } + ] + }, + { + "name": "CNES datacenter", + "acronym": "DC_superDC1", + "hosts": [ + "localhost:49619", + "dc1:49618" + ], + "short_description": "CNES Opencloud Instance", + "logo": "./local_imgs/CNES datacenter.png", + "description": "A very long description of what this data is", + "type": "datacenter", + "bookingPrice": 650, + "owner": "IRT", + "cpu": { + "cores": 32, + "architecture": "x86", + "shared": false + }, + "ram": { + "size": 100000, + "ecc": false + }, + "gpu": [] + }, + { + "name": "Meteo France datacenter", + "acronym": "DC_superDC2", + "hosts": [ + "localhost:49620", + "dc2:49618" + ], + "short_description": "Meteo France Opencloud Instance", + "logo": "./local_imgs/Meteo France datacenter.png", + "description": "A very long description of what this data is", + "type": "datacenter", + "bookingPrice": 650, + "owner": "Meteo France", + "cpu": { 
+ "cores": 16, + "architecture": "x86", + "shared": false + }, + "ram": { + "size": 32786, + "ecc": false + }, + "gpu": [ + { + "cuda_cores": 10496, + "model": "RTX 3090 FE", + "memory": 24000, + "tensor_cores": 328 + }, + { + "cuda_cores": 10496, + "model": "RTX 3090 FE", + "memory": 24000, + "tensor_cores": 328 + }, + { + "cuda_cores": 10496, + "model": "RTX 3090 FE", + "memory": 24000, + "tensor_cores": 328 + }, + { + "cuda_cores": 10496, + "model": "RTX 3090 FE", + "memory": 24000, + "tensor_cores": 328 + }, + { + "cuda_cores": 10496, + "model": "RTX 3090 FE", + "memory": 24000, + "tensor_cores": 328 + }, + { + "cuda_cores": 10496, + "model": "RTX 3090 FE", + "memory": 24000, + "tensor_cores": 328 + }, + { + "cuda_cores": 10496, + "model": "RTX 3090 FE", + "memory": 24000, + "tensor_cores": 328 + }, + { + "cuda_cores": 10496, + "model": "RTX 3090 FE", + "memory": 24000, + "tensor_cores": 328 + } + ] + } + ] + } +] \ No newline at end of file diff --git a/scripts/generate_selfapi.sh b/scripts/generate_selfapi.sh new file mode 100755 index 0000000..ee1fbae --- /dev/null +++ b/scripts/generate_selfapi.sh @@ -0,0 +1,15 @@ +#!/bin/env bash +set -xe # Don't continue if first command fail + +docker run \ + --rm \ + --user=$UID \ + --network=host \ + -v ${PWD}:/local \ + swaggerapi/swagger-codegen-cli:2.4.18 \ + generate -i /local/swagger/swagger.json -l go -o /local/selfapi_bak + +if [ -d selfapi_bak ]; then + rm -fr selfapi + mv selfapi_bak selfapi +fi \ No newline at end of file diff --git a/scripts/local_imgs/CNES datacenter.png b/scripts/local_imgs/CNES datacenter.png new file mode 100644 index 0000000..fd1ae8e Binary files /dev/null and b/scripts/local_imgs/CNES datacenter.png differ diff --git a/scripts/local_imgs/Environment builder.png b/scripts/local_imgs/Environment builder.png new file mode 100644 index 0000000..1237c87 Binary files /dev/null and b/scripts/local_imgs/Environment builder.png differ diff --git a/scripts/local_imgs/Fire propagation 
simulator.png b/scripts/local_imgs/Fire propagation simulator.png new file mode 100644 index 0000000..ce612dc Binary files /dev/null and b/scripts/local_imgs/Fire propagation simulator.png differ diff --git a/scripts/local_imgs/Flammable vegetation slicer.png b/scripts/local_imgs/Flammable vegetation slicer.png new file mode 100644 index 0000000..2b4b383 Binary files /dev/null and b/scripts/local_imgs/Flammable vegetation slicer.png differ diff --git a/scripts/local_imgs/IRT local file storage.png b/scripts/local_imgs/IRT local file storage.png new file mode 100644 index 0000000..fd49a1a Binary files /dev/null and b/scripts/local_imgs/IRT local file storage.png differ diff --git a/scripts/local_imgs/IRT risk database.png b/scripts/local_imgs/IRT risk database.png new file mode 100644 index 0000000..ff48cf2 Binary files /dev/null and b/scripts/local_imgs/IRT risk database.png differ diff --git a/scripts/local_imgs/Long term fire risk mitigation planner.png b/scripts/local_imgs/Long term fire risk mitigation planner.png new file mode 100644 index 0000000..ce612dc Binary files /dev/null and b/scripts/local_imgs/Long term fire risk mitigation planner.png differ diff --git a/scripts/local_imgs/Meteo France datacenter.png b/scripts/local_imgs/Meteo France datacenter.png new file mode 100644 index 0000000..4fe39d1 Binary files /dev/null and b/scripts/local_imgs/Meteo France datacenter.png differ diff --git a/scripts/local_imgs/Meteo-France forecasts.png b/scripts/local_imgs/Meteo-France forecasts.png new file mode 100644 index 0000000..aba85e5 Binary files /dev/null and b/scripts/local_imgs/Meteo-France forecasts.png differ diff --git a/scripts/local_imgs/Meteo-France wind archive.png b/scripts/local_imgs/Meteo-France wind archive.png new file mode 100644 index 0000000..20069d8 Binary files /dev/null and b/scripts/local_imgs/Meteo-France wind archive.png differ diff --git a/scripts/local_imgs/Mundi Sentienl 3 OLCI Images.png b/scripts/local_imgs/Mundi Sentienl 3 OLCI 
Images.png new file mode 100644 index 0000000..dd2977c Binary files /dev/null and b/scripts/local_imgs/Mundi Sentienl 3 OLCI Images.png differ diff --git a/scripts/local_imgs/Mundi Sentienl 3 SRAL Images.png b/scripts/local_imgs/Mundi Sentienl 3 SRAL Images.png new file mode 100644 index 0000000..d54a3ea Binary files /dev/null and b/scripts/local_imgs/Mundi Sentienl 3 SRAL Images.png differ diff --git a/scripts/local_imgs/Mundi datacenter.png b/scripts/local_imgs/Mundi datacenter.png new file mode 100644 index 0000000..e67753e Binary files /dev/null and b/scripts/local_imgs/Mundi datacenter.png differ diff --git a/scripts/local_imgs/SAR High points.png b/scripts/local_imgs/SAR High points.png new file mode 100644 index 0000000..16019b3 Binary files /dev/null and b/scripts/local_imgs/SAR High points.png differ diff --git a/scripts/populate_models.sh b/scripts/populate_models.sh new file mode 100755 index 0000000..9bed980 --- /dev/null +++ b/scripts/populate_models.sh @@ -0,0 +1,62 @@ +#!/usr/bin/env bash + + +# Must specify a JSON with a following structure: + +# [ +# { +# "api": "endpoint", +# "content": [ +# {} //obj to populate +# ] +# }, +# ] + +ENDPOINT="http://localhost:49618" +if [[ $DOCKER_ENVIRONMENT ]]; then + ENDPOINT="http://oc-catalog:49618" +fi + +if [[ $DOCKER_ENDPOINT ]]; then + ENDPOINT="$DOCKER_ENDPOINT" +fi + +[[ -z $1 ]] && { echo "Must specify a json path"; exit 1; } +[[ ! -f $1 ]] && { echo "$1 is not a file"; exit 1; } +cat "$1" | jq empty || { echo "$1 is not a valid JSON"; exit 1; } + +######## + +while read row; do + TRGT_ENDPOINT=$(echo $row | jq -r '.api') + + while read item; do + + img_data=$(echo $item | jq -r '.logo') + # If is a path, replace with base64 encoded image + if [[ $img_data =~ ^\./.* ]]; then + if [[ ! 
-f ./scripts/"$img_data" ]]; then + echo "File ./scripts/$img_data doesn't exist"; exit 1 + fi + + item=$(echo $item | jq ".logo |= \"$(base64 -w0 ./scripts/"$img_data")\"") + fi + + echo `echo $item | jq -r '.name'` to $ENDPOINT${TRGT_ENDPOINT} + + answer=$(curl --fail "$ENDPOINT${TRGT_ENDPOINT}" \ + -X POST \ + --data-binary "$item") + + if [[ $? -ne 0 || "$answer" == *""* ]]; then + echo -e "\nERROR: Some error ocurred when submitting" + exit 1 + fi + done < <(echo "$row" | jq -c '.content[]') +done < <(jq -c '.[]' $1) + + +echo +echo +echo +echo "All models submitted correctly!" \ No newline at end of file diff --git a/selfapi/.gitignore b/selfapi/.gitignore new file mode 100644 index 0000000..daf913b --- /dev/null +++ b/selfapi/.gitignore @@ -0,0 +1,24 @@ +# Compiled Object files, Static and Dynamic libs (Shared Objects) +*.o +*.a +*.so + +# Folders +_obj +_test + +# Architecture specific extensions/prefixes +*.[568vq] +[568vq].out + +*.cgo1.go +*.cgo2.c +_cgo_defun.c +_cgo_gotypes.go +_cgo_export.* + +_testmain.go + +*.exe +*.test +*.prof diff --git a/selfapi/.swagger-codegen-ignore b/selfapi/.swagger-codegen-ignore new file mode 100644 index 0000000..c5fa491 --- /dev/null +++ b/selfapi/.swagger-codegen-ignore @@ -0,0 +1,23 @@ +# Swagger Codegen Ignore +# Generated by swagger-codegen https://github.com/swagger-api/swagger-codegen + +# Use this file to prevent files from being overwritten by the generator. +# The patterns follow closely to .gitignore or .dockerignore. + +# As an example, the C# client generator defines ApiClient.cs. 
+# You can make changes and tell Swagger Codgen to ignore just this file by uncommenting the following line: +#ApiClient.cs + +# You can match any string of characters against a directory, file or extension with a single asterisk (*): +#foo/*/qux +# The above matches foo/bar/qux and foo/baz/qux, but not foo/bar/baz/qux + +# You can recursively match patterns against a directory, file or extension with a double asterisk (**): +#foo/**/qux +# This matches foo/bar/qux, foo/baz/qux, and foo/bar/baz/qux + +# You can also negate patterns with an exclamation (!). +# For example, you can ignore all files in a docs folder with the file extension .md: +#docs/*.md +# Then explicitly reverse the ignore rule for a single file: +#!docs/README.md diff --git a/selfapi/.swagger-codegen/VERSION b/selfapi/.swagger-codegen/VERSION new file mode 100644 index 0000000..6381ae0 --- /dev/null +++ b/selfapi/.swagger-codegen/VERSION @@ -0,0 +1 @@ +2.4.18 \ No newline at end of file diff --git a/selfapi/.travis.yml b/selfapi/.travis.yml new file mode 100644 index 0000000..f5cb2ce --- /dev/null +++ b/selfapi/.travis.yml @@ -0,0 +1,8 @@ +language: go + +install: + - go get -d -v . + +script: + - go build -v ./ + diff --git a/selfapi/README.md b/selfapi/README.md new file mode 100644 index 0000000..f7b9284 --- /dev/null +++ b/selfapi/README.md @@ -0,0 +1,100 @@ +# Go API client for swagger + +Backend of the oc-search project + +## Overview +This API client was generated by the [swagger-codegen](https://github.com/swagger-api/swagger-codegen) project. By using the [swagger-spec](https://github.com/swagger-api/swagger-spec) from a remote server, you can easily generate an API client. 
+ +- API version: 1.0.0 +- Package version: 1.0.0 +- Build package: io.swagger.codegen.languages.GoClientCodegen + +## Installation +Put the package under your project folder and add the following in import: +```golang +import "./swagger" +``` + +## Documentation for API Endpoints + +All URIs are relative to *https://localhost:49618/v1* + +Class | Method | HTTP request | Description +------------ | ------------- | ------------- | ------------- +*ComputingApi* | [**ComputingControllerAddComputing**](docs/ComputingApi.md#computingcontrolleraddcomputing) | **Post** /computing/ | +*ComputingApi* | [**ComputingControllerGetComputingByID**](docs/ComputingApi.md#computingcontrollergetcomputingbyid) | **Get** /computing/{ID} | +*ComputingApi* | [**ComputingControllerGetMultipleComputingByIDs**](docs/ComputingApi.md#computingcontrollergetmultiplecomputingbyids) | **Get** /computing/multi/{IDs} | +*DataApi* | [**DataControllerCreateData**](docs/DataApi.md#datacontrollercreatedata) | **Post** /data/ | +*DataApi* | [**DataControllerGetDataByID**](docs/DataApi.md#datacontrollergetdatabyid) | **Get** /data/{ID} | +*DataApi* | [**DataControllerGetMultipleDataByIDs**](docs/DataApi.md#datacontrollergetmultipledatabyids) | **Get** /data/multi/{IDs} | +*DatacenterApi* | [**DatacenterControllerCreateDatacenter**](docs/DatacenterApi.md#datacentercontrollercreatedatacenter) | **Post** /datacenter/ | +*DatacenterApi* | [**DatacenterControllerGetMultipleDatacentersByIDs**](docs/DatacenterApi.md#datacentercontrollergetmultipledatacentersbyids) | **Get** /datacenter/multi/{IDs} | +*DatacenterApi* | [**DatacenterControllerGetOneDatacenter**](docs/DatacenterApi.md#datacentercontrollergetonedatacenter) | **Get** /datacenter/{ID} | +*ScheduleApi* | [**ScheduleControllerCheckIfScheduleCanBeCreatedInThisDC**](docs/ScheduleApi.md#schedulecontrollercheckifschedulecanbecreatedinthisdc) | **Post** /schedule/check | +*ScheduleApi* | 
[**ScheduleControllerCreateSchedule**](docs/ScheduleApi.md#schedulecontrollercreateschedule) | **Post** /schedule/book | +*ScheduleApi* | [**ScheduleControllerGetNextSchedule**](docs/ScheduleApi.md#schedulecontrollergetnextschedule) | **Get** /schedule/next | +*ScheduleApi* | [**ScheduleControllerGetPreviousSchedule**](docs/ScheduleApi.md#schedulecontrollergetpreviousschedule) | **Get** /schedule/previous | +*ScheduleApi* | [**ScheduleControllerGetSchedules**](docs/ScheduleApi.md#schedulecontrollergetschedules) | **Get** /schedule/ | +*SearchApi* | [**SearchControllerSearchByWord**](docs/SearchApi.md#searchcontrollersearchbyword) | **Get** /search/byWord | +*StorageApi* | [**StorageControllerCreateStorage**](docs/StorageApi.md#storagecontrollercreatestorage) | **Post** /storage/ | +*StorageApi* | [**StorageControllerGet**](docs/StorageApi.md#storagecontrollerget) | **Get** /storage/{ID} | +*StorageApi* | [**StorageControllerGetMultipleStoragesByIDs**](docs/StorageApi.md#storagecontrollergetmultiplestoragesbyids) | **Get** /storage/multi/{IDs} | +*UserApi* | [**UserControllerLogin**](docs/UserApi.md#usercontrollerlogin) | **Get** /user/login | +*UserApi* | [**UserControllerLogout**](docs/UserApi.md#usercontrollerlogout) | **Get** /user/logout | +*WorkflowApi* | [**WorkflowControllerAddNewObjectToAWorkflow**](docs/WorkflowApi.md#workflowcontrolleraddnewobjecttoaworkflow) | **Post** /workflow/{workflowName}/add | +*WorkflowApi* | [**WorkflowControllerBookSchedule**](docs/WorkflowApi.md#workflowcontrollerbookschedule) | **Post** /workflow/{workflowName}/schedule/book | +*WorkflowApi* | [**WorkflowControllerCheckSchedule**](docs/WorkflowApi.md#workflowcontrollercheckschedule) | **Get** /workflow/{workflowName}/schedule/check | +*WorkflowApi* | [**WorkflowControllerCreateANewWorkflow**](docs/WorkflowApi.md#workflowcontrollercreateanewworkflow) | **Post** /workflow/ | +*WorkflowApi* | 
[**WorkflowControllerCreateARealtionshipBetweenTwoRobjects**](docs/WorkflowApi.md#workflowcontrollercreatearealtionshipbetweentworobjects) | **Post** /workflow/{workflowName}/link | +*WorkflowApi* | [**WorkflowControllerGetMxGraphLastStatus**](docs/WorkflowApi.md#workflowcontrollergetmxgraphlaststatus) | **Get** /workflow/{workflowName}/mxGraphParser | +*WorkflowApi* | [**WorkflowControllerGetSchedule**](docs/WorkflowApi.md#workflowcontrollergetschedule) | **Get** /workflow/{workflowName}/schedule | +*WorkflowApi* | [**WorkflowControllerGetWorkflow**](docs/WorkflowApi.md#workflowcontrollergetworkflow) | **Get** /workflow/{workflowName} | +*WorkflowApi* | [**WorkflowControllerListWorkflows**](docs/WorkflowApi.md#workflowcontrollerlistworkflows) | **Get** /workflow/ | +*WorkflowApi* | [**WorkflowControllerParseMxGraph**](docs/WorkflowApi.md#workflowcontrollerparsemxgraph) | **Post** /workflow/{workflowName}/mxGraphParser | +*WorkflowApi* | [**WorkflowControllerSetSchedule**](docs/WorkflowApi.md#workflowcontrollersetschedule) | **Put** /workflow/{workflowName}/schedule | +*WorkspaceApi* | [**WorkspaceControllerAddModelToWorkspace**](docs/WorkspaceApi.md#workspacecontrolleraddmodeltoworkspace) | **Post** /workspace/ | +*WorkspaceApi* | [**WorkspaceControllerDeleteElementFromUserWorkspace**](docs/WorkspaceApi.md#workspacecontrollerdeleteelementfromuserworkspace) | **Delete** /workspace/ | +*WorkspaceApi* | [**WorkspaceControllerGetFullWorkspace**](docs/WorkspaceApi.md#workspacecontrollergetfullworkspace) | **Get** /workspace/list_model | +*WorkspaceApi* | [**WorkspaceControllerGetWorkspace**](docs/WorkspaceApi.md#workspacecontrollergetworkspace) | **Get** /workspace/list | + + +## Documentation For Models + + - [ModelsComputingModel](docs/ModelsComputingModel.md) + - [ModelsComputingNewModel](docs/ModelsComputingNewModel.md) + - [ModelsComputingObject](docs/ModelsComputingObject.md) + - [ModelsDCstatus](docs/ModelsDCstatus.md) + - 
[ModelsDataModel](docs/ModelsDataModel.md) + - [ModelsDataNewModel](docs/ModelsDataNewModel.md) + - [ModelsDataObject](docs/ModelsDataObject.md) + - [ModelsDatacenterCpuModel](docs/ModelsDatacenterCpuModel.md) + - [ModelsDatacenterGpuModel](docs/ModelsDatacenterGpuModel.md) + - [ModelsDatacenterMemoryModel](docs/ModelsDatacenterMemoryModel.md) + - [ModelsDatacenterModel](docs/ModelsDatacenterModel.md) + - [ModelsDatacenterNewModel](docs/ModelsDatacenterNewModel.md) + - [ModelsDatacenterObject](docs/ModelsDatacenterObject.md) + - [ModelsExecutionRequirementsModel](docs/ModelsExecutionRequirementsModel.md) + - [ModelsRepositoryModel](docs/ModelsRepositoryModel.md) + - [ModelsScheduleDb](docs/ModelsScheduleDb.md) + - [ModelsScheduleInfo](docs/ModelsScheduleInfo.md) + - [ModelsScheduleTime](docs/ModelsScheduleTime.md) + - [ModelsSearchResult](docs/ModelsSearchResult.md) + - [ModelsStorageModel](docs/ModelsStorageModel.md) + - [ModelsStorageNewModel](docs/ModelsStorageNewModel.md) + - [ModelsStorageObject](docs/ModelsStorageObject.md) + - [ModelsWorkflow](docs/ModelsWorkflow.md) + - [ModelsWorkflowSchedule](docs/ModelsWorkflowSchedule.md) + - [ModelsWorkspace](docs/ModelsWorkspace.md) + - [ModelsWorkspaceModel](docs/ModelsWorkspaceModel.md) + - [PrimitiveObjectId](docs/PrimitiveObjectId.md) + - [TimeTime](docs/TimeTime.md) + + +## Documentation For Authorization + Endpoints do not require authorization. 
+ + +## Author + +opencloud@irt-saintexupery.com + diff --git a/selfapi/api/swagger.yaml b/selfapi/api/swagger.yaml new file mode 100644 index 0000000..17dbfb1 --- /dev/null +++ b/selfapi/api/swagger.yaml @@ -0,0 +1,1996 @@ +--- +swagger: "2.0" +info: + description: "Backend of the oc-search project" + version: "1.0.0" + title: "oc-catalog API" + contact: + email: "opencloud@irt-saintexupery.com" +host: "localhost:49618" +basePath: "/v1" +tags: +- name: "data" + description: "All operations related to the rType data\n" +- name: "computing" + description: "All operations related to the rType computing\n" +- name: "datacenter" + description: "DatacenterController operations about datacenters\n" +- name: "storage" + description: "StorageController operations about storage\n" +paths: + /computing/: + post: + tags: + - "computing" + description: "Submit a computing object" + operationId: "ComputingController.Add computing" + parameters: + - in: "body" + name: "body" + description: "The object content" + required: true + schema: + $ref: "#/definitions/models.ComputingNEWModel" + x-exportParamName: "Body" + responses: + "200": + description: "{string} ID" + "403": + description: "Missing body or fields" + /computing/multi/{IDs}: + get: + tags: + - "computing" + description: "Return Computing objects if found in the DB. 
Not found IDs will\ + \ be ignored" + operationId: "ComputingController.Get multiple computing by IDs" + parameters: + - name: "IDs" + in: "path" + description: "List of computing IDs" + required: true + type: "array" + items: + type: "string" + x-exportParamName: "IDs" + responses: + "200": + description: "" + schema: + type: "array" + items: + $ref: "#/definitions/models.ComputingModel" + "403": + description: "IDs are empty" + /computing/{ID}: + get: + tags: + - "computing" + description: "Find a computing resource based on ID" + operationId: "ComputingController.Get computing by ID" + parameters: + - name: "ID" + in: "path" + description: "The ID of the resource" + required: true + type: "string" + x-exportParamName: "ID" + responses: + "200": + description: "" + schema: + $ref: "#/definitions/models.ComputingModel" + "403": + description: "ID is empty" + /data/: + post: + tags: + - "data" + description: "Submit data object" + operationId: "DataController.Create Data" + parameters: + - in: "body" + name: "body" + description: "The object content" + required: true + schema: + $ref: "#/definitions/models.DataNEWModel" + x-exportParamName: "Body" + responses: + "200": + description: "{string} ID" + "403": + description: "Missing body or fields" + /data/multi/{IDs}: + get: + tags: + - "data" + description: "Return Data object if found in the DB. 
Not found IDs will be ignored" + operationId: "DataController.Get multiple data by IDs" + parameters: + - name: "IDs" + in: "path" + description: "List of data IDs" + required: true + type: "array" + items: + type: "string" + x-exportParamName: "IDs" + responses: + "200": + description: "" + schema: + type: "array" + items: + $ref: "#/definitions/models.DataModel" + "403": + description: "IDs are empty" + /data/{ID}: + get: + tags: + - "data" + description: "Find rType data based on ID" + operationId: "DataController.Get data by ID" + parameters: + - name: "ID" + in: "path" + description: "The ID of the data resource" + required: true + type: "string" + x-exportParamName: "ID" + responses: + "200": + description: "" + schema: + $ref: "#/definitions/models.DataModel" + "403": + description: "ID is empty" + /datacenter/: + post: + tags: + - "datacenter" + description: "submit Datacenter object" + operationId: "DatacenterController.Create Datacenter" + parameters: + - in: "body" + name: "body" + description: "The object content" + required: true + schema: + $ref: "#/definitions/models.DatacenterNEWModel" + x-exportParamName: "Body" + responses: + "200": + description: "{string} models.DatacenterModel" + "403": + description: "Missing body or fields" + /datacenter/multi/{IDs}: + get: + tags: + - "datacenter" + description: "Return Datacenter objects if found in the DB. 
Not found IDs will\ + \ be ignored" + operationId: "DatacenterController.Get multiple datacenters by IDs" + parameters: + - name: "IDs" + in: "path" + description: "List of datacenter IDs" + required: true + type: "array" + items: + type: "string" + x-exportParamName: "IDs" + responses: + "200": + description: "" + schema: + type: "array" + items: + $ref: "#/definitions/models.ComputingModel" + "403": + description: "IDs are empty" + /datacenter/{ID}: + get: + tags: + - "datacenter" + description: "find datacenter by ID" + operationId: "DatacenterController.GetOneDatacenter" + parameters: + - name: "ID" + in: "path" + description: "the ID you want to get" + required: true + type: "string" + x-exportParamName: "ID" + responses: + "200": + description: "" + schema: + $ref: "#/definitions/models.DatacenterModel" + "403": + description: "ID is empty" + /schedule/: + get: + tags: + - "schedule" + description: "Get a list of next startDates schedules (inclusive). If timezone\ + \ is not specified, will assume UTC" + operationId: "ScheduleController.Get schedules" + parameters: + - name: "startDate" + in: "query" + description: "Start date" + required: true + x-exportParamName: "StartDate" + - name: "stopDate" + in: "query" + description: "End date" + required: true + x-exportParamName: "StopDate" + responses: + "200": + description: "" + schema: + type: "array" + items: + $ref: "#/definitions/models.ScheduleDB" + "201": + description: "Too much elements within the range of dates" + "400": + description: "Other error. Check the output" + "403": + description: "Authentication issue" + /schedule/book: + post: + tags: + - "schedule" + description: "Create schedule for a workflow. It will return some future executions\ + \ just as information" + operationId: "ScheduleController.Create schedule" + parameters: + - name: "dcName" + in: "query" + description: "Name of the node (oc-catalog) from where the workflow comes." 
+ required: true + type: "string" + x-exportParamName: "DcName" + - name: "workflowName" + in: "query" + description: "Workflow Name" + required: true + type: "string" + x-exportParamName: "WorkflowName" + - name: "cron" + in: "query" + description: "Cron syntax with year. If no year is specified, will use the\ + \ current" + required: true + type: "string" + x-exportParamName: "Cron" + - name: "duration" + in: "query" + description: "Duration in seconds" + required: true + type: "integer" + format: "int32" + x-exportParamName: "Duration" + - name: "startDate" + in: "query" + description: "RFC3339 time for startDate" + required: true + x-exportParamName: "StartDate" + - name: "stopDate" + in: "query" + description: "RFC3339 time for stopDate" + required: true + x-exportParamName: "StopDate" + - in: "body" + name: "requirements" + description: "The object content" + required: true + schema: + $ref: "#/definitions/models.ExecutionRequirementsModel" + x-exportParamName: "Requirements" + responses: + "200": + description: "" + schema: + $ref: "#/definitions/models.ScheduleInfo" + "400": + description: "workflowName not found or empty" + "403": + description: "Authentication issue" + /schedule/check: + post: + tags: + - "schedule" + description: "Check for availability of this DC" + operationId: "ScheduleController.Check if schedule can be created in this DC" + parameters: + - name: "cron" + in: "query" + description: "Cron syntax" + required: true + type: "string" + x-exportParamName: "Cron" + - name: "duration" + in: "query" + description: "Duration in seconds" + required: true + type: "integer" + format: "int32" + x-exportParamName: "Duration" + - name: "startDate" + in: "query" + description: "RFC3339 time for startDate" + required: true + x-exportParamName: "StartDate" + - name: "stopDate" + in: "query" + description: "RFC3339 time for stopDate" + required: true + x-exportParamName: "StopDate" + - in: "body" + name: "requirements" + description: "The object 
content" + required: true + schema: + $ref: "#/definitions/models.ExecutionRequirementsModel" + x-exportParamName: "Requirements" + responses: + "200": + description: "The schedule can be created" + "400": + description: "Other error. Check the output" + "403": + description: "Authentication issue" + /schedule/next: + get: + tags: + - "schedule" + description: "Give a date, get the next date where there are at least on schedule.\ + \ If no hours specified, will assume 00:00" + operationId: "ScheduleController.Get next schedule" + parameters: + - name: "baseDate" + in: "query" + description: "Base date" + required: true + x-exportParamName: "BaseDate" + responses: + "200": + description: "" + schema: + $ref: "#/definitions/*time.Time" + "400": + description: "Other error. Check the output" + "403": + description: "Authentication issue" + /schedule/previous: + get: + tags: + - "schedule" + description: "Give a date, get the previous date where there are at least on\ + \ schedule. If no hours specified, will assume 00:00" + operationId: "ScheduleController.Get previous schedule" + parameters: + - name: "baseDate" + in: "query" + description: "Base date" + required: true + x-exportParamName: "BaseDate" + responses: + "200": + description: "" + schema: + $ref: "#/definitions/*time.Time" + "400": + description: "Other error. 
Check the output" + "403": + description: "Authentication issue" + /search/byWord: + get: + tags: + - "search" + description: "find resources by word" + operationId: "SearchController.Search by word" + parameters: + - name: "word" + in: "query" + description: "Word to search across all resources" + required: true + type: "string" + x-exportParamName: "Word" + responses: + "200": + description: "" + schema: + $ref: "#/definitions/models.SearchResult" + "503": + description: "Internal error" + /storage/: + post: + tags: + - "storage" + description: "submit storage object" + operationId: "StorageController.Create Storage" + parameters: + - in: "body" + name: "body" + description: "The object content" + required: true + schema: + $ref: "#/definitions/models.StorageNEWModel" + x-exportParamName: "Body" + responses: + "200": + description: "{string} models.StorageModel" + "403": + description: "Missing body or fields" + /storage/multi/{IDs}: + get: + tags: + - "storage" + description: "Return Storage objects if found in the DB. 
Not found IDs will\ + \ be ignored" + operationId: "StorageController.Get multiple storages by IDs" + parameters: + - name: "IDs" + in: "path" + description: "List of storage IDs" + required: true + type: "array" + items: + type: "string" + x-exportParamName: "IDs" + responses: + "200": + description: "" + schema: + type: "array" + items: + $ref: "#/definitions/models.ComputingModel" + "403": + description: "IDs are empty" + /storage/{ID}: + get: + tags: + - "storage" + description: "find storage by ID" + operationId: "StorageController.Get" + parameters: + - name: "ID" + in: "path" + description: "the ID you want to get" + required: true + type: "string" + x-exportParamName: "ID" + responses: + "200": + description: "" + schema: + $ref: "#/definitions/models.StorageModel" + "403": + description: "ID is empty" + /user/login: + get: + tags: + - "user" + description: "Logs user into the system" + operationId: "UserController.Login" + parameters: + - name: "username" + in: "query" + description: "The username for login" + required: true + type: "string" + x-exportParamName: "Username" + - name: "password" + in: "query" + description: "The password for login" + required: true + type: "string" + x-exportParamName: "Password" + responses: + "200": + description: "{string} login success" + "403": + description: "user not exist" + /user/logout: + get: + tags: + - "user" + description: "Logs out current logged in user session" + operationId: "UserController.logout" + parameters: [] + responses: + "200": + description: "{string} logout success" + /workflow/: + get: + tags: + - "workflow" + description: "List available workflows" + operationId: "WorkflowController.List workflows" + parameters: [] + responses: + "200": + description: "[]string List of workflows" + "400": + description: "{string} Other error" + "403": + description: "Authentication issue" + post: + tags: + - "workflow" + description: "Create a name for the new workflow" + operationId: "WorkflowController.Create 
a new workflow" + parameters: + - name: "workflowName" + in: "query" + description: "Name of the workflow" + required: true + type: "string" + x-exportParamName: "WorkflowName" + responses: + "200": + description: "{string} Workflow created succeful" + "400": + description: "{string} Other error" + "403": + description: "Authentication issue" + /workflow/{workflowName}: + get: + tags: + - "workflow" + description: "Get a workflow by name" + operationId: "WorkflowController.Get Workflow" + parameters: + - name: "workflowName" + in: "path" + description: "Workflow Name" + required: true + type: "string" + x-exportParamName: "WorkflowName" + responses: + "200": + description: "" + schema: + $ref: "#/definitions/models.Workflow" + "400": + description: "{string} Other error" + "403": + description: "Authentication issue" + /workflow/{workflowName}/add: + post: + tags: + - "workflow" + description: "Create a Rtype object from already added resources to the workspace" + operationId: "WorkflowController.Add new object to a Workflow" + parameters: + - name: "workflowName" + in: "path" + description: "workflow Name" + required: true + type: "string" + x-exportParamName: "WorkflowName" + - name: "rID" + in: "query" + description: "rID of already existing item in Workspace" + required: true + type: "string" + x-exportParamName: "RID" + responses: + "200": + description: "{string} ID of the new object (rObjID)" + "400": + description: "{string} Other error" + "403": + description: "Authentication issue" + /workflow/{workflowName}/link: + post: + tags: + - "workflow" + description: "Create a Rtype object from already added resources to the workspace" + operationId: "WorkflowController.Create a realtionship between two Robjects" + parameters: + - name: "workflowName" + in: "path" + description: "Workflow Name" + required: true + type: "string" + x-exportParamName: "WorkflowName" + - name: "rObjIDsource" + in: "query" + description: "Robject source. 
Usually Data" + required: true + type: "string" + x-exportParamName: "RObjIDsource" + - name: "isInput" + in: "query" + description: "If the operation is for input (true) linkage or output (false)" + required: true + type: "boolean" + x-exportParamName: "IsInput" + - name: "rObjIDtarger" + in: "query" + description: "Robject where will be written the association" + required: true + type: "string" + x-exportParamName: "RObjIDtarger" + responses: + "200": + description: "{string} ID of the new object (rObjID)" + "400": + description: "{string} Other error" + "403": + description: "Authentication issue" + /workflow/{workflowName}/mxGraphParser: + get: + tags: + - "workflow" + description: "Obtain the last mxgraph XML status from the workflow" + operationId: "WorkflowController.Get mxGraph last status" + parameters: + - name: "workflowName" + in: "path" + description: "Workflow Name" + required: true + type: "string" + x-exportParamName: "WorkflowName" + responses: + "200": + description: "The xmlgraph" + "201": + description: "Empty workflow" + "400": + description: "{string} Other error" + "403": + description: "Authentication issue" + post: + tags: + - "workflow" + description: "If we use this aproach to transofrm mxgraph representation in\ + \ our representation, we should not use other API calls for modify the project\ + \ structure or we'll have inconsistencies." 
+ operationId: "WorkflowController.Parse mxGraph" + parameters: + - name: "workflowName" + in: "path" + description: "Workflow Name" + required: true + type: "string" + x-exportParamName: "WorkflowName" + - in: "body" + name: "xmlData" + description: "Xml representation of the workflow" + required: true + schema: + type: "string" + x-exportParamName: "XmlData" + responses: + "200": + description: "The xmlgraph consumed correctly" + "201": + description: "The xmlgraph consumed with issues" + "400": + description: "{string} Other error" + "403": + description: "Authentication issue" + /workflow/{workflowName}/schedule: + get: + tags: + - "workflow" + description: "Obtain the desired schedule of this workflow" + operationId: "WorkflowController.Get Schedule" + parameters: + - name: "workflowName" + in: "path" + description: "Workflow Name" + required: true + type: "string" + x-exportParamName: "WorkflowName" + responses: + "200": + description: "" + schema: + $ref: "#/definitions/models.ScheduleTime" + "400": + description: "Workflow doesn't exist" + "401": + description: "Other error" + "403": + description: "Authentication issue" + put: + tags: + - "workflow" + description: "Set desired schedule by the user. 
No other effects a part of saving\ + \ the user input" + operationId: "WorkflowController.Set Schedule" + parameters: + - name: "workflowName" + in: "path" + description: "Workflow Name" + required: true + type: "string" + x-exportParamName: "WorkflowName" + - name: "isService" + in: "query" + description: "True: Service, False: Task" + required: true + type: "boolean" + x-exportParamName: "IsService" + - name: "startDate" + in: "query" + description: "RFC3339 time for startDate" + required: true + x-exportParamName: "StartDate" + - name: "stopDate" + in: "query" + description: "RFC3339 time for stopDate" + required: true + x-exportParamName: "StopDate" + - name: "events" + in: "query" + description: "List of events separated by comma" + required: false + type: "string" + x-exportParamName: "Events" + x-optionalDataType: "String" + - name: "cronString" + in: "query" + description: "Cron string" + required: false + type: "string" + x-exportParamName: "CronString" + x-optionalDataType: "String" + - name: "duration" + in: "query" + description: "Duration in seconds" + required: false + type: "integer" + format: "int32" + x-exportParamName: "Duration" + x-optionalDataType: "Int32" + responses: + "200": + description: "" + schema: + $ref: "#/definitions/models.ScheduleInfo" + "400": + description: "Workflow doesn't exist" + "401": + description: "Other error" + "402": + description: "Bad user input" + "403": + description: "Authentication issue" + /workflow/{workflowName}/schedule/book: + post: + tags: + - "workflow" + description: "Book a schedule in all DCs of the workflow. Must set a desired\ + \ schedule first!" + operationId: "WorkflowController.Book Schedule" + parameters: + - name: "workflowName" + in: "path" + description: "Workflow Name" + required: true + type: "string" + x-exportParamName: "WorkflowName" + responses: + "200": + description: "" + schema: + type: "array" + items: + $ref: "#/definitions/models.DCstatus" + "401": + description: "Other error. 
Check output" + "403": + description: "Authentication issue" + /workflow/{workflowName}/schedule/check: + get: + tags: + - "workflow" + description: "Check if we can schedule the project in other DCs. Must set a\ + \ desired schedule first!" + operationId: "WorkflowController.Check Schedule" + parameters: + - name: "workflowName" + in: "path" + description: "Workflow Name" + required: true + type: "string" + x-exportParamName: "WorkflowName" + responses: + "200": + description: "" + schema: + type: "array" + items: + $ref: "#/definitions/models.DCstatus" + "401": + description: "Other error" + "403": + description: "Authentication issue" + /workspace/: + post: + tags: + - "workspace" + description: "Insert a resource in the workspace" + operationId: "WorkspaceController.Add model to workspace" + parameters: + - name: "id" + in: "query" + description: "ID of a resource" + required: true + type: "string" + x-exportParamName: "Id" + - name: "rtype" + in: "query" + description: "Type of resource" + required: true + type: "string" + x-exportParamName: "Rtype" + responses: + "200": + description: "{string} login success" + "400": + description: "{string} Other error" + "403": + description: "Authentication issue" + delete: + tags: + - "workspace" + description: "Remove a resource from the workspace" + operationId: "WorkspaceController.Delete element from user workspace" + parameters: + - name: "id" + in: "query" + description: "ID of a resource" + required: true + type: "string" + x-exportParamName: "Id" + - name: "rtype" + in: "query" + description: "Type of resource" + required: true + type: "string" + x-exportParamName: "Rtype" + responses: + "200": + description: "{string} Removed succeful" + "400": + description: "{string} Other error" + "403": + description: "Authentication issue" + /workspace/list: + get: + tags: + - "workspace" + description: "Get workspace elements based on user_id token" + operationId: "WorkspaceController.Get workspace" + parameters: [] + 
responses: + "200": + description: "" + schema: + $ref: "#/definitions/models.Workspace" + "403": + description: "Authentication issue" + /workspace/list_model: + get: + tags: + - "workspace" + description: "Get full workspace elements based on user_id token" + operationId: "WorkspaceController.Get full workspace" + parameters: [] + responses: + "200": + description: "" + schema: + $ref: "#/definitions/models.WorkspaceModel" + "403": + description: "Authentication issue" +definitions: + '*time.Time': + type: "object" + title: "Time" + models.ComputingModel: + type: "object" + required: + - "ID" + properties: + ID: + type: "string" + example: "5099803df3f4948bd2f98391" + description: + type: "string" + execution_requirements: + $ref: "#/definitions/models.ExecutionRequirementsModel" + license: + type: "string" + logo: + type: "string" + name: + type: "string" + description: "Name of the computing" + owner: + type: "string" + price: + type: "integer" + format: "int32" + repository: + $ref: "#/definitions/models.RepositoryModel" + short_description: + type: "string" + type: + type: "string" + title: "ComputingModel" + example: + owner: "owner" + license: "license" + short_description: "short_description" + price: 5 + name: "name" + description: "description" + logo: "logo" + execution_requirements: + cpus: 0 + parallel: true + scaling_model: 5 + gpus: 6 + disk_io: "disk_io" + ram: 1 + ID: "5099803df3f4948bd2f98391" + repository: + credentials: "credentials" + url: "url" + type: "type" + models.ComputingNEWModel: + type: "object" + required: + - "description" + - "logo" + - "name" + - "short_description" + - "type" + properties: + description: + type: "string" + execution_requirements: + $ref: "#/definitions/models.ExecutionRequirementsModel" + license: + type: "string" + logo: + type: "string" + name: + type: "string" + description: "Name of the computing" + owner: + type: "string" + price: + type: "integer" + format: "int32" + repository: + $ref: 
"#/definitions/models.RepositoryModel" + short_description: + type: "string" + type: + type: "string" + title: "ComputingNEWModel" + models.ComputingObject: + type: "object" + properties: + datacenterID: + type: "string" + description: "Datacenter where the computing will be executed" + inputs: + type: "array" + items: + type: "string" + outputs: + type: "array" + items: + type: "string" + referenceID: + description: "Computing model ID" + $ref: "#/definitions/primitive.ObjectID" + title: "ComputingObject" + example: + outputs: + - "outputs" + - "outputs" + inputs: + - "inputs" + - "inputs" + datacenterID: "datacenterID" + referenceID: {} + models.DCstatus: + type: "object" + properties: + Booked: + $ref: "#/definitions/models.ScheduleInfo" + DCname: + type: "string" + DCobjID: + type: "string" + ErrorMessage: + type: "string" + IsAvailable: + type: "boolean" + IsReachable: + type: "boolean" + title: "DCstatus" + example: + DCobjID: "DCobjID" + DCname: "DCname" + IsAvailable: true + Booked: + Total: 0 + NextExecutions: + - "NextExecutions" + - "NextExecutions" + IsReachable: true + ErrorMessage: "ErrorMessage" + models.DataModel: + type: "object" + required: + - "ID" + properties: + ID: + type: "string" + description: + type: "string" + example: + type: "string" + description: "base64 encoded data" + ftype: + type: "string" + location: + type: "string" + logo: + type: "string" + name: + type: "string" + description: "Name of the data" + protocol: + type: "array" + items: + type: "string" + short_description: + type: "string" + type: + type: "string" + example: "file" + description: "Define type of data" + title: "DataModel" + example: + short_description: "short_description" + protocol: + - "protocol" + - "protocol" + ftype: "ftype" + name: "name" + description: "description" + logo: "logo" + location: "location" + ID: "ID" + type: "file" + example: "example" + models.DataNEWModel: + type: "object" + required: + - "description" + - "example" + - "location" + - 
"logo" + - "name" + - "short_description" + - "type" + properties: + description: + type: "string" + example: + type: "string" + description: "base64 encoded data" + ftype: + type: "string" + location: + type: "string" + logo: + type: "string" + name: + type: "string" + description: "Name of the data" + protocol: + type: "array" + items: + type: "string" + short_description: + type: "string" + type: + type: "string" + example: "file" + description: "Define type of data" + title: "DataNEWModel" + models.DataObject: + type: "object" + properties: + referenceID: + description: "Data model ID" + $ref: "#/definitions/primitive.ObjectID" + title: "DataObject" + example: {} + models.DatacenterCpuModel: + type: "object" + required: + - "cores" + properties: + architecture: + type: "string" + cores: + type: "integer" + format: "int32" + minimum_memory: + type: "integer" + format: "int32" + platform: + type: "string" + shared: + type: "boolean" + title: "DatacenterCpuModel" + example: + shared: true + cores: 6 + platform: "platform" + architecture: "architecture" + minimum_memory: 1 + models.DatacenterGpuModel: + type: "object" + properties: + cuda_cores: + type: "integer" + format: "int32" + memory: + type: "integer" + format: "int32" + description: "Units in MB" + model: + type: "string" + tensor_cores: + type: "integer" + format: "int32" + title: "DatacenterGpuModel" + example: + memory: 5 + cuda_cores: 5 + model: "model" + tensor_cores: 2 + models.DatacenterMemoryModel: + type: "object" + properties: + ecc: + type: "boolean" + size: + type: "integer" + format: "int32" + description: "Units in MB" + title: "DatacenterMemoryModel" + example: + ecc: true + size: 7 + models.DatacenterModel: + type: "object" + required: + - "ID" + properties: + ID: + type: "string" + acronym: + type: "string" + description: "id of the DC" + bookingPrice: + type: "integer" + format: "int64" + cpu: + $ref: "#/definitions/models.DatacenterCpuModel" + description: + type: "string" + gpu: + type: 
"array" + items: + $ref: "#/definitions/models.DatacenterGpuModel" + hosts: + type: "array" + description: "list of host:port" + items: + type: "string" + logo: + type: "string" + name: + type: "string" + owner: + type: "string" + ram: + $ref: "#/definitions/models.DatacenterMemoryModel" + short_description: + type: "string" + type: + type: "string" + title: "DatacenterModel" + example: + owner: "owner" + short_description: "short_description" + acronym: "acronym" + hosts: + - "hosts" + - "hosts" + cpu: + shared: true + cores: 6 + platform: "platform" + architecture: "architecture" + minimum_memory: 1 + description: "description" + type: "type" + gpu: + - memory: 5 + cuda_cores: 5 + model: "model" + tensor_cores: 2 + - memory: 5 + cuda_cores: 5 + model: "model" + tensor_cores: 2 + bookingPrice: 0 + name: "name" + logo: "logo" + ID: "ID" + ram: + ecc: true + size: 7 + models.DatacenterNEWModel: + type: "object" + required: + - "acronym" + - "cpu" + - "description" + - "gpu" + - "hosts" + - "logo" + - "name" + - "ram" + - "short_description" + - "type" + properties: + acronym: + type: "string" + description: "id of the DC" + bookingPrice: + type: "integer" + format: "int64" + cpu: + $ref: "#/definitions/models.DatacenterCpuModel" + description: + type: "string" + gpu: + type: "array" + items: + $ref: "#/definitions/models.DatacenterGpuModel" + hosts: + type: "array" + description: "list of host:port" + items: + type: "string" + logo: + type: "string" + name: + type: "string" + owner: + type: "string" + ram: + $ref: "#/definitions/models.DatacenterMemoryModel" + short_description: + type: "string" + type: + type: "string" + title: "DatacenterNEWModel" + models.DatacenterObject: + type: "object" + properties: + referenceID: + description: "Data model ID" + $ref: "#/definitions/primitive.ObjectID" + title: "DatacenterObject" + example: {} + models.ExecutionRequirementsModel: + type: "object" + required: + - "cpus" + - "ram" + properties: + cpus: + type: "integer" + 
format: "int32" + disk_io: + type: "string" + gpus: + type: "integer" + format: "int32" + description: "Amount of GPUs needed" + parallel: + type: "boolean" + ram: + type: "integer" + format: "int32" + description: "Units in MB" + scaling_model: + type: "integer" + format: "int32" + title: "ExecutionRequirementsModel" + example: + cpus: 0 + parallel: true + scaling_model: 5 + gpus: 6 + disk_io: "disk_io" + ram: 1 + models.RepositoryModel: + type: "object" + properties: + credentials: + type: "string" + url: + type: "string" + title: "RepositoryModel" + example: + credentials: "credentials" + url: "url" + models.ScheduleDB: + type: "object" + properties: + ResourceQty: + $ref: "#/definitions/models.ExecutionRequirementsModel" + StartDate: + type: "string" + format: "datetime" + StopDate: + type: "string" + format: "datetime" + Workflow: + type: "string" + title: "ScheduleDB" + example: + StartDate: "StartDate" + ResourceQty: + cpus: 0 + parallel: true + scaling_model: 5 + gpus: 6 + disk_io: "disk_io" + ram: 1 + StopDate: "StopDate" + Workflow: "Workflow" + models.ScheduleInfo: + type: "object" + properties: + NextExecutions: + type: "array" + items: + type: "string" + Total: + type: "integer" + format: "int64" + title: "ScheduleInfo" + example: + Total: 0 + NextExecutions: + - "NextExecutions" + - "NextExecutions" + models.ScheduleTime: + type: "object" + title: "ScheduleTime" + models.SearchResult: + type: "object" + required: + - "computing" + properties: + computing: + type: "array" + items: + $ref: "#/definitions/models.ComputingModel" + data: + type: "array" + items: + $ref: "#/definitions/models.DataModel" + datacenter: + type: "array" + items: + $ref: "#/definitions/models.DatacenterModel" + storage: + type: "array" + items: + $ref: "#/definitions/models.StorageModel" + title: "SearchResult" + example: + computing: + - owner: "owner" + license: "license" + short_description: "short_description" + price: 5 + name: "name" + description: "description" + logo: 
"logo" + execution_requirements: + cpus: 0 + parallel: true + scaling_model: 5 + gpus: 6 + disk_io: "disk_io" + ram: 1 + ID: "5099803df3f4948bd2f98391" + repository: + credentials: "credentials" + url: "url" + type: "type" + - owner: "owner" + license: "license" + short_description: "short_description" + price: 5 + name: "name" + description: "description" + logo: "logo" + execution_requirements: + cpus: 0 + parallel: true + scaling_model: 5 + gpus: 6 + disk_io: "disk_io" + ram: 1 + ID: "5099803df3f4948bd2f98391" + repository: + credentials: "credentials" + url: "url" + type: "type" + data: + - short_description: "short_description" + protocol: + - "protocol" + - "protocol" + ftype: "ftype" + name: "name" + description: "description" + logo: "logo" + location: "location" + ID: "ID" + type: "file" + example: "example" + - short_description: "short_description" + protocol: + - "protocol" + - "protocol" + ftype: "ftype" + name: "name" + description: "description" + logo: "logo" + location: "location" + ID: "ID" + type: "file" + example: "example" + datacenter: + - owner: "owner" + short_description: "short_description" + acronym: "acronym" + hosts: + - "hosts" + - "hosts" + cpu: + shared: true + cores: 6 + platform: "platform" + architecture: "architecture" + minimum_memory: 1 + description: "description" + type: "type" + gpu: + - memory: 5 + cuda_cores: 5 + model: "model" + tensor_cores: 2 + - memory: 5 + cuda_cores: 5 + model: "model" + tensor_cores: 2 + bookingPrice: 0 + name: "name" + logo: "logo" + ID: "ID" + ram: + ecc: true + size: 7 + - owner: "owner" + short_description: "short_description" + acronym: "acronym" + hosts: + - "hosts" + - "hosts" + cpu: + shared: true + cores: 6 + platform: "platform" + architecture: "architecture" + minimum_memory: 1 + description: "description" + type: "type" + gpu: + - memory: 5 + cuda_cores: 5 + model: "model" + tensor_cores: 2 + - memory: 5 + cuda_cores: 5 + model: "model" + tensor_cores: 2 + bookingPrice: 0 + name: "name" 
+ logo: "logo" + ID: "ID" + ram: + ecc: true + size: 7 + storage: + - short_description: "short_description" + encryption: true + size: 6 + bookingPrice: 0 + DCacronym: "DCacronym" + name: "name" + description: "description" + logo: "logo" + ID: "ID" + redundancy: "redundancy" + throughput: "throughput" + type: "type" + - short_description: "short_description" + encryption: true + size: 6 + bookingPrice: 0 + DCacronym: "DCacronym" + name: "name" + description: "description" + logo: "logo" + ID: "ID" + redundancy: "redundancy" + throughput: "throughput" + type: "type" + models.StorageModel: + type: "object" + required: + - "ID" + properties: + DCacronym: + type: "string" + description: "Unique ID of the DC where it is the storage" + ID: + type: "string" + bookingPrice: + type: "integer" + format: "int32" + description: + type: "string" + encryption: + type: "boolean" + logo: + type: "string" + name: + type: "string" + redundancy: + type: "string" + short_description: + type: "string" + size: + type: "integer" + format: "int32" + throughput: + type: "string" + type: + type: "string" + title: "StorageModel" + example: + short_description: "short_description" + encryption: true + size: 6 + bookingPrice: 0 + DCacronym: "DCacronym" + name: "name" + description: "description" + logo: "logo" + ID: "ID" + redundancy: "redundancy" + throughput: "throughput" + type: "type" + models.StorageNEWModel: + type: "object" + required: + - "DCacronym" + - "description" + - "logo" + - "name" + - "short_description" + - "size" + - "type" + properties: + DCacronym: + type: "string" + description: "Unique ID of the DC where it is the storage" + bookingPrice: + type: "integer" + format: "int32" + description: + type: "string" + encryption: + type: "boolean" + logo: + type: "string" + name: + type: "string" + redundancy: + type: "string" + short_description: + type: "string" + size: + type: "integer" + format: "int32" + throughput: + type: "string" + type: + type: "string" + title: 
"StorageNEWModel" + models.StorageObject: + type: "object" + properties: + inputs: + type: "array" + items: + type: "string" + outputs: + type: "array" + items: + type: "string" + referenceID: + description: "Storage model ID" + $ref: "#/definitions/primitive.ObjectID" + title: "StorageObject" + example: + outputs: + - "outputs" + - "outputs" + inputs: + - "inputs" + - "inputs" + models.Workflow: + type: "object" + properties: + MxgraphXML: + type: "string" + description: "State of the mxgraph" + computing: + $ref: "#/definitions/models.ComputingObject" + data: + $ref: "#/definitions/models.DataObject" + datacenter: + $ref: "#/definitions/models.DatacenterObject" + schedules: + $ref: "#/definitions/models.WorkflowSchedule" + storage: + $ref: "#/definitions/models.StorageObject" + title: "Workflow" + example: + computing: + outputs: + - "outputs" + - "outputs" + inputs: + - "inputs" + - "inputs" + datacenterID: "datacenterID" + referenceID: {} + data: {} + schedules: + StartDate: "StartDate" + cron: "cron" + duration: 7200 + StopDate: "StopDate" + isBooked: true + IsService: true + events: "events" + datacenter: {} + storage: + outputs: + - "outputs" + - "outputs" + inputs: + - "inputs" + - "inputs" + MxgraphXML: "MxgraphXML" + models.WorkflowSchedule: + type: "object" + properties: + IsService: + type: "boolean" + description: "Service: true, Task: false" + StartDate: + type: "string" + format: "datetime" + StopDate: + type: "string" + format: "datetime" + cron: + type: "string" + duration: + type: "integer" + format: "int32" + example: 7200 + description: "Durantion in seconds" + events: + type: "string" + isBooked: + type: "boolean" + title: "WorkflowSchedule" + example: + StartDate: "StartDate" + cron: "cron" + duration: 7200 + StopDate: "StopDate" + isBooked: true + IsService: true + events: "events" + models.Workspace: + type: "object" + properties: + Workflows: + $ref: "#/definitions/models.Workflow" + computing: + type: "array" + items: + type: "string" + 
data: + type: "array" + items: + type: "string" + datacenter: + type: "array" + items: + type: "string" + storage: + type: "array" + items: + type: "string" + user_id: + type: "string" + title: "Workspace" + example: + computing: + - "computing" + - "computing" + data: + - "data" + - "data" + Workflows: + computing: + outputs: + - "outputs" + - "outputs" + inputs: + - "inputs" + - "inputs" + datacenterID: "datacenterID" + referenceID: {} + data: {} + schedules: + StartDate: "StartDate" + cron: "cron" + duration: 7200 + StopDate: "StopDate" + isBooked: true + IsService: true + events: "events" + datacenter: {} + storage: + outputs: + - "outputs" + - "outputs" + inputs: + - "inputs" + - "inputs" + MxgraphXML: "MxgraphXML" + user_id: "user_id" + datacenter: + - "datacenter" + - "datacenter" + storage: + - "storage" + - "storage" + models.WorkspaceModel: + type: "object" + properties: + computing: + type: "array" + items: + $ref: "#/definitions/models.ComputingModel" + data: + type: "array" + items: + $ref: "#/definitions/models.DataModel" + datacenter: + type: "array" + items: + $ref: "#/definitions/models.DatacenterModel" + storage: + type: "array" + items: + $ref: "#/definitions/models.StorageModel" + user_id: + type: "string" + title: "WorkspaceModel" + example: + computing: + - owner: "owner" + license: "license" + short_description: "short_description" + price: 5 + name: "name" + description: "description" + logo: "logo" + execution_requirements: + cpus: 0 + parallel: true + scaling_model: 5 + gpus: 6 + disk_io: "disk_io" + ram: 1 + ID: "5099803df3f4948bd2f98391" + repository: + credentials: "credentials" + url: "url" + type: "type" + - owner: "owner" + license: "license" + short_description: "short_description" + price: 5 + name: "name" + description: "description" + logo: "logo" + execution_requirements: + cpus: 0 + parallel: true + scaling_model: 5 + gpus: 6 + disk_io: "disk_io" + ram: 1 + ID: "5099803df3f4948bd2f98391" + repository: + credentials: 
"credentials" + url: "url" + type: "type" + data: + - short_description: "short_description" + protocol: + - "protocol" + - "protocol" + ftype: "ftype" + name: "name" + description: "description" + logo: "logo" + location: "location" + ID: "ID" + type: "file" + example: "example" + - short_description: "short_description" + protocol: + - "protocol" + - "protocol" + ftype: "ftype" + name: "name" + description: "description" + logo: "logo" + location: "location" + ID: "ID" + type: "file" + example: "example" + user_id: "user_id" + datacenter: + - owner: "owner" + short_description: "short_description" + acronym: "acronym" + hosts: + - "hosts" + - "hosts" + cpu: + shared: true + cores: 6 + platform: "platform" + architecture: "architecture" + minimum_memory: 1 + description: "description" + type: "type" + gpu: + - memory: 5 + cuda_cores: 5 + model: "model" + tensor_cores: 2 + - memory: 5 + cuda_cores: 5 + model: "model" + tensor_cores: 2 + bookingPrice: 0 + name: "name" + logo: "logo" + ID: "ID" + ram: + ecc: true + size: 7 + - owner: "owner" + short_description: "short_description" + acronym: "acronym" + hosts: + - "hosts" + - "hosts" + cpu: + shared: true + cores: 6 + platform: "platform" + architecture: "architecture" + minimum_memory: 1 + description: "description" + type: "type" + gpu: + - memory: 5 + cuda_cores: 5 + model: "model" + tensor_cores: 2 + - memory: 5 + cuda_cores: 5 + model: "model" + tensor_cores: 2 + bookingPrice: 0 + name: "name" + logo: "logo" + ID: "ID" + ram: + ecc: true + size: 7 + storage: + - short_description: "short_description" + encryption: true + size: 6 + bookingPrice: 0 + DCacronym: "DCacronym" + name: "name" + description: "description" + logo: "logo" + ID: "ID" + redundancy: "redundancy" + throughput: "throughput" + type: "type" + - short_description: "short_description" + encryption: true + size: 6 + bookingPrice: 0 + DCacronym: "DCacronym" + name: "name" + description: "description" + logo: "logo" + ID: "ID" + redundancy: 
"redundancy" + throughput: "throughput" + type: "type" + primitive.ObjectID: + type: "object" + title: "ObjectID" + time.Time: + type: "object" + title: "Time" diff --git a/selfapi/api_computing.go b/selfapi/api_computing.go new file mode 100644 index 0000000..60cfded --- /dev/null +++ b/selfapi/api_computing.go @@ -0,0 +1,269 @@ +/* + * oc-catalog API + * + * Backend of the oc-search project + * + * API version: 1.0.0 + * Contact: opencloud@irt-saintexupery.com + * Generated by: Swagger Codegen (https://github.com/swagger-api/swagger-codegen.git) + */ + +package swagger + +import ( + "context" + "fmt" + "io/ioutil" + "net/http" + "net/url" + "strings" +) + +// Linger please +var ( + _ context.Context +) + +type ComputingApiService service + +/* +ComputingApiService +Submit a computing object + - @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). + - @param body The object content +*/ +func (a *ComputingApiService) ComputingControllerAddComputing(ctx context.Context, body ModelsComputingNewModel) (*http.Response, error) { + var ( + localVarHttpMethod = strings.ToUpper("Post") + localVarPostBody interface{} + localVarFileName string + localVarFileBytes []byte + ) + + // create path and map variables + localVarPath := a.client.cfg.BasePath + "/computing/" + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + + // to determine the Content-Type header + localVarHttpContentTypes := []string{} + + // set Content-Type header + localVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + + // to determine the Accept header + localVarHttpHeaderAccepts := []string{} + + // set Accept header + localVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts) + if 
localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + // body params + localVarPostBody = &body + r, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + if err != nil { + return nil, err + } + + localVarHttpResponse, err := a.client.callAPI(r) + if err != nil || localVarHttpResponse == nil { + return localVarHttpResponse, err + } + + localVarBody, err := ioutil.ReadAll(localVarHttpResponse.Body) + localVarHttpResponse.Body.Close() + if err != nil { + return localVarHttpResponse, err + } + + if localVarHttpResponse.StatusCode >= 300 { + newErr := GenericSwaggerError{ + body: localVarBody, + error: localVarHttpResponse.Status, + } + + return localVarHttpResponse, newErr + } + + return localVarHttpResponse, nil +} + +/* +ComputingApiService +Find a computing resource based on ID + - @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). 
+ - @param iD The ID of the resource + +@return ModelsComputingModel +*/ +func (a *ComputingApiService) ComputingControllerGetComputingByID(ctx context.Context, iD string) (ModelsComputingModel, *http.Response, error) { + var ( + localVarHttpMethod = strings.ToUpper("Get") + localVarPostBody interface{} + localVarFileName string + localVarFileBytes []byte + localVarReturnValue ModelsComputingModel + ) + + // create path and map variables + localVarPath := a.client.cfg.BasePath + "/computing/{ID}" + localVarPath = strings.Replace(localVarPath, "{"+"ID"+"}", fmt.Sprintf("%v", iD), -1) + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + + // to determine the Content-Type header + localVarHttpContentTypes := []string{} + + // set Content-Type header + localVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + + // to determine the Accept header + localVarHttpHeaderAccepts := []string{} + + // set Accept header + localVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + r, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + if err != nil { + return localVarReturnValue, nil, err + } + + localVarHttpResponse, err := a.client.callAPI(r) + if err != nil || localVarHttpResponse == nil { + return localVarReturnValue, localVarHttpResponse, err + } + + localVarBody, err := ioutil.ReadAll(localVarHttpResponse.Body) + localVarHttpResponse.Body.Close() + if err != nil { + return localVarReturnValue, localVarHttpResponse, err + } + + if localVarHttpResponse.StatusCode < 300 { + // If we succeed, return the data, otherwise pass on 
to decode error. + err = a.client.decode(&localVarReturnValue, localVarBody, localVarHttpResponse.Header.Get("Content-Type")) + return localVarReturnValue, localVarHttpResponse, err + } + + if localVarHttpResponse.StatusCode >= 300 { + newErr := GenericSwaggerError{ + body: localVarBody, + error: localVarHttpResponse.Status, + } + + if localVarHttpResponse.StatusCode == 200 { + var v ModelsComputingModel + err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")) + if err != nil { + newErr.error = err.Error() + return localVarReturnValue, localVarHttpResponse, newErr + } + newErr.model = v + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, nil +} + +/* +ComputingApiService +Return Computing objects if found in the DB. Not found IDs will be ignored + - @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). 
+ - @param iDs List of computing IDs + +@return []ModelsComputingModel +*/ +func (a *ComputingApiService) ComputingControllerGetMultipleComputingByIDs(ctx context.Context, iDs []string) ([]ModelsComputingModel, *http.Response, error) { + var ( + localVarHttpMethod = strings.ToUpper("Get") + localVarPostBody interface{} + localVarFileName string + localVarFileBytes []byte + localVarReturnValue []ModelsComputingModel + ) + + // create path and map variables + localVarPath := a.client.cfg.BasePath + "/computing/multi/{IDs}" + localVarPath = strings.Replace(localVarPath, "{"+"IDs"+"}", fmt.Sprintf("%v", iDs), -1) + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + + // to determine the Content-Type header + localVarHttpContentTypes := []string{} + + // set Content-Type header + localVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + + // to determine the Accept header + localVarHttpHeaderAccepts := []string{} + + // set Accept header + localVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + r, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + if err != nil { + return localVarReturnValue, nil, err + } + + localVarHttpResponse, err := a.client.callAPI(r) + if err != nil || localVarHttpResponse == nil { + return localVarReturnValue, localVarHttpResponse, err + } + + localVarBody, err := ioutil.ReadAll(localVarHttpResponse.Body) + localVarHttpResponse.Body.Close() + if err != nil { + return localVarReturnValue, localVarHttpResponse, err + } + + if localVarHttpResponse.StatusCode < 300 { + // If we succeed, return 
the data, otherwise pass on to decode error. + err = a.client.decode(&localVarReturnValue, localVarBody, localVarHttpResponse.Header.Get("Content-Type")) + return localVarReturnValue, localVarHttpResponse, err + } + + if localVarHttpResponse.StatusCode >= 300 { + newErr := GenericSwaggerError{ + body: localVarBody, + error: localVarHttpResponse.Status, + } + + if localVarHttpResponse.StatusCode == 200 { + var v []ModelsComputingModel + err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")) + if err != nil { + newErr.error = err.Error() + return localVarReturnValue, localVarHttpResponse, newErr + } + newErr.model = v + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, nil +} diff --git a/selfapi/api_data.go b/selfapi/api_data.go new file mode 100644 index 0000000..e6d61a5 --- /dev/null +++ b/selfapi/api_data.go @@ -0,0 +1,269 @@ +/* + * oc-catalog API + * + * Backend of the oc-search project + * + * API version: 1.0.0 + * Contact: opencloud@irt-saintexupery.com + * Generated by: Swagger Codegen (https://github.com/swagger-api/swagger-codegen.git) + */ + +package swagger + +import ( + "context" + "fmt" + "io/ioutil" + "net/http" + "net/url" + "strings" +) + +// Linger please +var ( + _ context.Context +) + +type DataApiService service + +/* +DataApiService +Submit data object + - @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). 
+ - @param body The object content +*/ +func (a *DataApiService) DataControllerCreateData(ctx context.Context, body ModelsDataNewModel) (*http.Response, error) { + var ( + localVarHttpMethod = strings.ToUpper("Post") + localVarPostBody interface{} + localVarFileName string + localVarFileBytes []byte + ) + + // create path and map variables + localVarPath := a.client.cfg.BasePath + "/data/" + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + + // to determine the Content-Type header + localVarHttpContentTypes := []string{} + + // set Content-Type header + localVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + + // to determine the Accept header + localVarHttpHeaderAccepts := []string{} + + // set Accept header + localVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + // body params + localVarPostBody = &body + r, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + if err != nil { + return nil, err + } + + localVarHttpResponse, err := a.client.callAPI(r) + if err != nil || localVarHttpResponse == nil { + return localVarHttpResponse, err + } + + localVarBody, err := ioutil.ReadAll(localVarHttpResponse.Body) + localVarHttpResponse.Body.Close() + if err != nil { + return localVarHttpResponse, err + } + + if localVarHttpResponse.StatusCode >= 300 { + newErr := GenericSwaggerError{ + body: localVarBody, + error: localVarHttpResponse.Status, + } + + return localVarHttpResponse, newErr + } + + return localVarHttpResponse, nil +} + +/* +DataApiService +Find rType data based on ID + - @param ctx context.Context - for 
authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). + - @param iD The ID of the data resource + +@return ModelsDataModel +*/ +func (a *DataApiService) DataControllerGetDataByID(ctx context.Context, iD string) (ModelsDataModel, *http.Response, error) { + var ( + localVarHttpMethod = strings.ToUpper("Get") + localVarPostBody interface{} + localVarFileName string + localVarFileBytes []byte + localVarReturnValue ModelsDataModel + ) + + // create path and map variables + localVarPath := a.client.cfg.BasePath + "/data/{ID}" + localVarPath = strings.Replace(localVarPath, "{"+"ID"+"}", fmt.Sprintf("%v", iD), -1) + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + + // to determine the Content-Type header + localVarHttpContentTypes := []string{} + + // set Content-Type header + localVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + + // to determine the Accept header + localVarHttpHeaderAccepts := []string{} + + // set Accept header + localVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + r, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + if err != nil { + return localVarReturnValue, nil, err + } + + localVarHttpResponse, err := a.client.callAPI(r) + if err != nil || localVarHttpResponse == nil { + return localVarReturnValue, localVarHttpResponse, err + } + + localVarBody, err := ioutil.ReadAll(localVarHttpResponse.Body) + localVarHttpResponse.Body.Close() + if err != nil { + return localVarReturnValue, localVarHttpResponse, err + } + + if 
localVarHttpResponse.StatusCode < 300 { + // If we succeed, return the data, otherwise pass on to decode error. + err = a.client.decode(&localVarReturnValue, localVarBody, localVarHttpResponse.Header.Get("Content-Type")) + return localVarReturnValue, localVarHttpResponse, err + } + + if localVarHttpResponse.StatusCode >= 300 { + newErr := GenericSwaggerError{ + body: localVarBody, + error: localVarHttpResponse.Status, + } + + if localVarHttpResponse.StatusCode == 200 { + var v ModelsDataModel + err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")) + if err != nil { + newErr.error = err.Error() + return localVarReturnValue, localVarHttpResponse, newErr + } + newErr.model = v + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, nil +} + +/* +DataApiService +Return Data object if found in the DB. Not found IDs will be ignored + - @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). 
+ - @param iDs List of data IDs + +@return []ModelsDataModel +*/ +func (a *DataApiService) DataControllerGetMultipleDataByIDs(ctx context.Context, iDs []string) ([]ModelsDataModel, *http.Response, error) { + var ( + localVarHttpMethod = strings.ToUpper("Get") + localVarPostBody interface{} + localVarFileName string + localVarFileBytes []byte + localVarReturnValue []ModelsDataModel + ) + + // create path and map variables + localVarPath := a.client.cfg.BasePath + "/data/multi/{IDs}" + localVarPath = strings.Replace(localVarPath, "{"+"IDs"+"}", fmt.Sprintf("%v", iDs), -1) + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + + // to determine the Content-Type header + localVarHttpContentTypes := []string{} + + // set Content-Type header + localVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + + // to determine the Accept header + localVarHttpHeaderAccepts := []string{} + + // set Accept header + localVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + r, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + if err != nil { + return localVarReturnValue, nil, err + } + + localVarHttpResponse, err := a.client.callAPI(r) + if err != nil || localVarHttpResponse == nil { + return localVarReturnValue, localVarHttpResponse, err + } + + localVarBody, err := ioutil.ReadAll(localVarHttpResponse.Body) + localVarHttpResponse.Body.Close() + if err != nil { + return localVarReturnValue, localVarHttpResponse, err + } + + if localVarHttpResponse.StatusCode < 300 { + // If we succeed, return the data, otherwise pass on to decode 
error. + err = a.client.decode(&localVarReturnValue, localVarBody, localVarHttpResponse.Header.Get("Content-Type")) + return localVarReturnValue, localVarHttpResponse, err + } + + if localVarHttpResponse.StatusCode >= 300 { + newErr := GenericSwaggerError{ + body: localVarBody, + error: localVarHttpResponse.Status, + } + + if localVarHttpResponse.StatusCode == 200 { + var v []ModelsDataModel + err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")) + if err != nil { + newErr.error = err.Error() + return localVarReturnValue, localVarHttpResponse, newErr + } + newErr.model = v + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, nil +} diff --git a/selfapi/api_datacenter.go b/selfapi/api_datacenter.go new file mode 100644 index 0000000..1cdc03c --- /dev/null +++ b/selfapi/api_datacenter.go @@ -0,0 +1,269 @@ +/* + * oc-catalog API + * + * Backend of the oc-search project + * + * API version: 1.0.0 + * Contact: opencloud@irt-saintexupery.com + * Generated by: Swagger Codegen (https://github.com/swagger-api/swagger-codegen.git) + */ + +package swagger + +import ( + "context" + "fmt" + "io/ioutil" + "net/http" + "net/url" + "strings" +) + +// Linger please +var ( + _ context.Context +) + +type DatacenterApiService service + +/* +DatacenterApiService +submit Datacenter object + - @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). 
+ - @param body The object content +*/ +func (a *DatacenterApiService) DatacenterControllerCreateDatacenter(ctx context.Context, body ModelsDatacenterNewModel) (*http.Response, error) { + var ( + localVarHttpMethod = strings.ToUpper("Post") + localVarPostBody interface{} + localVarFileName string + localVarFileBytes []byte + ) + + // create path and map variables + localVarPath := a.client.cfg.BasePath + "/datacenter/" + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + + // to determine the Content-Type header + localVarHttpContentTypes := []string{} + + // set Content-Type header + localVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + + // to determine the Accept header + localVarHttpHeaderAccepts := []string{} + + // set Accept header + localVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + // body params + localVarPostBody = &body + r, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + if err != nil { + return nil, err + } + + localVarHttpResponse, err := a.client.callAPI(r) + if err != nil || localVarHttpResponse == nil { + return localVarHttpResponse, err + } + + localVarBody, err := ioutil.ReadAll(localVarHttpResponse.Body) + localVarHttpResponse.Body.Close() + if err != nil { + return localVarHttpResponse, err + } + + if localVarHttpResponse.StatusCode >= 300 { + newErr := GenericSwaggerError{ + body: localVarBody, + error: localVarHttpResponse.Status, + } + + return localVarHttpResponse, newErr + } + + return localVarHttpResponse, nil +} + +/* +DatacenterApiService +Return Datacenter objects if 
found in the DB. Not found IDs will be ignored + - @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). + - @param iDs List of datacenter IDs + +@return []ModelsComputingModel +*/ +func (a *DatacenterApiService) DatacenterControllerGetMultipleDatacentersByIDs(ctx context.Context, iDs []string) ([]ModelsComputingModel, *http.Response, error) { + var ( + localVarHttpMethod = strings.ToUpper("Get") + localVarPostBody interface{} + localVarFileName string + localVarFileBytes []byte + localVarReturnValue []ModelsComputingModel + ) + + // create path and map variables + localVarPath := a.client.cfg.BasePath + "/datacenter/multi/{IDs}" + localVarPath = strings.Replace(localVarPath, "{"+"IDs"+"}", fmt.Sprintf("%v", iDs), -1) + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + + // to determine the Content-Type header + localVarHttpContentTypes := []string{} + + // set Content-Type header + localVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + + // to determine the Accept header + localVarHttpHeaderAccepts := []string{} + + // set Accept header + localVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + r, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + if err != nil { + return localVarReturnValue, nil, err + } + + localVarHttpResponse, err := a.client.callAPI(r) + if err != nil || localVarHttpResponse == nil { + return localVarReturnValue, localVarHttpResponse, err + } + + localVarBody, err := 
ioutil.ReadAll(localVarHttpResponse.Body) + localVarHttpResponse.Body.Close() + if err != nil { + return localVarReturnValue, localVarHttpResponse, err + } + + if localVarHttpResponse.StatusCode < 300 { + // If we succeed, return the data, otherwise pass on to decode error. + err = a.client.decode(&localVarReturnValue, localVarBody, localVarHttpResponse.Header.Get("Content-Type")) + return localVarReturnValue, localVarHttpResponse, err + } + + if localVarHttpResponse.StatusCode >= 300 { + newErr := GenericSwaggerError{ + body: localVarBody, + error: localVarHttpResponse.Status, + } + + if localVarHttpResponse.StatusCode == 200 { + var v []ModelsComputingModel + err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")) + if err != nil { + newErr.error = err.Error() + return localVarReturnValue, localVarHttpResponse, newErr + } + newErr.model = v + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, nil +} + +/* +DatacenterApiService +find datacenter by ID + - @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). 
+ - @param iD the ID you want to get + +@return ModelsDatacenterModel +*/ +func (a *DatacenterApiService) DatacenterControllerGetOneDatacenter(ctx context.Context, iD string) (ModelsDatacenterModel, *http.Response, error) { + var ( + localVarHttpMethod = strings.ToUpper("Get") + localVarPostBody interface{} + localVarFileName string + localVarFileBytes []byte + localVarReturnValue ModelsDatacenterModel + ) + + // create path and map variables + localVarPath := a.client.cfg.BasePath + "/datacenter/{ID}" + localVarPath = strings.Replace(localVarPath, "{"+"ID"+"}", fmt.Sprintf("%v", iD), -1) + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + + // to determine the Content-Type header + localVarHttpContentTypes := []string{} + + // set Content-Type header + localVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + + // to determine the Accept header + localVarHttpHeaderAccepts := []string{} + + // set Accept header + localVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + r, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + if err != nil { + return localVarReturnValue, nil, err + } + + localVarHttpResponse, err := a.client.callAPI(r) + if err != nil || localVarHttpResponse == nil { + return localVarReturnValue, localVarHttpResponse, err + } + + localVarBody, err := ioutil.ReadAll(localVarHttpResponse.Body) + localVarHttpResponse.Body.Close() + if err != nil { + return localVarReturnValue, localVarHttpResponse, err + } + + if localVarHttpResponse.StatusCode < 300 { + // If we succeed, return the data, otherwise 
pass on to decode error. + err = a.client.decode(&localVarReturnValue, localVarBody, localVarHttpResponse.Header.Get("Content-Type")) + return localVarReturnValue, localVarHttpResponse, err + } + + if localVarHttpResponse.StatusCode >= 300 { + newErr := GenericSwaggerError{ + body: localVarBody, + error: localVarHttpResponse.Status, + } + + if localVarHttpResponse.StatusCode == 200 { + var v ModelsDatacenterModel + err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")) + if err != nil { + newErr.error = err.Error() + return localVarReturnValue, localVarHttpResponse, newErr + } + newErr.model = v + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, nil +} diff --git a/selfapi/api_schedule.go b/selfapi/api_schedule.go new file mode 100644 index 0000000..01bf66f --- /dev/null +++ b/selfapi/api_schedule.go @@ -0,0 +1,465 @@ +/* + * oc-catalog API + * + * Backend of the oc-search project + * + * API version: 1.0.0 + * Contact: opencloud@irt-saintexupery.com + * Generated by: Swagger Codegen (https://github.com/swagger-api/swagger-codegen.git) + */ + +package swagger + +import ( + "context" + "io/ioutil" + "net/http" + "net/url" + "strings" +) + +// Linger please +var ( + _ context.Context +) + +type ScheduleApiService service + +/* +ScheduleApiService +Check for availability of this DC + - @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). 
+ - @param cron Cron syntax + - @param duration Duration in seconds + - @param startDate RFC3339 time for startDate + - @param stopDate RFC3339 time for stopDate + - @param requirements The object content +*/ +func (a *ScheduleApiService) ScheduleControllerCheckIfScheduleCanBeCreatedInThisDC(ctx context.Context, cron string, duration int32, startDate interface{}, stopDate interface{}, requirements ModelsExecutionRequirementsModel) (*http.Response, error) { + var ( + localVarHttpMethod = strings.ToUpper("Post") + localVarPostBody interface{} + localVarFileName string + localVarFileBytes []byte + ) + + // create path and map variables + localVarPath := a.client.cfg.BasePath + "/schedule/check" + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + + localVarQueryParams.Add("cron", parameterToString(cron, "")) + localVarQueryParams.Add("duration", parameterToString(duration, "")) + localVarQueryParams.Add("startDate", parameterToString(startDate, "")) + localVarQueryParams.Add("stopDate", parameterToString(stopDate, "")) + // to determine the Content-Type header + localVarHttpContentTypes := []string{} + + // set Content-Type header + localVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + + // to determine the Accept header + localVarHttpHeaderAccepts := []string{} + + // set Accept header + localVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + // body params + localVarPostBody = &requirements + r, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + if err != nil { + return nil, err + } + + 
localVarHttpResponse, err := a.client.callAPI(r) + if err != nil || localVarHttpResponse == nil { + return localVarHttpResponse, err + } + + localVarBody, err := ioutil.ReadAll(localVarHttpResponse.Body) + localVarHttpResponse.Body.Close() + if err != nil { + return localVarHttpResponse, err + } + + if localVarHttpResponse.StatusCode >= 300 { + newErr := GenericSwaggerError{ + body: localVarBody, + error: localVarHttpResponse.Status, + } + + return localVarHttpResponse, newErr + } + + return localVarHttpResponse, nil +} + +/* +ScheduleApiService +Create schedule for a workflow. It will return some future executions just as information + - @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). + - @param dcName Name of the node (oc-catalog) from where the workflow comes. + - @param workflowName Workflow Name + - @param cron Cron syntax with year. If no year is specified, will use the current + - @param duration Duration in seconds + - @param startDate RFC3339 time for startDate + - @param stopDate RFC3339 time for stopDate + - @param requirements The object content + +@return ModelsScheduleInfo +*/ +func (a *ScheduleApiService) ScheduleControllerCreateSchedule(ctx context.Context, dcName string, workflowName string, cron string, duration int32, startDate interface{}, stopDate interface{}, requirements ModelsExecutionRequirementsModel) (ModelsScheduleInfo, *http.Response, error) { + var ( + localVarHttpMethod = strings.ToUpper("Post") + localVarPostBody interface{} + localVarFileName string + localVarFileBytes []byte + localVarReturnValue ModelsScheduleInfo + ) + + // create path and map variables + localVarPath := a.client.cfg.BasePath + "/schedule/book" + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + + localVarQueryParams.Add("dcName", parameterToString(dcName, "")) + 
localVarQueryParams.Add("workflowName", parameterToString(workflowName, "")) + localVarQueryParams.Add("cron", parameterToString(cron, "")) + localVarQueryParams.Add("duration", parameterToString(duration, "")) + localVarQueryParams.Add("startDate", parameterToString(startDate, "")) + localVarQueryParams.Add("stopDate", parameterToString(stopDate, "")) + // to determine the Content-Type header + localVarHttpContentTypes := []string{} + + // set Content-Type header + localVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + + // to determine the Accept header + localVarHttpHeaderAccepts := []string{} + + // set Accept header + localVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + // body params + localVarPostBody = &requirements + r, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + if err != nil { + return localVarReturnValue, nil, err + } + + localVarHttpResponse, err := a.client.callAPI(r) + if err != nil || localVarHttpResponse == nil { + return localVarReturnValue, localVarHttpResponse, err + } + + localVarBody, err := ioutil.ReadAll(localVarHttpResponse.Body) + localVarHttpResponse.Body.Close() + if err != nil { + return localVarReturnValue, localVarHttpResponse, err + } + + if localVarHttpResponse.StatusCode < 300 { + // If we succeed, return the data, otherwise pass on to decode error. 
+ err = a.client.decode(&localVarReturnValue, localVarBody, localVarHttpResponse.Header.Get("Content-Type")) + return localVarReturnValue, localVarHttpResponse, err + } + + if localVarHttpResponse.StatusCode >= 300 { + newErr := GenericSwaggerError{ + body: localVarBody, + error: localVarHttpResponse.Status, + } + + if localVarHttpResponse.StatusCode == 200 { + var v ModelsScheduleInfo + err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")) + if err != nil { + newErr.error = err.Error() + return localVarReturnValue, localVarHttpResponse, newErr + } + newErr.model = v + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, nil +} + +/* +ScheduleApiService +Give a date, get the next date where there are at least on schedule. If no hours specified, will assume 00:00 + - @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). 
+ - @param baseDate Base date + +@return TimeTime +*/ +func (a *ScheduleApiService) ScheduleControllerGetNextSchedule(ctx context.Context, baseDate interface{}) (TimeTime, *http.Response, error) { + var ( + localVarHttpMethod = strings.ToUpper("Get") + localVarPostBody interface{} + localVarFileName string + localVarFileBytes []byte + localVarReturnValue TimeTime + ) + + // create path and map variables + localVarPath := a.client.cfg.BasePath + "/schedule/next" + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + + localVarQueryParams.Add("baseDate", parameterToString(baseDate, "")) + // to determine the Content-Type header + localVarHttpContentTypes := []string{} + + // set Content-Type header + localVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + + // to determine the Accept header + localVarHttpHeaderAccepts := []string{} + + // set Accept header + localVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + r, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + if err != nil { + return localVarReturnValue, nil, err + } + + localVarHttpResponse, err := a.client.callAPI(r) + if err != nil || localVarHttpResponse == nil { + return localVarReturnValue, localVarHttpResponse, err + } + + localVarBody, err := ioutil.ReadAll(localVarHttpResponse.Body) + localVarHttpResponse.Body.Close() + if err != nil { + return localVarReturnValue, localVarHttpResponse, err + } + + if localVarHttpResponse.StatusCode < 300 { + // If we succeed, return the data, otherwise pass on to decode error. 
+ err = a.client.decode(&localVarReturnValue, localVarBody, localVarHttpResponse.Header.Get("Content-Type")) + return localVarReturnValue, localVarHttpResponse, err + } + + if localVarHttpResponse.StatusCode >= 300 { + newErr := GenericSwaggerError{ + body: localVarBody, + error: localVarHttpResponse.Status, + } + + if localVarHttpResponse.StatusCode == 200 { + var v TimeTime + err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")) + if err != nil { + newErr.error = err.Error() + return localVarReturnValue, localVarHttpResponse, newErr + } + newErr.model = v + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, nil +} + +/* +ScheduleApiService +Give a date, get the previous date where there are at least on schedule. If no hours specified, will assume 00:00 + - @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). 
+ - @param baseDate Base date + +@return TimeTime +*/ +func (a *ScheduleApiService) ScheduleControllerGetPreviousSchedule(ctx context.Context, baseDate interface{}) (TimeTime, *http.Response, error) { + var ( + localVarHttpMethod = strings.ToUpper("Get") + localVarPostBody interface{} + localVarFileName string + localVarFileBytes []byte + localVarReturnValue TimeTime + ) + + // create path and map variables + localVarPath := a.client.cfg.BasePath + "/schedule/previous" + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + + localVarQueryParams.Add("baseDate", parameterToString(baseDate, "")) + // to determine the Content-Type header + localVarHttpContentTypes := []string{} + + // set Content-Type header + localVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + + // to determine the Accept header + localVarHttpHeaderAccepts := []string{} + + // set Accept header + localVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + r, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + if err != nil { + return localVarReturnValue, nil, err + } + + localVarHttpResponse, err := a.client.callAPI(r) + if err != nil || localVarHttpResponse == nil { + return localVarReturnValue, localVarHttpResponse, err + } + + localVarBody, err := ioutil.ReadAll(localVarHttpResponse.Body) + localVarHttpResponse.Body.Close() + if err != nil { + return localVarReturnValue, localVarHttpResponse, err + } + + if localVarHttpResponse.StatusCode < 300 { + // If we succeed, return the data, otherwise pass on to decode error. 
+ err = a.client.decode(&localVarReturnValue, localVarBody, localVarHttpResponse.Header.Get("Content-Type")) + return localVarReturnValue, localVarHttpResponse, err + } + + if localVarHttpResponse.StatusCode >= 300 { + newErr := GenericSwaggerError{ + body: localVarBody, + error: localVarHttpResponse.Status, + } + + if localVarHttpResponse.StatusCode == 200 { + var v TimeTime + err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")) + if err != nil { + newErr.error = err.Error() + return localVarReturnValue, localVarHttpResponse, newErr + } + newErr.model = v + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, nil +} + +/* +ScheduleApiService +Get a list of next startDates schedules (inclusive). If timezone is not specified, will assume UTC + - @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). 
+ - @param startDate Start date + - @param stopDate End date + +@return []ModelsScheduleDb +*/ +func (a *ScheduleApiService) ScheduleControllerGetSchedules(ctx context.Context, startDate interface{}, stopDate interface{}) ([]ModelsScheduleDb, *http.Response, error) { + var ( + localVarHttpMethod = strings.ToUpper("Get") + localVarPostBody interface{} + localVarFileName string + localVarFileBytes []byte + localVarReturnValue []ModelsScheduleDb + ) + + // create path and map variables + localVarPath := a.client.cfg.BasePath + "/schedule/" + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + + localVarQueryParams.Add("startDate", parameterToString(startDate, "")) + localVarQueryParams.Add("stopDate", parameterToString(stopDate, "")) + // to determine the Content-Type header + localVarHttpContentTypes := []string{} + + // set Content-Type header + localVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + + // to determine the Accept header + localVarHttpHeaderAccepts := []string{} + + // set Accept header + localVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + r, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + if err != nil { + return localVarReturnValue, nil, err + } + + localVarHttpResponse, err := a.client.callAPI(r) + if err != nil || localVarHttpResponse == nil { + return localVarReturnValue, localVarHttpResponse, err + } + + localVarBody, err := ioutil.ReadAll(localVarHttpResponse.Body) + localVarHttpResponse.Body.Close() + if err != nil { + return localVarReturnValue, localVarHttpResponse, err + } + + 
if localVarHttpResponse.StatusCode < 300 { + // If we succeed, return the data, otherwise pass on to decode error. + err = a.client.decode(&localVarReturnValue, localVarBody, localVarHttpResponse.Header.Get("Content-Type")) + return localVarReturnValue, localVarHttpResponse, err + } + + if localVarHttpResponse.StatusCode >= 300 { + newErr := GenericSwaggerError{ + body: localVarBody, + error: localVarHttpResponse.Status, + } + + if localVarHttpResponse.StatusCode == 200 { + var v []ModelsScheduleDb + err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")) + if err != nil { + newErr.error = err.Error() + return localVarReturnValue, localVarHttpResponse, newErr + } + newErr.model = v + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, nil +} diff --git a/selfapi/api_search.go b/selfapi/api_search.go new file mode 100644 index 0000000..23f73f1 --- /dev/null +++ b/selfapi/api_search.go @@ -0,0 +1,113 @@ +/* + * oc-catalog API + * + * Backend of the oc-search project + * + * API version: 1.0.0 + * Contact: opencloud@irt-saintexupery.com + * Generated by: Swagger Codegen (https://github.com/swagger-api/swagger-codegen.git) + */ + +package swagger + +import ( + "context" + "io/ioutil" + "net/http" + "net/url" + "strings" +) + +// Linger please +var ( + _ context.Context +) + +type SearchApiService service + +/* +SearchApiService +find resources by word + - @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). 
+ - @param word Word to search across all resources + +@return ModelsSearchResult +*/ +func (a *SearchApiService) SearchControllerSearchByWord(ctx context.Context, word string) (ModelsSearchResult, *http.Response, error) { + var ( + localVarHttpMethod = strings.ToUpper("Get") + localVarPostBody interface{} + localVarFileName string + localVarFileBytes []byte + localVarReturnValue ModelsSearchResult + ) + + // create path and map variables + localVarPath := a.client.cfg.BasePath + "/search/byWord" + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + + localVarQueryParams.Add("word", parameterToString(word, "")) + // to determine the Content-Type header + localVarHttpContentTypes := []string{} + + // set Content-Type header + localVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + + // to determine the Accept header + localVarHttpHeaderAccepts := []string{} + + // set Accept header + localVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + r, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + if err != nil { + return localVarReturnValue, nil, err + } + + localVarHttpResponse, err := a.client.callAPI(r) + if err != nil || localVarHttpResponse == nil { + return localVarReturnValue, localVarHttpResponse, err + } + + localVarBody, err := ioutil.ReadAll(localVarHttpResponse.Body) + localVarHttpResponse.Body.Close() + if err != nil { + return localVarReturnValue, localVarHttpResponse, err + } + + if localVarHttpResponse.StatusCode < 300 { + // If we succeed, return the data, otherwise pass on to decode error. 
+ err = a.client.decode(&localVarReturnValue, localVarBody, localVarHttpResponse.Header.Get("Content-Type")) + return localVarReturnValue, localVarHttpResponse, err + } + + if localVarHttpResponse.StatusCode >= 300 { + newErr := GenericSwaggerError{ + body: localVarBody, + error: localVarHttpResponse.Status, + } + + if localVarHttpResponse.StatusCode == 200 { + var v ModelsSearchResult + err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")) + if err != nil { + newErr.error = err.Error() + return localVarReturnValue, localVarHttpResponse, newErr + } + newErr.model = v + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, nil +} diff --git a/selfapi/api_storage.go b/selfapi/api_storage.go new file mode 100644 index 0000000..86b61ff --- /dev/null +++ b/selfapi/api_storage.go @@ -0,0 +1,269 @@ +/* + * oc-catalog API + * + * Backend of the oc-search project + * + * API version: 1.0.0 + * Contact: opencloud@irt-saintexupery.com + * Generated by: Swagger Codegen (https://github.com/swagger-api/swagger-codegen.git) + */ + +package swagger + +import ( + "context" + "fmt" + "io/ioutil" + "net/http" + "net/url" + "strings" +) + +// Linger please +var ( + _ context.Context +) + +type StorageApiService service + +/* +StorageApiService +submit storage object + - @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). 
+ - @param body The object content +*/ +func (a *StorageApiService) StorageControllerCreateStorage(ctx context.Context, body ModelsStorageNewModel) (*http.Response, error) { + var ( + localVarHttpMethod = strings.ToUpper("Post") + localVarPostBody interface{} + localVarFileName string + localVarFileBytes []byte + ) + + // create path and map variables + localVarPath := a.client.cfg.BasePath + "/storage/" + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + + // to determine the Content-Type header + localVarHttpContentTypes := []string{} + + // set Content-Type header + localVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + + // to determine the Accept header + localVarHttpHeaderAccepts := []string{} + + // set Accept header + localVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + // body params + localVarPostBody = &body + r, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + if err != nil { + return nil, err + } + + localVarHttpResponse, err := a.client.callAPI(r) + if err != nil || localVarHttpResponse == nil { + return localVarHttpResponse, err + } + + localVarBody, err := ioutil.ReadAll(localVarHttpResponse.Body) + localVarHttpResponse.Body.Close() + if err != nil { + return localVarHttpResponse, err + } + + if localVarHttpResponse.StatusCode >= 300 { + newErr := GenericSwaggerError{ + body: localVarBody, + error: localVarHttpResponse.Status, + } + + return localVarHttpResponse, newErr + } + + return localVarHttpResponse, nil +} + +/* +StorageApiService +find storage by ID + - @param ctx 
context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). + - @param iD the ID you want to get + +@return ModelsStorageModel +*/ +func (a *StorageApiService) StorageControllerGet(ctx context.Context, iD string) (ModelsStorageModel, *http.Response, error) { + var ( + localVarHttpMethod = strings.ToUpper("Get") + localVarPostBody interface{} + localVarFileName string + localVarFileBytes []byte + localVarReturnValue ModelsStorageModel + ) + + // create path and map variables + localVarPath := a.client.cfg.BasePath + "/storage/{ID}" + localVarPath = strings.Replace(localVarPath, "{"+"ID"+"}", fmt.Sprintf("%v", iD), -1) + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + + // to determine the Content-Type header + localVarHttpContentTypes := []string{} + + // set Content-Type header + localVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + + // to determine the Accept header + localVarHttpHeaderAccepts := []string{} + + // set Accept header + localVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + r, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + if err != nil { + return localVarReturnValue, nil, err + } + + localVarHttpResponse, err := a.client.callAPI(r) + if err != nil || localVarHttpResponse == nil { + return localVarReturnValue, localVarHttpResponse, err + } + + localVarBody, err := ioutil.ReadAll(localVarHttpResponse.Body) + localVarHttpResponse.Body.Close() + if err != nil { + return localVarReturnValue, 
localVarHttpResponse, err + } + + if localVarHttpResponse.StatusCode < 300 { + // If we succeed, return the data, otherwise pass on to decode error. + err = a.client.decode(&localVarReturnValue, localVarBody, localVarHttpResponse.Header.Get("Content-Type")) + return localVarReturnValue, localVarHttpResponse, err + } + + if localVarHttpResponse.StatusCode >= 300 { + newErr := GenericSwaggerError{ + body: localVarBody, + error: localVarHttpResponse.Status, + } + + if localVarHttpResponse.StatusCode == 200 { + var v ModelsStorageModel + err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")) + if err != nil { + newErr.error = err.Error() + return localVarReturnValue, localVarHttpResponse, newErr + } + newErr.model = v + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, nil +} + +/* +StorageApiService +Return Storage objects if found in the DB. Not found IDs will be ignored + - @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). 
+ - @param iDs List of storage IDs + +@return []ModelsComputingModel +*/ +func (a *StorageApiService) StorageControllerGetMultipleStoragesByIDs(ctx context.Context, iDs []string) ([]ModelsComputingModel, *http.Response, error) { + var ( + localVarHttpMethod = strings.ToUpper("Get") + localVarPostBody interface{} + localVarFileName string + localVarFileBytes []byte + localVarReturnValue []ModelsComputingModel + ) + + // create path and map variables + localVarPath := a.client.cfg.BasePath + "/storage/multi/{IDs}" + localVarPath = strings.Replace(localVarPath, "{"+"IDs"+"}", fmt.Sprintf("%v", iDs), -1) + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + + // to determine the Content-Type header + localVarHttpContentTypes := []string{} + + // set Content-Type header + localVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + + // to determine the Accept header + localVarHttpHeaderAccepts := []string{} + + // set Accept header + localVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + r, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + if err != nil { + return localVarReturnValue, nil, err + } + + localVarHttpResponse, err := a.client.callAPI(r) + if err != nil || localVarHttpResponse == nil { + return localVarReturnValue, localVarHttpResponse, err + } + + localVarBody, err := ioutil.ReadAll(localVarHttpResponse.Body) + localVarHttpResponse.Body.Close() + if err != nil { + return localVarReturnValue, localVarHttpResponse, err + } + + if localVarHttpResponse.StatusCode < 300 { + // If we succeed, return the data, 
otherwise pass on to decode error. + err = a.client.decode(&localVarReturnValue, localVarBody, localVarHttpResponse.Header.Get("Content-Type")) + return localVarReturnValue, localVarHttpResponse, err + } + + if localVarHttpResponse.StatusCode >= 300 { + newErr := GenericSwaggerError{ + body: localVarBody, + error: localVarHttpResponse.Status, + } + + if localVarHttpResponse.StatusCode == 200 { + var v []ModelsComputingModel + err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")) + if err != nil { + newErr.error = err.Error() + return localVarReturnValue, localVarHttpResponse, newErr + } + newErr.model = v + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, nil +} diff --git a/selfapi/api_user.go b/selfapi/api_user.go new file mode 100644 index 0000000..0ee82b2 --- /dev/null +++ b/selfapi/api_user.go @@ -0,0 +1,160 @@ +/* + * oc-catalog API + * + * Backend of the oc-search project + * + * API version: 1.0.0 + * Contact: opencloud@irt-saintexupery.com + * Generated by: Swagger Codegen (https://github.com/swagger-api/swagger-codegen.git) + */ + +package swagger + +import ( + "context" + "io/ioutil" + "net/http" + "net/url" + "strings" +) + +// Linger please +var ( + _ context.Context +) + +type UserApiService service + +/* +UserApiService +Logs user into the system + - @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). 
+ - @param username The username for login + - @param password The password for login +*/ +func (a *UserApiService) UserControllerLogin(ctx context.Context, username string, password string) (*http.Response, error) { + var ( + localVarHttpMethod = strings.ToUpper("Get") + localVarPostBody interface{} + localVarFileName string + localVarFileBytes []byte + ) + + // create path and map variables + localVarPath := a.client.cfg.BasePath + "/user/login" + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + + localVarQueryParams.Add("username", parameterToString(username, "")) + localVarQueryParams.Add("password", parameterToString(password, "")) + // to determine the Content-Type header + localVarHttpContentTypes := []string{} + + // set Content-Type header + localVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + + // to determine the Accept header + localVarHttpHeaderAccepts := []string{} + + // set Accept header + localVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + r, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + if err != nil { + return nil, err + } + + localVarHttpResponse, err := a.client.callAPI(r) + if err != nil || localVarHttpResponse == nil { + return localVarHttpResponse, err + } + + localVarBody, err := ioutil.ReadAll(localVarHttpResponse.Body) + localVarHttpResponse.Body.Close() + if err != nil { + return localVarHttpResponse, err + } + + if localVarHttpResponse.StatusCode >= 300 { + newErr := GenericSwaggerError{ + body: localVarBody, + error: localVarHttpResponse.Status, + } + + return 
localVarHttpResponse, newErr + } + + return localVarHttpResponse, nil +} + +/* +UserApiService +Logs out current logged in user session + - @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). +*/ +func (a *UserApiService) UserControllerLogout(ctx context.Context) (*http.Response, error) { + var ( + localVarHttpMethod = strings.ToUpper("Get") + localVarPostBody interface{} + localVarFileName string + localVarFileBytes []byte + ) + + // create path and map variables + localVarPath := a.client.cfg.BasePath + "/user/logout" + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + + // to determine the Content-Type header + localVarHttpContentTypes := []string{} + + // set Content-Type header + localVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + + // to determine the Accept header + localVarHttpHeaderAccepts := []string{} + + // set Accept header + localVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + r, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + if err != nil { + return nil, err + } + + localVarHttpResponse, err := a.client.callAPI(r) + if err != nil || localVarHttpResponse == nil { + return localVarHttpResponse, err + } + + localVarBody, err := ioutil.ReadAll(localVarHttpResponse.Body) + localVarHttpResponse.Body.Close() + if err != nil { + return localVarHttpResponse, err + } + + if localVarHttpResponse.StatusCode >= 300 { + newErr := GenericSwaggerError{ + body: localVarBody, + error: 
localVarHttpResponse.Status, + } + + return localVarHttpResponse, newErr + } + + return localVarHttpResponse, nil +} diff --git a/selfapi/api_workflow.go b/selfapi/api_workflow.go new file mode 100644 index 0000000..77a5dd8 --- /dev/null +++ b/selfapi/api_workflow.go @@ -0,0 +1,901 @@ +/* + * oc-catalog API + * + * Backend of the oc-search project + * + * API version: 1.0.0 + * Contact: opencloud@irt-saintexupery.com + * Generated by: Swagger Codegen (https://github.com/swagger-api/swagger-codegen.git) + */ + +package swagger + +import ( + "context" + "fmt" + "io/ioutil" + "net/http" + "net/url" + "strings" + + "github.com/antihax/optional" +) + +// Linger please +var ( + _ context.Context +) + +type WorkflowApiService service + +/* +WorkflowApiService +Create a Rtype object from already added resources to the workspace + - @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). + - @param workflowName workflow Name + - @param rID rID of already existing item in Workspace +*/ +func (a *WorkflowApiService) WorkflowControllerAddNewObjectToAWorkflow(ctx context.Context, workflowName string, rID string) (*http.Response, error) { + var ( + localVarHttpMethod = strings.ToUpper("Post") + localVarPostBody interface{} + localVarFileName string + localVarFileBytes []byte + ) + + // create path and map variables + localVarPath := a.client.cfg.BasePath + "/workflow/{workflowName}/add" + localVarPath = strings.Replace(localVarPath, "{"+"workflowName"+"}", fmt.Sprintf("%v", workflowName), -1) + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + + localVarQueryParams.Add("rID", parameterToString(rID, "")) + // to determine the Content-Type header + localVarHttpContentTypes := []string{} + + // set Content-Type header + localVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes) + if 
localVarHttpContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + + // to determine the Accept header + localVarHttpHeaderAccepts := []string{} + + // set Accept header + localVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + r, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + if err != nil { + return nil, err + } + + localVarHttpResponse, err := a.client.callAPI(r) + if err != nil || localVarHttpResponse == nil { + return localVarHttpResponse, err + } + + localVarBody, err := ioutil.ReadAll(localVarHttpResponse.Body) + localVarHttpResponse.Body.Close() + if err != nil { + return localVarHttpResponse, err + } + + if localVarHttpResponse.StatusCode >= 300 { + newErr := GenericSwaggerError{ + body: localVarBody, + error: localVarHttpResponse.Status, + } + + return localVarHttpResponse, newErr + } + + return localVarHttpResponse, nil +} + +/* +WorkflowApiService +Book a schedule in all DCs of the workflow. Must set a desired schedule first! + - @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). 
+ - @param workflowName Workflow Name + +@return []ModelsDCstatus +*/ +func (a *WorkflowApiService) WorkflowControllerBookSchedule(ctx context.Context, workflowName string) ([]ModelsDCstatus, *http.Response, error) { + var ( + localVarHttpMethod = strings.ToUpper("Post") + localVarPostBody interface{} + localVarFileName string + localVarFileBytes []byte + localVarReturnValue []ModelsDCstatus + ) + + // create path and map variables + localVarPath := a.client.cfg.BasePath + "/workflow/{workflowName}/schedule/book" + localVarPath = strings.Replace(localVarPath, "{"+"workflowName"+"}", fmt.Sprintf("%v", workflowName), -1) + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + + // to determine the Content-Type header + localVarHttpContentTypes := []string{} + + // set Content-Type header + localVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + + // to determine the Accept header + localVarHttpHeaderAccepts := []string{} + + // set Accept header + localVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + r, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + if err != nil { + return localVarReturnValue, nil, err + } + + localVarHttpResponse, err := a.client.callAPI(r) + if err != nil || localVarHttpResponse == nil { + return localVarReturnValue, localVarHttpResponse, err + } + + localVarBody, err := ioutil.ReadAll(localVarHttpResponse.Body) + localVarHttpResponse.Body.Close() + if err != nil { + return localVarReturnValue, localVarHttpResponse, err + } + + if localVarHttpResponse.StatusCode < 300 { + // If we 
succeed, return the data, otherwise pass on to decode error. + err = a.client.decode(&localVarReturnValue, localVarBody, localVarHttpResponse.Header.Get("Content-Type")) + return localVarReturnValue, localVarHttpResponse, err + } + + if localVarHttpResponse.StatusCode >= 300 { + newErr := GenericSwaggerError{ + body: localVarBody, + error: localVarHttpResponse.Status, + } + + if localVarHttpResponse.StatusCode == 200 { + var v []ModelsDCstatus + err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")) + if err != nil { + newErr.error = err.Error() + return localVarReturnValue, localVarHttpResponse, newErr + } + newErr.model = v + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, nil +} + +/* +WorkflowApiService +Check if we can schedule the project in other DCs. Must set a desired schedule first! + - @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). 
+ - @param workflowName Workflow Name + +@return []ModelsDCstatus +*/ +func (a *WorkflowApiService) WorkflowControllerCheckSchedule(ctx context.Context, workflowName string) ([]ModelsDCstatus, *http.Response, error) { + var ( + localVarHttpMethod = strings.ToUpper("Get") + localVarPostBody interface{} + localVarFileName string + localVarFileBytes []byte + localVarReturnValue []ModelsDCstatus + ) + + // create path and map variables + localVarPath := a.client.cfg.BasePath + "/workflow/{workflowName}/schedule/check" + localVarPath = strings.Replace(localVarPath, "{"+"workflowName"+"}", fmt.Sprintf("%v", workflowName), -1) + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + + // to determine the Content-Type header + localVarHttpContentTypes := []string{} + + // set Content-Type header + localVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + + // to determine the Accept header + localVarHttpHeaderAccepts := []string{} + + // set Accept header + localVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + r, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + if err != nil { + return localVarReturnValue, nil, err + } + + localVarHttpResponse, err := a.client.callAPI(r) + if err != nil || localVarHttpResponse == nil { + return localVarReturnValue, localVarHttpResponse, err + } + + localVarBody, err := ioutil.ReadAll(localVarHttpResponse.Body) + localVarHttpResponse.Body.Close() + if err != nil { + return localVarReturnValue, localVarHttpResponse, err + } + + if localVarHttpResponse.StatusCode < 300 { + // If we 
succeed, return the data, otherwise pass on to decode error. + err = a.client.decode(&localVarReturnValue, localVarBody, localVarHttpResponse.Header.Get("Content-Type")) + return localVarReturnValue, localVarHttpResponse, err + } + + if localVarHttpResponse.StatusCode >= 300 { + newErr := GenericSwaggerError{ + body: localVarBody, + error: localVarHttpResponse.Status, + } + + if localVarHttpResponse.StatusCode == 200 { + var v []ModelsDCstatus + err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")) + if err != nil { + newErr.error = err.Error() + return localVarReturnValue, localVarHttpResponse, newErr + } + newErr.model = v + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, nil +} + +/* +WorkflowApiService +Create a name for the new workflow + - @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). 
+ - @param workflowName Name of the workflow +*/ +func (a *WorkflowApiService) WorkflowControllerCreateANewWorkflow(ctx context.Context, workflowName string) (*http.Response, error) { + var ( + localVarHttpMethod = strings.ToUpper("Post") + localVarPostBody interface{} + localVarFileName string + localVarFileBytes []byte + ) + + // create path and map variables + localVarPath := a.client.cfg.BasePath + "/workflow/" + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + + localVarQueryParams.Add("workflowName", parameterToString(workflowName, "")) + // to determine the Content-Type header + localVarHttpContentTypes := []string{} + + // set Content-Type header + localVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + + // to determine the Accept header + localVarHttpHeaderAccepts := []string{} + + // set Accept header + localVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + r, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + if err != nil { + return nil, err + } + + localVarHttpResponse, err := a.client.callAPI(r) + if err != nil || localVarHttpResponse == nil { + return localVarHttpResponse, err + } + + localVarBody, err := ioutil.ReadAll(localVarHttpResponse.Body) + localVarHttpResponse.Body.Close() + if err != nil { + return localVarHttpResponse, err + } + + if localVarHttpResponse.StatusCode >= 300 { + newErr := GenericSwaggerError{ + body: localVarBody, + error: localVarHttpResponse.Status, + } + + return localVarHttpResponse, newErr + } + + return localVarHttpResponse, nil +} + +/* +WorkflowApiService 
+Create a Rtype object from already added resources to the workspace + - @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). + - @param workflowName Workflow Name + - @param rObjIDsource Robject source. Usually Data + - @param isInput If the operation is for input (true) linkage or output (false) + - @param rObjIDtarger Robject where will be written the association +*/ +func (a *WorkflowApiService) WorkflowControllerCreateARealtionshipBetweenTwoRobjects(ctx context.Context, workflowName string, rObjIDsource string, isInput bool, rObjIDtarger string) (*http.Response, error) { + var ( + localVarHttpMethod = strings.ToUpper("Post") + localVarPostBody interface{} + localVarFileName string + localVarFileBytes []byte + ) + + // create path and map variables + localVarPath := a.client.cfg.BasePath + "/workflow/{workflowName}/link" + localVarPath = strings.Replace(localVarPath, "{"+"workflowName"+"}", fmt.Sprintf("%v", workflowName), -1) + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + + localVarQueryParams.Add("rObjIDsource", parameterToString(rObjIDsource, "")) + localVarQueryParams.Add("isInput", parameterToString(isInput, "")) + localVarQueryParams.Add("rObjIDtarger", parameterToString(rObjIDtarger, "")) + // to determine the Content-Type header + localVarHttpContentTypes := []string{} + + // set Content-Type header + localVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + + // to determine the Accept header + localVarHttpHeaderAccepts := []string{} + + // set Accept header + localVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + r, err := 
a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + if err != nil { + return nil, err + } + + localVarHttpResponse, err := a.client.callAPI(r) + if err != nil || localVarHttpResponse == nil { + return localVarHttpResponse, err + } + + localVarBody, err := ioutil.ReadAll(localVarHttpResponse.Body) + localVarHttpResponse.Body.Close() + if err != nil { + return localVarHttpResponse, err + } + + if localVarHttpResponse.StatusCode >= 300 { + newErr := GenericSwaggerError{ + body: localVarBody, + error: localVarHttpResponse.Status, + } + + return localVarHttpResponse, newErr + } + + return localVarHttpResponse, nil +} + +/* +WorkflowApiService +Obtain the last mxgraph XML status from the workflow + - @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). + - @param workflowName Workflow Name +*/ +func (a *WorkflowApiService) WorkflowControllerGetMxGraphLastStatus(ctx context.Context, workflowName string) (*http.Response, error) { + var ( + localVarHttpMethod = strings.ToUpper("Get") + localVarPostBody interface{} + localVarFileName string + localVarFileBytes []byte + ) + + // create path and map variables + localVarPath := a.client.cfg.BasePath + "/workflow/{workflowName}/mxGraphParser" + localVarPath = strings.Replace(localVarPath, "{"+"workflowName"+"}", fmt.Sprintf("%v", workflowName), -1) + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + + // to determine the Content-Type header + localVarHttpContentTypes := []string{} + + // set Content-Type header + localVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + + // to determine the Accept header + 
localVarHttpHeaderAccepts := []string{} + + // set Accept header + localVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + r, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + if err != nil { + return nil, err + } + + localVarHttpResponse, err := a.client.callAPI(r) + if err != nil || localVarHttpResponse == nil { + return localVarHttpResponse, err + } + + localVarBody, err := ioutil.ReadAll(localVarHttpResponse.Body) + localVarHttpResponse.Body.Close() + if err != nil { + return localVarHttpResponse, err + } + + if localVarHttpResponse.StatusCode >= 300 { + newErr := GenericSwaggerError{ + body: localVarBody, + error: localVarHttpResponse.Status, + } + + return localVarHttpResponse, newErr + } + + return localVarHttpResponse, nil +} + +/* +WorkflowApiService +Obtain the desired schedule of this workflow + - @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). 
+ - @param workflowName Workflow Name + +@return ModelsScheduleTime +*/ +func (a *WorkflowApiService) WorkflowControllerGetSchedule(ctx context.Context, workflowName string) (ModelsScheduleTime, *http.Response, error) { + var ( + localVarHttpMethod = strings.ToUpper("Get") + localVarPostBody interface{} + localVarFileName string + localVarFileBytes []byte + localVarReturnValue ModelsScheduleTime + ) + + // create path and map variables + localVarPath := a.client.cfg.BasePath + "/workflow/{workflowName}/schedule" + localVarPath = strings.Replace(localVarPath, "{"+"workflowName"+"}", fmt.Sprintf("%v", workflowName), -1) + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + + // to determine the Content-Type header + localVarHttpContentTypes := []string{} + + // set Content-Type header + localVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + + // to determine the Accept header + localVarHttpHeaderAccepts := []string{} + + // set Accept header + localVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + r, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + if err != nil { + return localVarReturnValue, nil, err + } + + localVarHttpResponse, err := a.client.callAPI(r) + if err != nil || localVarHttpResponse == nil { + return localVarReturnValue, localVarHttpResponse, err + } + + localVarBody, err := ioutil.ReadAll(localVarHttpResponse.Body) + localVarHttpResponse.Body.Close() + if err != nil { + return localVarReturnValue, localVarHttpResponse, err + } + + if localVarHttpResponse.StatusCode < 300 { + // If we 
succeed, return the data, otherwise pass on to decode error. + err = a.client.decode(&localVarReturnValue, localVarBody, localVarHttpResponse.Header.Get("Content-Type")) + return localVarReturnValue, localVarHttpResponse, err + } + + if localVarHttpResponse.StatusCode >= 300 { + newErr := GenericSwaggerError{ + body: localVarBody, + error: localVarHttpResponse.Status, + } + + if localVarHttpResponse.StatusCode == 200 { + var v ModelsScheduleTime + err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")) + if err != nil { + newErr.error = err.Error() + return localVarReturnValue, localVarHttpResponse, newErr + } + newErr.model = v + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, nil +} + +/* +WorkflowApiService +Get a workflow by name + - @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). 
+ - @param workflowName Workflow Name + +@return ModelsWorkflow +*/ +func (a *WorkflowApiService) WorkflowControllerGetWorkflow(ctx context.Context, workflowName string) (ModelsWorkflow, *http.Response, error) { + var ( + localVarHttpMethod = strings.ToUpper("Get") + localVarPostBody interface{} + localVarFileName string + localVarFileBytes []byte + localVarReturnValue ModelsWorkflow + ) + + // create path and map variables + localVarPath := a.client.cfg.BasePath + "/workflow/{workflowName}" + localVarPath = strings.Replace(localVarPath, "{"+"workflowName"+"}", fmt.Sprintf("%v", workflowName), -1) + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + + // to determine the Content-Type header + localVarHttpContentTypes := []string{} + + // set Content-Type header + localVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + + // to determine the Accept header + localVarHttpHeaderAccepts := []string{} + + // set Accept header + localVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + r, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + if err != nil { + return localVarReturnValue, nil, err + } + + localVarHttpResponse, err := a.client.callAPI(r) + if err != nil || localVarHttpResponse == nil { + return localVarReturnValue, localVarHttpResponse, err + } + + localVarBody, err := ioutil.ReadAll(localVarHttpResponse.Body) + localVarHttpResponse.Body.Close() + if err != nil { + return localVarReturnValue, localVarHttpResponse, err + } + + if localVarHttpResponse.StatusCode < 300 { + // If we succeed, return the data, 
otherwise pass on to decode error. + err = a.client.decode(&localVarReturnValue, localVarBody, localVarHttpResponse.Header.Get("Content-Type")) + return localVarReturnValue, localVarHttpResponse, err + } + + if localVarHttpResponse.StatusCode >= 300 { + newErr := GenericSwaggerError{ + body: localVarBody, + error: localVarHttpResponse.Status, + } + + if localVarHttpResponse.StatusCode == 200 { + var v ModelsWorkflow + err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")) + if err != nil { + newErr.error = err.Error() + return localVarReturnValue, localVarHttpResponse, newErr + } + newErr.model = v + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, nil +} + +/* +WorkflowApiService +List available workflows + - @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). 
+*/ +func (a *WorkflowApiService) WorkflowControllerListWorkflows(ctx context.Context) (*http.Response, error) { + var ( + localVarHttpMethod = strings.ToUpper("Get") + localVarPostBody interface{} + localVarFileName string + localVarFileBytes []byte + ) + + // create path and map variables + localVarPath := a.client.cfg.BasePath + "/workflow/" + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + + // to determine the Content-Type header + localVarHttpContentTypes := []string{} + + // set Content-Type header + localVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + + // to determine the Accept header + localVarHttpHeaderAccepts := []string{} + + // set Accept header + localVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + r, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + if err != nil { + return nil, err + } + + localVarHttpResponse, err := a.client.callAPI(r) + if err != nil || localVarHttpResponse == nil { + return localVarHttpResponse, err + } + + localVarBody, err := ioutil.ReadAll(localVarHttpResponse.Body) + localVarHttpResponse.Body.Close() + if err != nil { + return localVarHttpResponse, err + } + + if localVarHttpResponse.StatusCode >= 300 { + newErr := GenericSwaggerError{ + body: localVarBody, + error: localVarHttpResponse.Status, + } + + return localVarHttpResponse, newErr + } + + return localVarHttpResponse, nil +} + +/* +WorkflowApiService +If we use this aproach to transofrm mxgraph representation in our representation, we should not use other API calls for modify the project structure or 
we'll have inconsistencies. + - @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). + - @param workflowName Workflow Name + - @param xmlData Xml representation of the workflow +*/ +func (a *WorkflowApiService) WorkflowControllerParseMxGraph(ctx context.Context, workflowName string, xmlData string) (*http.Response, error) { + var ( + localVarHttpMethod = strings.ToUpper("Post") + localVarPostBody interface{} + localVarFileName string + localVarFileBytes []byte + ) + + // create path and map variables + localVarPath := a.client.cfg.BasePath + "/workflow/{workflowName}/mxGraphParser" + localVarPath = strings.Replace(localVarPath, "{"+"workflowName"+"}", fmt.Sprintf("%v", workflowName), -1) + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + + // to determine the Content-Type header + localVarHttpContentTypes := []string{} + + // set Content-Type header + localVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + + // to determine the Accept header + localVarHttpHeaderAccepts := []string{} + + // set Accept header + localVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + // body params + localVarPostBody = &xmlData + r, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + if err != nil { + return nil, err + } + + localVarHttpResponse, err := a.client.callAPI(r) + if err != nil || localVarHttpResponse == nil { + return localVarHttpResponse, err + } + + localVarBody, err := ioutil.ReadAll(localVarHttpResponse.Body) + 
localVarHttpResponse.Body.Close() + if err != nil { + return localVarHttpResponse, err + } + + if localVarHttpResponse.StatusCode >= 300 { + newErr := GenericSwaggerError{ + body: localVarBody, + error: localVarHttpResponse.Status, + } + + return localVarHttpResponse, newErr + } + + return localVarHttpResponse, nil +} + +/* +WorkflowApiService +Set desired schedule by the user. No other effects a part of saving the user input + * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). + * @param workflowName Workflow Name + * @param isService True: Service, False: Task + * @param startDate RFC3339 time for startDate + * @param stopDate RFC3339 time for stopDate + * @param optional nil or *WorkflowApiWorkflowControllerSetScheduleOpts - Optional Parameters: + * @param "Events" (optional.String) - List of events separated by comma + * @param "CronString" (optional.String) - Cron string + * @param "Duration" (optional.Int32) - Duration in seconds + +@return ModelsScheduleInfo +*/ + +type WorkflowApiWorkflowControllerSetScheduleOpts struct { + Events optional.String + CronString optional.String + Duration optional.Int32 +} + +func (a *WorkflowApiService) WorkflowControllerSetSchedule(ctx context.Context, workflowName string, isService bool, startDate interface{}, stopDate interface{}, localVarOptionals *WorkflowApiWorkflowControllerSetScheduleOpts) (ModelsScheduleInfo, *http.Response, error) { + var ( + localVarHttpMethod = strings.ToUpper("Put") + localVarPostBody interface{} + localVarFileName string + localVarFileBytes []byte + localVarReturnValue ModelsScheduleInfo + ) + + // create path and map variables + localVarPath := a.client.cfg.BasePath + "/workflow/{workflowName}/schedule" + localVarPath = strings.Replace(localVarPath, "{"+"workflowName"+"}", fmt.Sprintf("%v", workflowName), -1) + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} 
+ localVarFormParams := url.Values{} + + localVarQueryParams.Add("isService", parameterToString(isService, "")) + localVarQueryParams.Add("startDate", parameterToString(startDate, "")) + localVarQueryParams.Add("stopDate", parameterToString(stopDate, "")) + if localVarOptionals != nil && localVarOptionals.Events.IsSet() { + localVarQueryParams.Add("events", parameterToString(localVarOptionals.Events.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.CronString.IsSet() { + localVarQueryParams.Add("cronString", parameterToString(localVarOptionals.CronString.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.Duration.IsSet() { + localVarQueryParams.Add("duration", parameterToString(localVarOptionals.Duration.Value(), "")) + } + // to determine the Content-Type header + localVarHttpContentTypes := []string{} + + // set Content-Type header + localVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + + // to determine the Accept header + localVarHttpHeaderAccepts := []string{} + + // set Accept header + localVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + r, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + if err != nil { + return localVarReturnValue, nil, err + } + + localVarHttpResponse, err := a.client.callAPI(r) + if err != nil || localVarHttpResponse == nil { + return localVarReturnValue, localVarHttpResponse, err + } + + localVarBody, err := ioutil.ReadAll(localVarHttpResponse.Body) + localVarHttpResponse.Body.Close() + if err != nil { + return localVarReturnValue, localVarHttpResponse, err + } + + if localVarHttpResponse.StatusCode < 300 { + // 
If we succeed, return the data, otherwise pass on to decode error. + err = a.client.decode(&localVarReturnValue, localVarBody, localVarHttpResponse.Header.Get("Content-Type")) + return localVarReturnValue, localVarHttpResponse, err + } + + if localVarHttpResponse.StatusCode >= 300 { + newErr := GenericSwaggerError{ + body: localVarBody, + error: localVarHttpResponse.Status, + } + + if localVarHttpResponse.StatusCode == 200 { + var v ModelsScheduleInfo + err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")) + if err != nil { + newErr.error = err.Error() + return localVarReturnValue, localVarHttpResponse, newErr + } + newErr.model = v + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, nil +} diff --git a/selfapi/api_workspace.go b/selfapi/api_workspace.go new file mode 100644 index 0000000..449abc6 --- /dev/null +++ b/selfapi/api_workspace.go @@ -0,0 +1,334 @@ +/* + * oc-catalog API + * + * Backend of the oc-search project + * + * API version: 1.0.0 + * Contact: opencloud@irt-saintexupery.com + * Generated by: Swagger Codegen (https://github.com/swagger-api/swagger-codegen.git) + */ + +package swagger + +import ( + "context" + "io/ioutil" + "net/http" + "net/url" + "strings" +) + +// Linger please +var ( + _ context.Context +) + +type WorkspaceApiService service + +/* +WorkspaceApiService +Insert a resource in the workspace + - @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). 
+ - @param id ID of a resource + - @param rtype Type of resource +*/ +func (a *WorkspaceApiService) WorkspaceControllerAddModelToWorkspace(ctx context.Context, id string, rtype string) (*http.Response, error) { + var ( + localVarHttpMethod = strings.ToUpper("Post") + localVarPostBody interface{} + localVarFileName string + localVarFileBytes []byte + ) + + // create path and map variables + localVarPath := a.client.cfg.BasePath + "/workspace/" + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + + localVarQueryParams.Add("id", parameterToString(id, "")) + localVarQueryParams.Add("rtype", parameterToString(rtype, "")) + // to determine the Content-Type header + localVarHttpContentTypes := []string{} + + // set Content-Type header + localVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + + // to determine the Accept header + localVarHttpHeaderAccepts := []string{} + + // set Accept header + localVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + r, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + if err != nil { + return nil, err + } + + localVarHttpResponse, err := a.client.callAPI(r) + if err != nil || localVarHttpResponse == nil { + return localVarHttpResponse, err + } + + localVarBody, err := ioutil.ReadAll(localVarHttpResponse.Body) + localVarHttpResponse.Body.Close() + if err != nil { + return localVarHttpResponse, err + } + + if localVarHttpResponse.StatusCode >= 300 { + newErr := GenericSwaggerError{ + body: localVarBody, + error: localVarHttpResponse.Status, + } + + return localVarHttpResponse, 
newErr + } + + return localVarHttpResponse, nil +} + +/* +WorkspaceApiService +Remove a resource from the workspace + - @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). + - @param id ID of a resource + - @param rtype Type of resource +*/ +func (a *WorkspaceApiService) WorkspaceControllerDeleteElementFromUserWorkspace(ctx context.Context, id string, rtype string) (*http.Response, error) { + var ( + localVarHttpMethod = strings.ToUpper("Delete") + localVarPostBody interface{} + localVarFileName string + localVarFileBytes []byte + ) + + // create path and map variables + localVarPath := a.client.cfg.BasePath + "/workspace/" + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + + localVarQueryParams.Add("id", parameterToString(id, "")) + localVarQueryParams.Add("rtype", parameterToString(rtype, "")) + // to determine the Content-Type header + localVarHttpContentTypes := []string{} + + // set Content-Type header + localVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + + // to determine the Accept header + localVarHttpHeaderAccepts := []string{} + + // set Accept header + localVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + r, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + if err != nil { + return nil, err + } + + localVarHttpResponse, err := a.client.callAPI(r) + if err != nil || localVarHttpResponse == nil { + return localVarHttpResponse, err + } + + localVarBody, err := 
ioutil.ReadAll(localVarHttpResponse.Body) + localVarHttpResponse.Body.Close() + if err != nil { + return localVarHttpResponse, err + } + + if localVarHttpResponse.StatusCode >= 300 { + newErr := GenericSwaggerError{ + body: localVarBody, + error: localVarHttpResponse.Status, + } + + return localVarHttpResponse, newErr + } + + return localVarHttpResponse, nil +} + +/* +WorkspaceApiService +Get full workspace elements based on user_id token + - @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). + +@return ModelsWorkspaceModel +*/ +func (a *WorkspaceApiService) WorkspaceControllerGetFullWorkspace(ctx context.Context) (ModelsWorkspaceModel, *http.Response, error) { + var ( + localVarHttpMethod = strings.ToUpper("Get") + localVarPostBody interface{} + localVarFileName string + localVarFileBytes []byte + localVarReturnValue ModelsWorkspaceModel + ) + + // create path and map variables + localVarPath := a.client.cfg.BasePath + "/workspace/list_model" + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + + // to determine the Content-Type header + localVarHttpContentTypes := []string{} + + // set Content-Type header + localVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + + // to determine the Accept header + localVarHttpHeaderAccepts := []string{} + + // set Accept header + localVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + r, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + if err != nil { + return 
localVarReturnValue, nil, err + } + + localVarHttpResponse, err := a.client.callAPI(r) + if err != nil || localVarHttpResponse == nil { + return localVarReturnValue, localVarHttpResponse, err + } + + localVarBody, err := ioutil.ReadAll(localVarHttpResponse.Body) + localVarHttpResponse.Body.Close() + if err != nil { + return localVarReturnValue, localVarHttpResponse, err + } + + if localVarHttpResponse.StatusCode < 300 { + // If we succeed, return the data, otherwise pass on to decode error. + err = a.client.decode(&localVarReturnValue, localVarBody, localVarHttpResponse.Header.Get("Content-Type")) + return localVarReturnValue, localVarHttpResponse, err + } + + if localVarHttpResponse.StatusCode >= 300 { + newErr := GenericSwaggerError{ + body: localVarBody, + error: localVarHttpResponse.Status, + } + + if localVarHttpResponse.StatusCode == 200 { + var v ModelsWorkspaceModel + err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")) + if err != nil { + newErr.error = err.Error() + return localVarReturnValue, localVarHttpResponse, newErr + } + newErr.model = v + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, nil +} + +/* +WorkspaceApiService +Get workspace elements based on user_id token + - @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). 
+ +@return ModelsWorkspace +*/ +func (a *WorkspaceApiService) WorkspaceControllerGetWorkspace(ctx context.Context) (ModelsWorkspace, *http.Response, error) { + var ( + localVarHttpMethod = strings.ToUpper("Get") + localVarPostBody interface{} + localVarFileName string + localVarFileBytes []byte + localVarReturnValue ModelsWorkspace + ) + + // create path and map variables + localVarPath := a.client.cfg.BasePath + "/workspace/list" + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + + // to determine the Content-Type header + localVarHttpContentTypes := []string{} + + // set Content-Type header + localVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + + // to determine the Accept header + localVarHttpHeaderAccepts := []string{} + + // set Accept header + localVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + r, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + if err != nil { + return localVarReturnValue, nil, err + } + + localVarHttpResponse, err := a.client.callAPI(r) + if err != nil || localVarHttpResponse == nil { + return localVarReturnValue, localVarHttpResponse, err + } + + localVarBody, err := ioutil.ReadAll(localVarHttpResponse.Body) + localVarHttpResponse.Body.Close() + if err != nil { + return localVarReturnValue, localVarHttpResponse, err + } + + if localVarHttpResponse.StatusCode < 300 { + // If we succeed, return the data, otherwise pass on to decode error. 
+ err = a.client.decode(&localVarReturnValue, localVarBody, localVarHttpResponse.Header.Get("Content-Type")) + return localVarReturnValue, localVarHttpResponse, err + } + + if localVarHttpResponse.StatusCode >= 300 { + newErr := GenericSwaggerError{ + body: localVarBody, + error: localVarHttpResponse.Status, + } + + if localVarHttpResponse.StatusCode == 200 { + var v ModelsWorkspace + err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")) + if err != nil { + newErr.error = err.Error() + return localVarReturnValue, localVarHttpResponse, newErr + } + newErr.model = v + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, nil +} diff --git a/selfapi/client.go b/selfapi/client.go new file mode 100644 index 0000000..46a4509 --- /dev/null +++ b/selfapi/client.go @@ -0,0 +1,499 @@ +/* + * oc-catalog API + * + * Backend of the oc-search project + * + * API version: 1.0.0 + * Contact: opencloud@irt-saintexupery.com + * Generated by: Swagger Codegen (https://github.com/swagger-api/swagger-codegen.git) + */ + +package swagger + +import ( + "bytes" + "context" + "encoding/json" + "encoding/xml" + "errors" + "fmt" + "io" + "mime/multipart" + "net/http" + "net/url" + "os" + "path/filepath" + "reflect" + "regexp" + "strconv" + "strings" + "time" + "unicode/utf8" + + "golang.org/x/oauth2" +) + +var ( + jsonCheck = regexp.MustCompile("(?i:(?:application|text)/json)") + xmlCheck = regexp.MustCompile("(?i:(?:application|text)/xml)") +) + +// APIClient manages communication with the oc-catalog API API v1.0.0 +// In most cases there should be only one, shared, APIClient. +type APIClient struct { + cfg *Configuration + common service // Reuse a single struct instead of allocating one for each service on the heap. 
+ + // API Services + + ComputingApi *ComputingApiService + + DataApi *DataApiService + + DatacenterApi *DatacenterApiService + + ScheduleApi *ScheduleApiService + + SearchApi *SearchApiService + + StorageApi *StorageApiService + + UserApi *UserApiService + + WorkflowApi *WorkflowApiService + + WorkspaceApi *WorkspaceApiService +} + +type service struct { + client *APIClient +} + +// NewAPIClient creates a new API client. Requires a userAgent string describing your application. +// optionally a custom http.Client to allow for advanced features such as caching. +func NewAPIClient(cfg *Configuration) *APIClient { + if cfg.HTTPClient == nil { + cfg.HTTPClient = http.DefaultClient + } + + c := &APIClient{} + c.cfg = cfg + c.common.client = c + + // API Services + c.ComputingApi = (*ComputingApiService)(&c.common) + c.DataApi = (*DataApiService)(&c.common) + c.DatacenterApi = (*DatacenterApiService)(&c.common) + c.ScheduleApi = (*ScheduleApiService)(&c.common) + c.SearchApi = (*SearchApiService)(&c.common) + c.StorageApi = (*StorageApiService)(&c.common) + c.UserApi = (*UserApiService)(&c.common) + c.WorkflowApi = (*WorkflowApiService)(&c.common) + c.WorkspaceApi = (*WorkspaceApiService)(&c.common) + + return c +} + +func atoi(in string) (int, error) { + return strconv.Atoi(in) +} + +// selectHeaderContentType select a content type from the available list. 
// selectHeaderContentType selects a content type from the available list.
// "application/json" wins whenever it is offered; otherwise the first entry
// of the 'consumes' list is used. An empty list yields "".
func selectHeaderContentType(contentTypes []string) string {
	if len(contentTypes) == 0 {
		return ""
	}
	if contains(contentTypes, "application/json") {
		return "application/json"
	}
	return contentTypes[0] // use the first content type specified in 'consumes'
}

// selectHeaderAccept joins all accept types into one header value,
// preferring "application/json" when present.
func selectHeaderAccept(accepts []string) string {
	if len(accepts) == 0 {
		return ""
	}

	if contains(accepts, "application/json") {
		return "application/json"
	}

	return strings.Join(accepts, ",")
}

// contains is a case insenstive match, finding needle in a haystack
func contains(haystack []string, needle string) bool {
	for _, a := range haystack {
		// strings.EqualFold replaces the generated
		// `strings.ToLower(a) == strings.ToLower(needle)`, which allocated
		// two throwaway strings on every comparison (staticcheck SA6005).
		if strings.EqualFold(a, needle) {
			return true
		}
	}
	return false
}

// typeCheckParameter verifies an optional parameter has the expected
// reflect type name; a nil value always passes.
func typeCheckParameter(obj interface{}, expected string, name string) error {
	// Make sure there is an object.
	if obj == nil {
		return nil
	}

	// Check the type is as expected.
	if reflect.TypeOf(obj).String() != expected {
		return fmt.Errorf("Expected %s to be of type %s but received %s.", name, expected, reflect.TypeOf(obj).String())
	}
	return nil
}

// parameterToString converts interface{} parameters to string. Slices are
// joined with the delimiter implied by collectionFormat (csv, ssv, tsv,
// pipes); everything else goes through fmt's default formatting.
func parameterToString(obj interface{}, collectionFormat string) string {
	var delimiter string

	switch collectionFormat {
	case "pipes":
		delimiter = "|"
	case "ssv":
		delimiter = " "
	case "tsv":
		delimiter = "\t"
	case "csv":
		delimiter = ","
	}

	if reflect.TypeOf(obj).Kind() == reflect.Slice {
		// fmt.Sprint renders a slice as "[a b c]"; swap spaces for the
		// delimiter and strip the brackets.
		return strings.Trim(strings.Replace(fmt.Sprint(obj), " ", delimiter, -1), "[]")
	}

	return fmt.Sprintf("%v", obj)
}
+func (c *APIClient) callAPI(request *http.Request) (*http.Response, error) { + return c.cfg.HTTPClient.Do(request) +} + +// Change base path to allow switching to mocks +func (c *APIClient) ChangeBasePath(path string) { + c.cfg.BasePath = path +} + +// prepareRequest build the request +func (c *APIClient) prepareRequest( + ctx context.Context, + path string, method string, + postBody interface{}, + headerParams map[string]string, + queryParams url.Values, + formParams url.Values, + fileName string, + fileBytes []byte) (localVarRequest *http.Request, err error) { + + var body *bytes.Buffer + + // Detect postBody type and post. + if postBody != nil { + contentType := headerParams["Content-Type"] + if contentType == "" { + contentType = detectContentType(postBody) + headerParams["Content-Type"] = contentType + } + + body, err = setBody(postBody, contentType) + if err != nil { + return nil, err + } + } + + // add form parameters and file if available. + if strings.HasPrefix(headerParams["Content-Type"], "multipart/form-data") && len(formParams) > 0 || (len(fileBytes) > 0 && fileName != "") { + if body != nil { + return nil, errors.New("Cannot specify postBody and multipart form at the same time.") + } + body = &bytes.Buffer{} + w := multipart.NewWriter(body) + + for k, v := range formParams { + for _, iv := range v { + if strings.HasPrefix(k, "@") { // file + err = addFile(w, k[1:], iv) + if err != nil { + return nil, err + } + } else { // form value + w.WriteField(k, iv) + } + } + } + if len(fileBytes) > 0 && fileName != "" { + w.Boundary() + //_, fileNm := filepath.Split(fileName) + part, err := w.CreateFormFile("file", filepath.Base(fileName)) + if err != nil { + return nil, err + } + _, err = part.Write(fileBytes) + if err != nil { + return nil, err + } + // Set the Boundary in the Content-Type + headerParams["Content-Type"] = w.FormDataContentType() + } + + // Set Content-Length + headerParams["Content-Length"] = fmt.Sprintf("%d", body.Len()) + w.Close() + } + + 
if strings.HasPrefix(headerParams["Content-Type"], "application/x-www-form-urlencoded") && len(formParams) > 0 { + if body != nil { + return nil, errors.New("Cannot specify postBody and x-www-form-urlencoded form at the same time.") + } + body = &bytes.Buffer{} + body.WriteString(formParams.Encode()) + // Set Content-Length + headerParams["Content-Length"] = fmt.Sprintf("%d", body.Len()) + } + + // Setup path and query parameters + url, err := url.Parse(path) + if err != nil { + return nil, err + } + + // Adding Query Param + query := url.Query() + for k, v := range queryParams { + for _, iv := range v { + query.Add(k, iv) + } + } + + // Encode the parameters. + url.RawQuery = query.Encode() + + // Generate a new request + if body != nil { + localVarRequest, err = http.NewRequest(method, url.String(), body) + } else { + localVarRequest, err = http.NewRequest(method, url.String(), nil) + } + if err != nil { + return nil, err + } + + // add header parameters, if any + if len(headerParams) > 0 { + headers := http.Header{} + for h, v := range headerParams { + headers.Set(h, v) + } + localVarRequest.Header = headers + } + + // Override request host, if applicable + if c.cfg.Host != "" { + localVarRequest.Host = c.cfg.Host + } + + // Add the user agent to the request. + localVarRequest.Header.Add("User-Agent", c.cfg.UserAgent) + + if ctx != nil { + // add context to the request + localVarRequest = localVarRequest.WithContext(ctx) + + // Walk through any authentication. 
+ + // OAuth2 authentication + if tok, ok := ctx.Value(ContextOAuth2).(oauth2.TokenSource); ok { + // We were able to grab an oauth2 token from the context + var latestToken *oauth2.Token + if latestToken, err = tok.Token(); err != nil { + return nil, err + } + + latestToken.SetAuthHeader(localVarRequest) + } + + // Basic HTTP Authentication + if auth, ok := ctx.Value(ContextBasicAuth).(BasicAuth); ok { + localVarRequest.SetBasicAuth(auth.UserName, auth.Password) + } + + // AccessToken Authentication + if auth, ok := ctx.Value(ContextAccessToken).(string); ok { + localVarRequest.Header.Add("Authorization", "Bearer "+auth) + } + } + + for header, value := range c.cfg.DefaultHeader { + localVarRequest.Header.Add(header, value) + } + + return localVarRequest, nil +} + +func (c *APIClient) decode(v interface{}, b []byte, contentType string) (err error) { + if strings.Contains(contentType, "application/xml") { + if err = xml.Unmarshal(b, v); err != nil { + return err + } + return nil + } else if strings.Contains(contentType, "application/json") { + if err = json.Unmarshal(b, v); err != nil { + return err + } + return nil + } + return errors.New("undefined response type") +} + +// Add a file to the multipart request +func addFile(w *multipart.Writer, fieldName, path string) error { + file, err := os.Open(path) + if err != nil { + return err + } + defer file.Close() + + part, err := w.CreateFormFile(fieldName, filepath.Base(path)) + if err != nil { + return err + } + _, err = io.Copy(part, file) + + return err +} + +// Prevent trying to import "fmt" +func reportError(format string, a ...interface{}) error { + return fmt.Errorf(format, a...) 
+} + +// Set request body from an interface{} +func setBody(body interface{}, contentType string) (bodyBuf *bytes.Buffer, err error) { + if bodyBuf == nil { + bodyBuf = &bytes.Buffer{} + } + + if reader, ok := body.(io.Reader); ok { + _, err = bodyBuf.ReadFrom(reader) + } else if b, ok := body.([]byte); ok { + _, err = bodyBuf.Write(b) + } else if s, ok := body.(string); ok { + _, err = bodyBuf.WriteString(s) + } else if s, ok := body.(*string); ok { + _, err = bodyBuf.WriteString(*s) + } else if jsonCheck.MatchString(contentType) { + err = json.NewEncoder(bodyBuf).Encode(body) + } else if xmlCheck.MatchString(contentType) { + xml.NewEncoder(bodyBuf).Encode(body) + } + + if err != nil { + return nil, err + } + + if bodyBuf.Len() == 0 { + err = fmt.Errorf("Invalid body type %s\n", contentType) + return nil, err + } + return bodyBuf, nil +} + +// detectContentType method is used to figure out `Request.Body` content type for request header +func detectContentType(body interface{}) string { + contentType := "text/plain; charset=utf-8" + kind := reflect.TypeOf(body).Kind() + + switch kind { + case reflect.Struct, reflect.Map, reflect.Ptr: + contentType = "application/json; charset=utf-8" + case reflect.String: + contentType = "text/plain; charset=utf-8" + default: + if b, ok := body.([]byte); ok { + contentType = http.DetectContentType(b) + } else if kind == reflect.Slice { + contentType = "application/json; charset=utf-8" + } + } + + return contentType +} + +// Ripped from https://github.com/gregjones/httpcache/blob/master/httpcache.go +type cacheControl map[string]string + +func parseCacheControl(headers http.Header) cacheControl { + cc := cacheControl{} + ccHeader := headers.Get("Cache-Control") + for _, part := range strings.Split(ccHeader, ",") { + part = strings.Trim(part, " ") + if part == "" { + continue + } + if strings.ContainsRune(part, '=') { + keyval := strings.Split(part, "=") + cc[strings.Trim(keyval[0], " ")] = strings.Trim(keyval[1], ",") + } else { + 
cc[part] = "" + } + } + return cc +} + +// CacheExpires helper function to determine remaining time before repeating a request. +func CacheExpires(r *http.Response) time.Time { + // Figure out when the cache expires. + var expires time.Time + now, err := time.Parse(time.RFC1123, r.Header.Get("date")) + if err != nil { + return time.Now() + } + respCacheControl := parseCacheControl(r.Header) + + if maxAge, ok := respCacheControl["max-age"]; ok { + lifetime, err := time.ParseDuration(maxAge + "s") + if err != nil { + expires = now + } + expires = now.Add(lifetime) + } else { + expiresHeader := r.Header.Get("Expires") + if expiresHeader != "" { + expires, err = time.Parse(time.RFC1123, expiresHeader) + if err != nil { + expires = now + } + } + } + return expires +} + +func strlen(s string) int { + return utf8.RuneCountInString(s) +} + +// GenericSwaggerError Provides access to the body, error and model on returned errors. +type GenericSwaggerError struct { + body []byte + error string + model interface{} +} + +// Error returns non-empty string if there was an error. +func (e GenericSwaggerError) Error() string { + return e.error +} + +// Body returns the raw bytes of the response +func (e GenericSwaggerError) Body() []byte { + return e.body +} + +// Model returns the unpacked model of the error +func (e GenericSwaggerError) Model() interface{} { + return e.model +} diff --git a/selfapi/configuration.go b/selfapi/configuration.go new file mode 100644 index 0000000..418994a --- /dev/null +++ b/selfapi/configuration.go @@ -0,0 +1,73 @@ +/* + * oc-catalog API + * + * Backend of the oc-search project + * + * API version: 1.0.0 + * Contact: opencloud@irt-saintexupery.com + * Generated by: Swagger Codegen (https://github.com/swagger-api/swagger-codegen.git) + */ + +package swagger + +import ( + "net/http" +) + +// contextKeys are used to identify the type of value in the context. 
// contextKeys identify the kind of value stored in a context. Because the
// underlying type is string, key.String() yields a short description that is
// handy for logging and debugging.

type contextKey string

// String returns a short, log-friendly description of the key.
func (c contextKey) String() string {
	return "auth " + string(c)
}

var (
	// ContextOAuth2 takes a oauth2.TokenSource as authentication for the request.
	ContextOAuth2 = contextKey("token")

	// ContextBasicAuth takes BasicAuth as authentication for the request.
	ContextBasicAuth = contextKey("basic")

	// ContextAccessToken takes a string oauth2 access token as authentication for the request.
	ContextAccessToken = contextKey("accesstoken")

	// ContextAPIKey takes an APIKey as authentication for the request
	ContextAPIKey = contextKey("apikey")
)

// BasicAuth carries the username/password pair consumed by prepareRequest
// when attached to a context under ContextBasicAuth.
type BasicAuth struct {
	UserName string `json:"userName,omitempty"`
	Password string `json:"password,omitempty"`
}

// APIKey carries an API key (and optional scheme prefix) for requests
// authenticated via ContextAPIKey.
type APIKey struct {
	Key    string
	Prefix string
}

// Configuration holds everything the client needs to reach the server:
// base path, optional host/scheme overrides, default headers, the user
// agent, and an optional custom *http.Client.
type Configuration struct {
	BasePath      string            `json:"basePath,omitempty"`
	Host          string            `json:"host,omitempty"`
	Scheme        string            `json:"scheme,omitempty"`
	DefaultHeader map[string]string `json:"defaultHeader,omitempty"`
	UserAgent     string            `json:"userAgent,omitempty"`
	HTTPClient    *http.Client
}

// NewConfiguration returns a Configuration pre-populated with the generated
// defaults: the local API base path, an empty default-header map and the
// codegen user agent. HTTPClient stays nil; NewAPIClient substitutes
// http.DefaultClient in that case.
func NewConfiguration() *Configuration {
	return &Configuration{
		BasePath:      "https://localhost:49618/v1",
		DefaultHeader: make(map[string]string),
		UserAgent:     "Swagger-Codegen/1.0.0/go",
	}
}

// AddDefaultHeader registers a header that will be sent with every request.
func (c *Configuration) AddDefaultHeader(key string, value string) {
	c.DefaultHeader[key] = value
}
*https://localhost:49618/v1* + +Method | HTTP request | Description +------------- | ------------- | ------------- +[**ComputingControllerAddComputing**](ComputingApi.md#ComputingControllerAddComputing) | **Post** /computing/ | +[**ComputingControllerGetComputingByID**](ComputingApi.md#ComputingControllerGetComputingByID) | **Get** /computing/{ID} | +[**ComputingControllerGetMultipleComputingByIDs**](ComputingApi.md#ComputingControllerGetMultipleComputingByIDs) | **Get** /computing/multi/{IDs} | + + +# **ComputingControllerAddComputing** +> ComputingControllerAddComputing(ctx, body) + + +Submit a computing object + +### Required Parameters + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **ctx** | **context.Context** | context for authentication, logging, cancellation, deadlines, tracing, etc. + **body** | [**ModelsComputingNewModel**](ModelsComputingNewModel.md)| The object content | + +### Return type + + (empty response body) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: Not defined + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **ComputingControllerGetComputingByID** +> ModelsComputingModel ComputingControllerGetComputingByID(ctx, iD) + + +Find a computing resource based on ID + +### Required Parameters + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **ctx** | **context.Context** | context for authentication, logging, cancellation, deadlines, tracing, etc. 
+ **iD** | **string**| The ID of the resource | + +### Return type + +[**ModelsComputingModel**](models.ComputingModel.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: Not defined + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **ComputingControllerGetMultipleComputingByIDs** +> []ModelsComputingModel ComputingControllerGetMultipleComputingByIDs(ctx, iDs) + + +Return Computing objects if found in the DB. Not found IDs will be ignored + +### Required Parameters + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **ctx** | **context.Context** | context for authentication, logging, cancellation, deadlines, tracing, etc. + **iDs** | [**[]string**](string.md)| List of computing IDs | + +### Return type + +[**[]ModelsComputingModel**](models.ComputingModel.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: Not defined + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + diff --git a/selfapi/docs/DataApi.md b/selfapi/docs/DataApi.md new file mode 100644 index 0000000..fa31aa8 --- /dev/null +++ b/selfapi/docs/DataApi.md @@ -0,0 +1,95 @@ +# \DataApi + +All URIs are relative to *https://localhost:49618/v1* + +Method | HTTP request | Description +------------- | ------------- | ------------- +[**DataControllerCreateData**](DataApi.md#DataControllerCreateData) | **Post** /data/ | +[**DataControllerGetDataByID**](DataApi.md#DataControllerGetDataByID) | **Get** /data/{ID} | +[**DataControllerGetMultipleDataByIDs**](DataApi.md#DataControllerGetMultipleDataByIDs) | **Get** /data/multi/{IDs} | + + 
+# **DataControllerCreateData** +> DataControllerCreateData(ctx, body) + + +Submit data object + +### Required Parameters + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **ctx** | **context.Context** | context for authentication, logging, cancellation, deadlines, tracing, etc. + **body** | [**ModelsDataNewModel**](ModelsDataNewModel.md)| The object content | + +### Return type + + (empty response body) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: Not defined + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **DataControllerGetDataByID** +> ModelsDataModel DataControllerGetDataByID(ctx, iD) + + +Find rType data based on ID + +### Required Parameters + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **ctx** | **context.Context** | context for authentication, logging, cancellation, deadlines, tracing, etc. + **iD** | **string**| The ID of the data resource | + +### Return type + +[**ModelsDataModel**](models.DataModel.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: Not defined + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **DataControllerGetMultipleDataByIDs** +> []ModelsDataModel DataControllerGetMultipleDataByIDs(ctx, iDs) + + +Return Data object if found in the DB. 
Not found IDs will be ignored + +### Required Parameters + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **ctx** | **context.Context** | context for authentication, logging, cancellation, deadlines, tracing, etc. + **iDs** | [**[]string**](string.md)| List of data IDs | + +### Return type + +[**[]ModelsDataModel**](models.DataModel.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: Not defined + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + diff --git a/selfapi/docs/DatacenterApi.md b/selfapi/docs/DatacenterApi.md new file mode 100644 index 0000000..dc1b10e --- /dev/null +++ b/selfapi/docs/DatacenterApi.md @@ -0,0 +1,95 @@ +# \DatacenterApi + +All URIs are relative to *https://localhost:49618/v1* + +Method | HTTP request | Description +------------- | ------------- | ------------- +[**DatacenterControllerCreateDatacenter**](DatacenterApi.md#DatacenterControllerCreateDatacenter) | **Post** /datacenter/ | +[**DatacenterControllerGetMultipleDatacentersByIDs**](DatacenterApi.md#DatacenterControllerGetMultipleDatacentersByIDs) | **Get** /datacenter/multi/{IDs} | +[**DatacenterControllerGetOneDatacenter**](DatacenterApi.md#DatacenterControllerGetOneDatacenter) | **Get** /datacenter/{ID} | + + +# **DatacenterControllerCreateDatacenter** +> DatacenterControllerCreateDatacenter(ctx, body) + + +submit Datacenter object + +### Required Parameters + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **ctx** | **context.Context** | context for authentication, logging, cancellation, deadlines, tracing, etc. 
+ **body** | [**ModelsDatacenterNewModel**](ModelsDatacenterNewModel.md)| The object content | + +### Return type + + (empty response body) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: Not defined + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **DatacenterControllerGetMultipleDatacentersByIDs** +> []ModelsComputingModel DatacenterControllerGetMultipleDatacentersByIDs(ctx, iDs) + + +Return Datacenter objects if found in the DB. Not found IDs will be ignored + +### Required Parameters + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **ctx** | **context.Context** | context for authentication, logging, cancellation, deadlines, tracing, etc. + **iDs** | [**[]string**](string.md)| List of datacenter IDs | + +### Return type + +[**[]ModelsComputingModel**](models.ComputingModel.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: Not defined + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **DatacenterControllerGetOneDatacenter** +> ModelsDatacenterModel DatacenterControllerGetOneDatacenter(ctx, iD) + + +find datacenter by ID + +### Required Parameters + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **ctx** | **context.Context** | context for authentication, logging, cancellation, deadlines, tracing, etc. 
+ **iD** | **string**| the ID you want to get | + +### Return type + +[**ModelsDatacenterModel**](models.DatacenterModel.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: Not defined + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + diff --git a/selfapi/docs/ModelsComputingModel.md b/selfapi/docs/ModelsComputingModel.md new file mode 100644 index 0000000..030935d --- /dev/null +++ b/selfapi/docs/ModelsComputingModel.md @@ -0,0 +1,20 @@ +# ModelsComputingModel + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**ID** | **string** | | [default to null] +**Description** | **string** | | [optional] [default to null] +**ExecutionRequirements** | [***ModelsExecutionRequirementsModel**](models.ExecutionRequirementsModel.md) | | [optional] [default to null] +**License** | **string** | | [optional] [default to null] +**Logo** | **string** | | [optional] [default to null] +**Name** | **string** | Name of the computing | [optional] [default to null] +**Owner** | **string** | | [optional] [default to null] +**Price** | **int32** | | [optional] [default to null] +**Repository** | [***ModelsRepositoryModel**](models.RepositoryModel.md) | | [optional] [default to null] +**ShortDescription** | **string** | | [optional] [default to null] +**Type_** | **string** | | [optional] [default to null] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/selfapi/docs/ModelsComputingNewModel.md b/selfapi/docs/ModelsComputingNewModel.md new file mode 100644 index 0000000..7a0f539 --- /dev/null +++ b/selfapi/docs/ModelsComputingNewModel.md @@ -0,0 +1,19 @@ +# ModelsComputingNewModel + +## 
Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**Description** | **string** | | [default to null] +**ExecutionRequirements** | [***ModelsExecutionRequirementsModel**](models.ExecutionRequirementsModel.md) | | [optional] [default to null] +**License** | **string** | | [optional] [default to null] +**Logo** | **string** | | [default to null] +**Name** | **string** | Name of the computing | [default to null] +**Owner** | **string** | | [optional] [default to null] +**Price** | **int32** | | [optional] [default to null] +**Repository** | [***ModelsRepositoryModel**](models.RepositoryModel.md) | | [optional] [default to null] +**ShortDescription** | **string** | | [default to null] +**Type_** | **string** | | [default to null] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/selfapi/docs/ModelsComputingObject.md b/selfapi/docs/ModelsComputingObject.md new file mode 100644 index 0000000..6b90469 --- /dev/null +++ b/selfapi/docs/ModelsComputingObject.md @@ -0,0 +1,13 @@ +# ModelsComputingObject + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**DatacenterID** | **string** | Datacenter where the computing will be executed | [optional] [default to null] +**Inputs** | **[]string** | | [optional] [default to null] +**Outputs** | **[]string** | | [optional] [default to null] +**ReferenceID** | [***PrimitiveObjectId**](primitive.ObjectID.md) | Computing model ID | [optional] [default to null] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/selfapi/docs/ModelsDCstatus.md b/selfapi/docs/ModelsDCstatus.md new file mode 100644 index 0000000..fffee88 --- /dev/null +++ 
b/selfapi/docs/ModelsDCstatus.md @@ -0,0 +1,15 @@ +# ModelsDCstatus + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**Booked** | [***ModelsScheduleInfo**](models.ScheduleInfo.md) | | [optional] [default to null] +**DCname** | **string** | | [optional] [default to null] +**DCobjID** | **string** | | [optional] [default to null] +**ErrorMessage** | **string** | | [optional] [default to null] +**IsAvailable** | **bool** | | [optional] [default to null] +**IsReachable** | **bool** | | [optional] [default to null] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/selfapi/docs/ModelsDataModel.md b/selfapi/docs/ModelsDataModel.md new file mode 100644 index 0000000..388179d --- /dev/null +++ b/selfapi/docs/ModelsDataModel.md @@ -0,0 +1,19 @@ +# ModelsDataModel + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**ID** | **string** | | [default to null] +**Description** | **string** | | [optional] [default to null] +**Example** | **string** | base64 encoded data | [optional] [default to null] +**Ftype** | **string** | | [optional] [default to null] +**Location** | **string** | | [optional] [default to null] +**Logo** | **string** | | [optional] [default to null] +**Name** | **string** | Name of the data | [optional] [default to null] +**Protocol** | **[]string** | | [optional] [default to null] +**ShortDescription** | **string** | | [optional] [default to null] +**Type_** | **string** | Define type of data | [optional] [default to null] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/selfapi/docs/ModelsDataNewModel.md b/selfapi/docs/ModelsDataNewModel.md new file mode 100644 index 
0000000..082835d --- /dev/null +++ b/selfapi/docs/ModelsDataNewModel.md @@ -0,0 +1,18 @@ +# ModelsDataNewModel + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**Description** | **string** | | [default to null] +**Example** | **string** | base64 encoded data | [default to null] +**Ftype** | **string** | | [optional] [default to null] +**Location** | **string** | | [default to null] +**Logo** | **string** | | [default to null] +**Name** | **string** | Name of the data | [default to null] +**Protocol** | **[]string** | | [optional] [default to null] +**ShortDescription** | **string** | | [default to null] +**Type_** | **string** | Define type of data | [default to null] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/selfapi/docs/ModelsDataObject.md b/selfapi/docs/ModelsDataObject.md new file mode 100644 index 0000000..895e7c9 --- /dev/null +++ b/selfapi/docs/ModelsDataObject.md @@ -0,0 +1,10 @@ +# ModelsDataObject + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**ReferenceID** | [***PrimitiveObjectId**](primitive.ObjectID.md) | Data model ID | [optional] [default to null] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/selfapi/docs/ModelsDatacenterCpuModel.md b/selfapi/docs/ModelsDatacenterCpuModel.md new file mode 100644 index 0000000..94b967c --- /dev/null +++ b/selfapi/docs/ModelsDatacenterCpuModel.md @@ -0,0 +1,14 @@ +# ModelsDatacenterCpuModel + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**Architecture** | **string** | | [optional] [default to null] +**Cores** | **int32** | | [default to null] 
+**MinimumMemory** | **int32** | | [optional] [default to null] +**Platform** | **string** | | [optional] [default to null] +**Shared** | **bool** | | [optional] [default to null] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/selfapi/docs/ModelsDatacenterGpuModel.md b/selfapi/docs/ModelsDatacenterGpuModel.md new file mode 100644 index 0000000..e21255e --- /dev/null +++ b/selfapi/docs/ModelsDatacenterGpuModel.md @@ -0,0 +1,13 @@ +# ModelsDatacenterGpuModel + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**CudaCores** | **int32** | | [optional] [default to null] +**Memory** | **int32** | Units in MB | [optional] [default to null] +**Model** | **string** | | [optional] [default to null] +**TensorCores** | **int32** | | [optional] [default to null] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/selfapi/docs/ModelsDatacenterMemoryModel.md b/selfapi/docs/ModelsDatacenterMemoryModel.md new file mode 100644 index 0000000..8332114 --- /dev/null +++ b/selfapi/docs/ModelsDatacenterMemoryModel.md @@ -0,0 +1,11 @@ +# ModelsDatacenterMemoryModel + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**Ecc** | **bool** | | [optional] [default to null] +**Size** | **int32** | Units in MB | [optional] [default to null] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/selfapi/docs/ModelsDatacenterModel.md b/selfapi/docs/ModelsDatacenterModel.md new file mode 100644 index 0000000..0e7e65b --- /dev/null +++ b/selfapi/docs/ModelsDatacenterModel.md @@ -0,0 +1,22 @@ 
+# ModelsDatacenterModel + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**ID** | **string** | | [default to null] +**Acronym** | **string** | id of the DC | [optional] [default to null] +**BookingPrice** | **int64** | | [optional] [default to null] +**Cpu** | [***ModelsDatacenterCpuModel**](models.DatacenterCpuModel.md) | | [optional] [default to null] +**Description** | **string** | | [optional] [default to null] +**Gpu** | [**[]ModelsDatacenterGpuModel**](models.DatacenterGpuModel.md) | | [optional] [default to null] +**Hosts** | **[]string** | list of host:port | [optional] [default to null] +**Logo** | **string** | | [optional] [default to null] +**Name** | **string** | | [optional] [default to null] +**Owner** | **string** | | [optional] [default to null] +**Ram** | [***ModelsDatacenterMemoryModel**](models.DatacenterMemoryModel.md) | | [optional] [default to null] +**ShortDescription** | **string** | | [optional] [default to null] +**Type_** | **string** | | [optional] [default to null] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/selfapi/docs/ModelsDatacenterNewModel.md b/selfapi/docs/ModelsDatacenterNewModel.md new file mode 100644 index 0000000..1ff92f9 --- /dev/null +++ b/selfapi/docs/ModelsDatacenterNewModel.md @@ -0,0 +1,21 @@ +# ModelsDatacenterNewModel + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**Acronym** | **string** | id of the DC | [default to null] +**BookingPrice** | **int64** | | [optional] [default to null] +**Cpu** | [***ModelsDatacenterCpuModel**](models.DatacenterCpuModel.md) | | [default to null] +**Description** | **string** | | [default to null] +**Gpu** | [**[]ModelsDatacenterGpuModel**](models.DatacenterGpuModel.md) | | [default to null] +**Hosts** | **[]string** 
| list of host:port | [default to null] +**Logo** | **string** | | [default to null] +**Name** | **string** | | [default to null] +**Owner** | **string** | | [optional] [default to null] +**Ram** | [***ModelsDatacenterMemoryModel**](models.DatacenterMemoryModel.md) | | [default to null] +**ShortDescription** | **string** | | [default to null] +**Type_** | **string** | | [default to null] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/selfapi/docs/ModelsDatacenterObject.md b/selfapi/docs/ModelsDatacenterObject.md new file mode 100644 index 0000000..ca25aaa --- /dev/null +++ b/selfapi/docs/ModelsDatacenterObject.md @@ -0,0 +1,10 @@ +# ModelsDatacenterObject + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**ReferenceID** | [***PrimitiveObjectId**](primitive.ObjectID.md) | Data model ID | [optional] [default to null] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/selfapi/docs/ModelsExecutionRequirementsModel.md b/selfapi/docs/ModelsExecutionRequirementsModel.md new file mode 100644 index 0000000..c2e2851 --- /dev/null +++ b/selfapi/docs/ModelsExecutionRequirementsModel.md @@ -0,0 +1,15 @@ +# ModelsExecutionRequirementsModel + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**Cpus** | **int32** | | [default to null] +**DiskIo** | **string** | | [optional] [default to null] +**Gpus** | **int32** | Amount of GPUs needed | [optional] [default to null] +**Parallel** | **bool** | | [optional] [default to null] +**Ram** | **int32** | Units in MB | [default to null] +**ScalingModel** | **int32** | | [optional] [default to null] + +[[Back to Model 
list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/selfapi/docs/ModelsRepositoryModel.md b/selfapi/docs/ModelsRepositoryModel.md new file mode 100644 index 0000000..046f8f1 --- /dev/null +++ b/selfapi/docs/ModelsRepositoryModel.md @@ -0,0 +1,11 @@ +# ModelsRepositoryModel + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**Credentials** | **string** | | [optional] [default to null] +**Url** | **string** | | [optional] [default to null] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/selfapi/docs/ModelsScheduleDb.md b/selfapi/docs/ModelsScheduleDb.md new file mode 100644 index 0000000..1a3cd78 --- /dev/null +++ b/selfapi/docs/ModelsScheduleDb.md @@ -0,0 +1,13 @@ +# ModelsScheduleDb + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**ResourceQty** | [***ModelsExecutionRequirementsModel**](models.ExecutionRequirementsModel.md) | | [optional] [default to null] +**StartDate** | **string** | | [optional] [default to null] +**StopDate** | **string** | | [optional] [default to null] +**Workflow** | **string** | | [optional] [default to null] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/selfapi/docs/ModelsScheduleInfo.md b/selfapi/docs/ModelsScheduleInfo.md new file mode 100644 index 0000000..7df98b1 --- /dev/null +++ b/selfapi/docs/ModelsScheduleInfo.md @@ -0,0 +1,11 @@ +# ModelsScheduleInfo + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**NextExecutions** | **[]string** | | [optional] [default to null] 
+**Total** | **int64** | | [optional] [default to null] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/selfapi/docs/ModelsScheduleTime.md b/selfapi/docs/ModelsScheduleTime.md new file mode 100644 index 0000000..f6d5731 --- /dev/null +++ b/selfapi/docs/ModelsScheduleTime.md @@ -0,0 +1,9 @@ +# ModelsScheduleTime + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/selfapi/docs/ModelsSearchResult.md b/selfapi/docs/ModelsSearchResult.md new file mode 100644 index 0000000..e064143 --- /dev/null +++ b/selfapi/docs/ModelsSearchResult.md @@ -0,0 +1,13 @@ +# ModelsSearchResult + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**Computing** | [**[]ModelsComputingModel**](models.ComputingModel.md) | | [default to null] +**Data** | [**[]ModelsDataModel**](models.DataModel.md) | | [optional] [default to null] +**Datacenter** | [**[]ModelsDatacenterModel**](models.DatacenterModel.md) | | [optional] [default to null] +**Storage** | [**[]ModelsStorageModel**](models.StorageModel.md) | | [optional] [default to null] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/selfapi/docs/ModelsStorageModel.md b/selfapi/docs/ModelsStorageModel.md new file mode 100644 index 0000000..67da13e --- /dev/null +++ b/selfapi/docs/ModelsStorageModel.md @@ -0,0 +1,21 @@ +# ModelsStorageModel + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**DCacronym** | **string** | 
Unique ID of the DC where it is the storage | [optional] [default to null] +**ID** | **string** | | [default to null] +**BookingPrice** | **int32** | | [optional] [default to null] +**Description** | **string** | | [optional] [default to null] +**Encryption** | **bool** | | [optional] [default to null] +**Logo** | **string** | | [optional] [default to null] +**Name** | **string** | | [optional] [default to null] +**Redundancy** | **string** | | [optional] [default to null] +**ShortDescription** | **string** | | [optional] [default to null] +**Size** | **int32** | | [optional] [default to null] +**Throughput** | **string** | | [optional] [default to null] +**Type_** | **string** | | [optional] [default to null] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/selfapi/docs/ModelsStorageNewModel.md b/selfapi/docs/ModelsStorageNewModel.md new file mode 100644 index 0000000..ee1d552 --- /dev/null +++ b/selfapi/docs/ModelsStorageNewModel.md @@ -0,0 +1,20 @@ +# ModelsStorageNewModel + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**DCacronym** | **string** | Unique ID of the DC where it is the storage | [default to null] +**BookingPrice** | **int32** | | [optional] [default to null] +**Description** | **string** | | [default to null] +**Encryption** | **bool** | | [optional] [default to null] +**Logo** | **string** | | [default to null] +**Name** | **string** | | [default to null] +**Redundancy** | **string** | | [optional] [default to null] +**ShortDescription** | **string** | | [default to null] +**Size** | **int32** | | [default to null] +**Throughput** | **string** | | [optional] [default to null] +**Type_** | **string** | | [default to null] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) 
[[Back to README]](../README.md) + + diff --git a/selfapi/docs/ModelsStorageObject.md b/selfapi/docs/ModelsStorageObject.md new file mode 100644 index 0000000..244eab5 --- /dev/null +++ b/selfapi/docs/ModelsStorageObject.md @@ -0,0 +1,12 @@ +# ModelsStorageObject + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**Inputs** | **[]string** | | [optional] [default to null] +**Outputs** | **[]string** | | [optional] [default to null] +**ReferenceID** | [***PrimitiveObjectId**](primitive.ObjectID.md) | Storage model ID | [optional] [default to null] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/selfapi/docs/ModelsWorkflow.md b/selfapi/docs/ModelsWorkflow.md new file mode 100644 index 0000000..f023ba2 --- /dev/null +++ b/selfapi/docs/ModelsWorkflow.md @@ -0,0 +1,15 @@ +# ModelsWorkflow + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**MxgraphXML** | **string** | State of the mxgraph | [optional] [default to null] +**Computing** | [***ModelsComputingObject**](models.ComputingObject.md) | | [optional] [default to null] +**Data** | [***ModelsDataObject**](models.DataObject.md) | | [optional] [default to null] +**Datacenter** | [***ModelsDatacenterObject**](models.DatacenterObject.md) | | [optional] [default to null] +**Schedules** | [***ModelsWorkflowSchedule**](models.WorkflowSchedule.md) | | [optional] [default to null] +**Storage** | [***ModelsStorageObject**](models.StorageObject.md) | | [optional] [default to null] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/selfapi/docs/ModelsWorkflowSchedule.md b/selfapi/docs/ModelsWorkflowSchedule.md new file mode 100644 index 
0000000..516c913 --- /dev/null +++ b/selfapi/docs/ModelsWorkflowSchedule.md @@ -0,0 +1,16 @@ +# ModelsWorkflowSchedule + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**IsService** | **bool** | Service: true, Task: false | [optional] [default to null] +**StartDate** | **string** | | [optional] [default to null] +**StopDate** | **string** | | [optional] [default to null] +**Cron** | **string** | | [optional] [default to null] +**Duration** | **int32** | Duration in seconds | [optional] [default to null] +**Events** | **string** | | [optional] [default to null] +**IsBooked** | **bool** | | [optional] [default to null] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/selfapi/docs/ModelsWorkspace.md b/selfapi/docs/ModelsWorkspace.md new file mode 100644 index 0000000..d7f36b9 --- /dev/null +++ b/selfapi/docs/ModelsWorkspace.md @@ -0,0 +1,15 @@ +# ModelsWorkspace + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**Workflows** | [***ModelsWorkflow**](models.Workflow.md) | | [optional] [default to null] +**Computing** | **[]string** | | [optional] [default to null] +**Data** | **[]string** | | [optional] [default to null] +**Datacenter** | **[]string** | | [optional] [default to null] +**Storage** | **[]string** | | [optional] [default to null] +**UserId** | **string** | | [optional] [default to null] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/selfapi/docs/ModelsWorkspaceModel.md b/selfapi/docs/ModelsWorkspaceModel.md new file mode 100644 index 0000000..bf646a1 --- /dev/null +++ b/selfapi/docs/ModelsWorkspaceModel.md @@ -0,0 +1,14 @@ +# ModelsWorkspaceModel + +## Properties 
+Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**Computing** | [**[]ModelsComputingModel**](models.ComputingModel.md) | | [optional] [default to null] +**Data** | [**[]ModelsDataModel**](models.DataModel.md) | | [optional] [default to null] +**Datacenter** | [**[]ModelsDatacenterModel**](models.DatacenterModel.md) | | [optional] [default to null] +**Storage** | [**[]ModelsStorageModel**](models.StorageModel.md) | | [optional] [default to null] +**UserId** | **string** | | [optional] [default to null] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/selfapi/docs/PrimitiveObjectId.md b/selfapi/docs/PrimitiveObjectId.md new file mode 100644 index 0000000..011ef59 --- /dev/null +++ b/selfapi/docs/PrimitiveObjectId.md @@ -0,0 +1,9 @@ +# PrimitiveObjectId + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/selfapi/docs/ScheduleApi.md b/selfapi/docs/ScheduleApi.md new file mode 100644 index 0000000..73a6e70 --- /dev/null +++ b/selfapi/docs/ScheduleApi.md @@ -0,0 +1,164 @@ +# \ScheduleApi + +All URIs are relative to *https://localhost:49618/v1* + +Method | HTTP request | Description +------------- | ------------- | ------------- +[**ScheduleControllerCheckIfScheduleCanBeCreatedInThisDC**](ScheduleApi.md#ScheduleControllerCheckIfScheduleCanBeCreatedInThisDC) | **Post** /schedule/check | +[**ScheduleControllerCreateSchedule**](ScheduleApi.md#ScheduleControllerCreateSchedule) | **Post** /schedule/book | +[**ScheduleControllerGetNextSchedule**](ScheduleApi.md#ScheduleControllerGetNextSchedule) | **Get** /schedule/next | 
+[**ScheduleControllerGetPreviousSchedule**](ScheduleApi.md#ScheduleControllerGetPreviousSchedule) | **Get** /schedule/previous | +[**ScheduleControllerGetSchedules**](ScheduleApi.md#ScheduleControllerGetSchedules) | **Get** /schedule/ | + + +# **ScheduleControllerCheckIfScheduleCanBeCreatedInThisDC** +> ScheduleControllerCheckIfScheduleCanBeCreatedInThisDC(ctx, cron, duration, startDate, stopDate, requirements) + + +Check for availability of this DC + +### Required Parameters + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **ctx** | **context.Context** | context for authentication, logging, cancellation, deadlines, tracing, etc. + **cron** | **string**| Cron syntax | + **duration** | **int32**| Duration in seconds | + **startDate** | [**interface{}**](.md)| RFC3339 time for startDate | + **stopDate** | [**interface{}**](.md)| RFC3339 time for stopDate | + **requirements** | [**ModelsExecutionRequirementsModel**](ModelsExecutionRequirementsModel.md)| The object content | + +### Return type + + (empty response body) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: Not defined + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **ScheduleControllerCreateSchedule** +> ModelsScheduleInfo ScheduleControllerCreateSchedule(ctx, dcName, workflowName, cron, duration, startDate, stopDate, requirements) + + +Create schedule for a workflow. It will return some future executions just as information + +### Required Parameters + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **ctx** | **context.Context** | context for authentication, logging, cancellation, deadlines, tracing, etc. 
+ **dcName** | **string**| Name of the node (oc-catalog) from where the workflow comes. | + **workflowName** | **string**| Workflow Name | + **cron** | **string**| Cron syntax with year. If no year is specified, will use the current | + **duration** | **int32**| Duration in seconds | + **startDate** | [**interface{}**](.md)| RFC3339 time for startDate | + **stopDate** | [**interface{}**](.md)| RFC3339 time for stopDate | + **requirements** | [**ModelsExecutionRequirementsModel**](ModelsExecutionRequirementsModel.md)| The object content | + +### Return type + +[**ModelsScheduleInfo**](models.ScheduleInfo.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: Not defined + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **ScheduleControllerGetNextSchedule** +> TimeTime ScheduleControllerGetNextSchedule(ctx, baseDate) + + +Given a date, get the next date where there is at least one schedule. If no hours specified, will assume 00:00 + +### Required Parameters + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **ctx** | **context.Context** | context for authentication, logging, cancellation, deadlines, tracing, etc. 
+ **baseDate** | [**interface{}**](.md)| Base date | + +### Return type + +[**TimeTime**](*time.Time.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: Not defined + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **ScheduleControllerGetPreviousSchedule** +> TimeTime ScheduleControllerGetPreviousSchedule(ctx, baseDate) + + +Given a date, get the previous date where there is at least one schedule. If no hours specified, will assume 00:00 + +### Required Parameters + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **ctx** | **context.Context** | context for authentication, logging, cancellation, deadlines, tracing, etc. + **baseDate** | [**interface{}**](.md)| Base date | + +### Return type + +[**TimeTime**](*time.Time.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: Not defined + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **ScheduleControllerGetSchedules** +> []ModelsScheduleDb ScheduleControllerGetSchedules(ctx, startDate, stopDate) + + +Get a list of next startDates schedules (inclusive). If timezone is not specified, will assume UTC + +### Required Parameters + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **ctx** | **context.Context** | context for authentication, logging, cancellation, deadlines, tracing, etc. 
+ **startDate** | [**interface{}**](.md)| Start date | + **stopDate** | [**interface{}**](.md)| End date | + +### Return type + +[**[]ModelsScheduleDb**](models.ScheduleDB.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: Not defined + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + diff --git a/selfapi/docs/SearchApi.md b/selfapi/docs/SearchApi.md new file mode 100644 index 0000000..b21faf9 --- /dev/null +++ b/selfapi/docs/SearchApi.md @@ -0,0 +1,37 @@ +# \SearchApi + +All URIs are relative to *https://localhost:49618/v1* + +Method | HTTP request | Description +------------- | ------------- | ------------- +[**SearchControllerSearchByWord**](SearchApi.md#SearchControllerSearchByWord) | **Get** /search/byWord | + + +# **SearchControllerSearchByWord** +> ModelsSearchResult SearchControllerSearchByWord(ctx, word) + + +find resources by word + +### Required Parameters + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **ctx** | **context.Context** | context for authentication, logging, cancellation, deadlines, tracing, etc. 
+ **word** | **string**| Word to search across all resources | + +### Return type + +[**ModelsSearchResult**](models.SearchResult.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: Not defined + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + diff --git a/selfapi/docs/StorageApi.md b/selfapi/docs/StorageApi.md new file mode 100644 index 0000000..c024202 --- /dev/null +++ b/selfapi/docs/StorageApi.md @@ -0,0 +1,95 @@ +# \StorageApi + +All URIs are relative to *https://localhost:49618/v1* + +Method | HTTP request | Description +------------- | ------------- | ------------- +[**StorageControllerCreateStorage**](StorageApi.md#StorageControllerCreateStorage) | **Post** /storage/ | +[**StorageControllerGet**](StorageApi.md#StorageControllerGet) | **Get** /storage/{ID} | +[**StorageControllerGetMultipleStoragesByIDs**](StorageApi.md#StorageControllerGetMultipleStoragesByIDs) | **Get** /storage/multi/{IDs} | + + +# **StorageControllerCreateStorage** +> StorageControllerCreateStorage(ctx, body) + + +submit storage object + +### Required Parameters + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **ctx** | **context.Context** | context for authentication, logging, cancellation, deadlines, tracing, etc. 
+ **body** | [**ModelsStorageNewModel**](ModelsStorageNewModel.md)| The object content | + +### Return type + + (empty response body) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: Not defined + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **StorageControllerGet** +> ModelsStorageModel StorageControllerGet(ctx, iD) + + +find storage by ID + +### Required Parameters + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **ctx** | **context.Context** | context for authentication, logging, cancellation, deadlines, tracing, etc. + **iD** | **string**| the ID you want to get | + +### Return type + +[**ModelsStorageModel**](models.StorageModel.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: Not defined + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **StorageControllerGetMultipleStoragesByIDs** +> []ModelsComputingModel StorageControllerGetMultipleStoragesByIDs(ctx, iDs) + + +Return Storage objects if found in the DB. Not found IDs will be ignored + +### Required Parameters + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **ctx** | **context.Context** | context for authentication, logging, cancellation, deadlines, tracing, etc. 
+ **iDs** | [**[]string**](string.md)| List of storage IDs | + +### Return type + +[**[]ModelsComputingModel**](models.ComputingModel.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: Not defined + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + diff --git a/selfapi/docs/TimeTime.md b/selfapi/docs/TimeTime.md new file mode 100644 index 0000000..f1b1d48 --- /dev/null +++ b/selfapi/docs/TimeTime.md @@ -0,0 +1,9 @@ +# TimeTime + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/selfapi/docs/UserApi.md b/selfapi/docs/UserApi.md new file mode 100644 index 0000000..cb8e3f7 --- /dev/null +++ b/selfapi/docs/UserApi.md @@ -0,0 +1,63 @@ +# \UserApi + +All URIs are relative to *https://localhost:49618/v1* + +Method | HTTP request | Description +------------- | ------------- | ------------- +[**UserControllerLogin**](UserApi.md#UserControllerLogin) | **Get** /user/login | +[**UserControllerLogout**](UserApi.md#UserControllerLogout) | **Get** /user/logout | + + +# **UserControllerLogin** +> UserControllerLogin(ctx, username, password) + + +Logs user into the system + +### Required Parameters + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **ctx** | **context.Context** | context for authentication, logging, cancellation, deadlines, tracing, etc. 
+ **username** | **string**| The username for login | + **password** | **string**| The password for login | + +### Return type + + (empty response body) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: Not defined + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **UserControllerLogout** +> UserControllerLogout(ctx, ) + + +Logs out current logged in user session + +### Required Parameters +This endpoint does not need any parameter. + +### Return type + + (empty response body) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: Not defined + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + diff --git a/selfapi/docs/WorkflowApi.md b/selfapi/docs/WorkflowApi.md new file mode 100644 index 0000000..2c29535 --- /dev/null +++ b/selfapi/docs/WorkflowApi.md @@ -0,0 +1,345 @@ +# \WorkflowApi + +All URIs are relative to *https://localhost:49618/v1* + +Method | HTTP request | Description +------------- | ------------- | ------------- +[**WorkflowControllerAddNewObjectToAWorkflow**](WorkflowApi.md#WorkflowControllerAddNewObjectToAWorkflow) | **Post** /workflow/{workflowName}/add | +[**WorkflowControllerBookSchedule**](WorkflowApi.md#WorkflowControllerBookSchedule) | **Post** /workflow/{workflowName}/schedule/book | +[**WorkflowControllerCheckSchedule**](WorkflowApi.md#WorkflowControllerCheckSchedule) | **Get** /workflow/{workflowName}/schedule/check | +[**WorkflowControllerCreateANewWorkflow**](WorkflowApi.md#WorkflowControllerCreateANewWorkflow) | **Post** /workflow/ | 
+[**WorkflowControllerCreateARealtionshipBetweenTwoRobjects**](WorkflowApi.md#WorkflowControllerCreateARealtionshipBetweenTwoRobjects) | **Post** /workflow/{workflowName}/link | +[**WorkflowControllerGetMxGraphLastStatus**](WorkflowApi.md#WorkflowControllerGetMxGraphLastStatus) | **Get** /workflow/{workflowName}/mxGraphParser | +[**WorkflowControllerGetSchedule**](WorkflowApi.md#WorkflowControllerGetSchedule) | **Get** /workflow/{workflowName}/schedule | +[**WorkflowControllerGetWorkflow**](WorkflowApi.md#WorkflowControllerGetWorkflow) | **Get** /workflow/{workflowName} | +[**WorkflowControllerListWorkflows**](WorkflowApi.md#WorkflowControllerListWorkflows) | **Get** /workflow/ | +[**WorkflowControllerParseMxGraph**](WorkflowApi.md#WorkflowControllerParseMxGraph) | **Post** /workflow/{workflowName}/mxGraphParser | +[**WorkflowControllerSetSchedule**](WorkflowApi.md#WorkflowControllerSetSchedule) | **Put** /workflow/{workflowName}/schedule | + + +# **WorkflowControllerAddNewObjectToAWorkflow** +> WorkflowControllerAddNewObjectToAWorkflow(ctx, workflowName, rID) + + +Create a Rtype object from already added resources to the workspace + +### Required Parameters + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **ctx** | **context.Context** | context for authentication, logging, cancellation, deadlines, tracing, etc. 
+ **workflowName** | **string**| workflow Name | + **rID** | **string**| rID of already existing item in Workspace | + +### Return type + + (empty response body) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: Not defined + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **WorkflowControllerBookSchedule** +> []ModelsDCstatus WorkflowControllerBookSchedule(ctx, workflowName) + + +Book a schedule in all DCs of the workflow. Must set a desired schedule first! + +### Required Parameters + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **ctx** | **context.Context** | context for authentication, logging, cancellation, deadlines, tracing, etc. + **workflowName** | **string**| Workflow Name | + +### Return type + +[**[]ModelsDCstatus**](models.DCstatus.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: Not defined + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **WorkflowControllerCheckSchedule** +> []ModelsDCstatus WorkflowControllerCheckSchedule(ctx, workflowName) + + +Check if we can schedule the project in other DCs. Must set a desired schedule first! + +### Required Parameters + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **ctx** | **context.Context** | context for authentication, logging, cancellation, deadlines, tracing, etc. 
+ **workflowName** | **string**| Workflow Name | + +### Return type + +[**[]ModelsDCstatus**](models.DCstatus.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: Not defined + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **WorkflowControllerCreateANewWorkflow** +> WorkflowControllerCreateANewWorkflow(ctx, workflowName) + + +Create a name for the new workflow + +### Required Parameters + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **ctx** | **context.Context** | context for authentication, logging, cancellation, deadlines, tracing, etc. + **workflowName** | **string**| Name of the workflow | + +### Return type + + (empty response body) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: Not defined + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **WorkflowControllerCreateARealtionshipBetweenTwoRobjects** +> WorkflowControllerCreateARealtionshipBetweenTwoRobjects(ctx, workflowName, rObjIDsource, isInput, rObjIDtarger) + + +Create a Rtype object from already added resources to the workspace + +### Required Parameters + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **ctx** | **context.Context** | context for authentication, logging, cancellation, deadlines, tracing, etc. + **workflowName** | **string**| Workflow Name | + **rObjIDsource** | **string**| Robject source. 
Usually Data | + **isInput** | **bool**| If the operation is for input (true) linkage or output (false) | + **rObjIDtarger** | **string**| Robject where will be written the association | + +### Return type + + (empty response body) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: Not defined + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **WorkflowControllerGetMxGraphLastStatus** +> WorkflowControllerGetMxGraphLastStatus(ctx, workflowName) + + +Obtain the last mxgraph XML status from the workflow + +### Required Parameters + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **ctx** | **context.Context** | context for authentication, logging, cancellation, deadlines, tracing, etc. + **workflowName** | **string**| Workflow Name | + +### Return type + + (empty response body) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: Not defined + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **WorkflowControllerGetSchedule** +> ModelsScheduleTime WorkflowControllerGetSchedule(ctx, workflowName) + + +Obtain the desired schedule of this workflow + +### Required Parameters + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **ctx** | **context.Context** | context for authentication, logging, cancellation, deadlines, tracing, etc. 
+ **workflowName** | **string**| Workflow Name | + +### Return type + +[**ModelsScheduleTime**](models.ScheduleTime.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: Not defined + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **WorkflowControllerGetWorkflow** +> ModelsWorkflow WorkflowControllerGetWorkflow(ctx, workflowName) + + +Get a workflow by name + +### Required Parameters + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **ctx** | **context.Context** | context for authentication, logging, cancellation, deadlines, tracing, etc. + **workflowName** | **string**| Workflow Name | + +### Return type + +[**ModelsWorkflow**](models.Workflow.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: Not defined + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **WorkflowControllerListWorkflows** +> WorkflowControllerListWorkflows(ctx, ) + + +List available workflows + +### Required Parameters +This endpoint does not need any parameter. 
+ +### Return type + + (empty response body) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: Not defined + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **WorkflowControllerParseMxGraph** +> WorkflowControllerParseMxGraph(ctx, workflowName, xmlData) + + +If we use this aproach to transofrm mxgraph representation in our representation, we should not use other API calls for modify the project structure or we'll have inconsistencies. + +### Required Parameters + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **ctx** | **context.Context** | context for authentication, logging, cancellation, deadlines, tracing, etc. + **workflowName** | **string**| Workflow Name | + **xmlData** | **string**| Xml representation of the workflow | + +### Return type + + (empty response body) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: Not defined + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **WorkflowControllerSetSchedule** +> ModelsScheduleInfo WorkflowControllerSetSchedule(ctx, workflowName, isService, startDate, stopDate, optional) + + +Set desired schedule by the user. No other effects a part of saving the user input + +### Required Parameters + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **ctx** | **context.Context** | context for authentication, logging, cancellation, deadlines, tracing, etc. 
+ **workflowName** | **string**| Workflow Name | + **isService** | **bool**| True: Service, False: Task | + **startDate** | [**interface{}**](.md)| RFC3339 time for startDate | + **stopDate** | [**interface{}**](.md)| RFC3339 time for stopDate | + **optional** | ***WorkflowApiWorkflowControllerSetScheduleOpts** | optional parameters | nil if no parameters + +### Optional Parameters +Optional parameters are passed through a pointer to a WorkflowApiWorkflowControllerSetScheduleOpts struct + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + + + + + **events** | **optional.String**| List of events separated by comma | + **cronString** | **optional.String**| Cron string | + **duration** | **optional.Int32**| Duration in seconds | + +### Return type + +[**ModelsScheduleInfo**](models.ScheduleInfo.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: Not defined + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + diff --git a/selfapi/docs/WorkspaceApi.md b/selfapi/docs/WorkspaceApi.md new file mode 100644 index 0000000..1fec5d8 --- /dev/null +++ b/selfapi/docs/WorkspaceApi.md @@ -0,0 +1,118 @@ +# \WorkspaceApi + +All URIs are relative to *https://localhost:49618/v1* + +Method | HTTP request | Description +------------- | ------------- | ------------- +[**WorkspaceControllerAddModelToWorkspace**](WorkspaceApi.md#WorkspaceControllerAddModelToWorkspace) | **Post** /workspace/ | +[**WorkspaceControllerDeleteElementFromUserWorkspace**](WorkspaceApi.md#WorkspaceControllerDeleteElementFromUserWorkspace) | **Delete** /workspace/ | +[**WorkspaceControllerGetFullWorkspace**](WorkspaceApi.md#WorkspaceControllerGetFullWorkspace) | **Get** /workspace/list_model | 
+[**WorkspaceControllerGetWorkspace**](WorkspaceApi.md#WorkspaceControllerGetWorkspace) | **Get** /workspace/list | + + +# **WorkspaceControllerAddModelToWorkspace** +> WorkspaceControllerAddModelToWorkspace(ctx, id, rtype) + + +Insert a resource in the workspace + +### Required Parameters + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **ctx** | **context.Context** | context for authentication, logging, cancellation, deadlines, tracing, etc. + **id** | **string**| ID of a resource | + **rtype** | **string**| Type of resource | + +### Return type + + (empty response body) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: Not defined + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **WorkspaceControllerDeleteElementFromUserWorkspace** +> WorkspaceControllerDeleteElementFromUserWorkspace(ctx, id, rtype) + + +Remove a resource from the workspace + +### Required Parameters + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **ctx** | **context.Context** | context for authentication, logging, cancellation, deadlines, tracing, etc. 
+ **id** | **string**| ID of a resource | + **rtype** | **string**| Type of resource | + +### Return type + + (empty response body) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: Not defined + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **WorkspaceControllerGetFullWorkspace** +> ModelsWorkspaceModel WorkspaceControllerGetFullWorkspace(ctx, ) + + +Get full workspace elements based on user_id token + +### Required Parameters +This endpoint does not need any parameter. + +### Return type + +[**ModelsWorkspaceModel**](models.WorkspaceModel.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: Not defined + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **WorkspaceControllerGetWorkspace** +> ModelsWorkspace WorkspaceControllerGetWorkspace(ctx, ) + + +Get workspace elements based on user_id token + +### Required Parameters +This endpoint does not need any parameter. 
+ +### Return type + +[**ModelsWorkspace**](models.Workspace.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: Not defined + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + diff --git a/selfapi/git_push.sh b/selfapi/git_push.sh new file mode 100644 index 0000000..ae01b18 --- /dev/null +++ b/selfapi/git_push.sh @@ -0,0 +1,52 @@ +#!/bin/sh +# ref: https://help.github.com/articles/adding-an-existing-project-to-github-using-the-command-line/ +# +# Usage example: /bin/sh ./git_push.sh wing328 swagger-petstore-perl "minor update" + +git_user_id=$1 +git_repo_id=$2 +release_note=$3 + +if [ "$git_user_id" = "" ]; then + git_user_id="GIT_USER_ID" + echo "[INFO] No command line input provided. Set \$git_user_id to $git_user_id" +fi + +if [ "$git_repo_id" = "" ]; then + git_repo_id="GIT_REPO_ID" + echo "[INFO] No command line input provided. Set \$git_repo_id to $git_repo_id" +fi + +if [ "$release_note" = "" ]; then + release_note="Minor update" + echo "[INFO] No command line input provided. Set \$release_note to $release_note" +fi + +# Initialize the local directory as a Git repository +git init + +# Adds the files in the local repository and stages them for commit. +git add . + +# Commits the tracked changes and prepares them to be pushed to a remote repository. +git commit -m "$release_note" + +# Sets the new remote +git_remote=`git remote` +if [ "$git_remote" = "" ]; then # git remote not defined + + if [ "$GIT_TOKEN" = "" ]; then + echo "[INFO] \$GIT_TOKEN (environment variable) is not set. Using the git credential in your environment." 
+ git remote add origin https://github.com/${git_user_id}/${git_repo_id}.git + else + git remote add origin https://${git_user_id}:${GIT_TOKEN}@github.com/${git_user_id}/${git_repo_id}.git + fi + +fi + +git pull origin master + +# Pushes (Forces) the changes in the local repository up to the remote repository +echo "Git pushing to https://github.com/${git_user_id}/${git_repo_id}.git" +git push origin master 2>&1 | grep -v 'To https' + diff --git a/selfapi/model_models_computing_model.go b/selfapi/model_models_computing_model.go new file mode 100644 index 0000000..2c75140 --- /dev/null +++ b/selfapi/model_models_computing_model.go @@ -0,0 +1,26 @@ +/* + * oc-catalog API + * + * Backend of the oc-search project + * + * API version: 1.0.0 + * Contact: opencloud@irt-saintexupery.com + * Generated by: Swagger Codegen (https://github.com/swagger-api/swagger-codegen.git) + */ + +package swagger + +type ModelsComputingModel struct { + ID string `json:"ID"` + Description string `json:"description,omitempty"` + ExecutionRequirements *ModelsExecutionRequirementsModel `json:"execution_requirements,omitempty"` + License string `json:"license,omitempty"` + Logo string `json:"logo,omitempty"` + // Name of the computing + Name string `json:"name,omitempty"` + Owner string `json:"owner,omitempty"` + Price int32 `json:"price,omitempty"` + Repository *ModelsRepositoryModel `json:"repository,omitempty"` + ShortDescription string `json:"short_description,omitempty"` + Type_ string `json:"type,omitempty"` +} diff --git a/selfapi/model_models_computing_new_model.go b/selfapi/model_models_computing_new_model.go new file mode 100644 index 0000000..8841cbd --- /dev/null +++ b/selfapi/model_models_computing_new_model.go @@ -0,0 +1,25 @@ +/* + * oc-catalog API + * + * Backend of the oc-search project + * + * API version: 1.0.0 + * Contact: opencloud@irt-saintexupery.com + * Generated by: Swagger Codegen (https://github.com/swagger-api/swagger-codegen.git) + */ + +package swagger + +type 
ModelsComputingNewModel struct { + Description string `json:"description"` + ExecutionRequirements *ModelsExecutionRequirementsModel `json:"execution_requirements,omitempty"` + License string `json:"license,omitempty"` + Logo string `json:"logo"` + // Name of the computing + Name string `json:"name"` + Owner string `json:"owner,omitempty"` + Price int32 `json:"price,omitempty"` + Repository *ModelsRepositoryModel `json:"repository,omitempty"` + ShortDescription string `json:"short_description"` + Type_ string `json:"type"` +} diff --git a/selfapi/model_models_computing_object.go b/selfapi/model_models_computing_object.go new file mode 100644 index 0000000..61e8fca --- /dev/null +++ b/selfapi/model_models_computing_object.go @@ -0,0 +1,20 @@ +/* + * oc-catalog API + * + * Backend of the oc-search project + * + * API version: 1.0.0 + * Contact: opencloud@irt-saintexupery.com + * Generated by: Swagger Codegen (https://github.com/swagger-api/swagger-codegen.git) + */ + +package swagger + +type ModelsComputingObject struct { + // Datacenter where the computing will be executed + DatacenterID string `json:"datacenterID,omitempty"` + Inputs []string `json:"inputs,omitempty"` + Outputs []string `json:"outputs,omitempty"` + // Computing model ID + ReferenceID *PrimitiveObjectId `json:"referenceID,omitempty"` +} diff --git a/selfapi/model_models_d_cstatus.go b/selfapi/model_models_d_cstatus.go new file mode 100644 index 0000000..58fd69e --- /dev/null +++ b/selfapi/model_models_d_cstatus.go @@ -0,0 +1,20 @@ +/* + * oc-catalog API + * + * Backend of the oc-search project + * + * API version: 1.0.0 + * Contact: opencloud@irt-saintexupery.com + * Generated by: Swagger Codegen (https://github.com/swagger-api/swagger-codegen.git) + */ + +package swagger + +type ModelsDCstatus struct { + Booked *ModelsScheduleInfo `json:"Booked,omitempty"` + DCname string `json:"DCname,omitempty"` + DCobjID string `json:"DCobjID,omitempty"` + ErrorMessage string `json:"ErrorMessage,omitempty"` + 
IsAvailable bool `json:"IsAvailable,omitempty"` + IsReachable bool `json:"IsReachable,omitempty"` +} diff --git a/selfapi/model_models_data_model.go b/selfapi/model_models_data_model.go new file mode 100644 index 0000000..1019309 --- /dev/null +++ b/selfapi/model_models_data_model.go @@ -0,0 +1,27 @@ +/* + * oc-catalog API + * + * Backend of the oc-search project + * + * API version: 1.0.0 + * Contact: opencloud@irt-saintexupery.com + * Generated by: Swagger Codegen (https://github.com/swagger-api/swagger-codegen.git) + */ + +package swagger + +type ModelsDataModel struct { + ID string `json:"ID"` + Description string `json:"description,omitempty"` + // base64 encoded data + Example string `json:"example,omitempty"` + Ftype string `json:"ftype,omitempty"` + Location string `json:"location,omitempty"` + Logo string `json:"logo,omitempty"` + // Name of the data + Name string `json:"name,omitempty"` + Protocol []string `json:"protocol,omitempty"` + ShortDescription string `json:"short_description,omitempty"` + // Define type of data + Type_ string `json:"type,omitempty"` +} diff --git a/selfapi/model_models_data_new_model.go b/selfapi/model_models_data_new_model.go new file mode 100644 index 0000000..ffc520c --- /dev/null +++ b/selfapi/model_models_data_new_model.go @@ -0,0 +1,26 @@ +/* + * oc-catalog API + * + * Backend of the oc-search project + * + * API version: 1.0.0 + * Contact: opencloud@irt-saintexupery.com + * Generated by: Swagger Codegen (https://github.com/swagger-api/swagger-codegen.git) + */ + +package swagger + +type ModelsDataNewModel struct { + Description string `json:"description"` + // base64 encoded data + Example string `json:"example"` + Ftype string `json:"ftype,omitempty"` + Location string `json:"location"` + Logo string `json:"logo"` + // Name of the data + Name string `json:"name"` + Protocol []string `json:"protocol,omitempty"` + ShortDescription string `json:"short_description"` + // Define type of data + Type_ string `json:"type"` +} 
diff --git a/selfapi/model_models_data_object.go b/selfapi/model_models_data_object.go new file mode 100644 index 0000000..376dcb0 --- /dev/null +++ b/selfapi/model_models_data_object.go @@ -0,0 +1,16 @@ +/* + * oc-catalog API + * + * Backend of the oc-search project + * + * API version: 1.0.0 + * Contact: opencloud@irt-saintexupery.com + * Generated by: Swagger Codegen (https://github.com/swagger-api/swagger-codegen.git) + */ + +package swagger + +type ModelsDataObject struct { + // Data model ID + ReferenceID *PrimitiveObjectId `json:"referenceID,omitempty"` +} diff --git a/selfapi/model_models_datacenter_cpu_model.go b/selfapi/model_models_datacenter_cpu_model.go new file mode 100644 index 0000000..c7599ae --- /dev/null +++ b/selfapi/model_models_datacenter_cpu_model.go @@ -0,0 +1,19 @@ +/* + * oc-catalog API + * + * Backend of the oc-search project + * + * API version: 1.0.0 + * Contact: opencloud@irt-saintexupery.com + * Generated by: Swagger Codegen (https://github.com/swagger-api/swagger-codegen.git) + */ + +package swagger + +type ModelsDatacenterCpuModel struct { + Architecture string `json:"architecture,omitempty"` + Cores int32 `json:"cores"` + MinimumMemory int32 `json:"minimum_memory,omitempty"` + Platform string `json:"platform,omitempty"` + Shared bool `json:"shared,omitempty"` +} diff --git a/selfapi/model_models_datacenter_gpu_model.go b/selfapi/model_models_datacenter_gpu_model.go new file mode 100644 index 0000000..630451d --- /dev/null +++ b/selfapi/model_models_datacenter_gpu_model.go @@ -0,0 +1,19 @@ +/* + * oc-catalog API + * + * Backend of the oc-search project + * + * API version: 1.0.0 + * Contact: opencloud@irt-saintexupery.com + * Generated by: Swagger Codegen (https://github.com/swagger-api/swagger-codegen.git) + */ + +package swagger + +type ModelsDatacenterGpuModel struct { + CudaCores int32 `json:"cuda_cores,omitempty"` + // Units in MB + Memory int32 `json:"memory,omitempty"` + Model string `json:"model,omitempty"` + TensorCores 
int32 `json:"tensor_cores,omitempty"` +} diff --git a/selfapi/model_models_datacenter_memory_model.go b/selfapi/model_models_datacenter_memory_model.go new file mode 100644 index 0000000..3ee913b --- /dev/null +++ b/selfapi/model_models_datacenter_memory_model.go @@ -0,0 +1,17 @@ +/* + * oc-catalog API + * + * Backend of the oc-search project + * + * API version: 1.0.0 + * Contact: opencloud@irt-saintexupery.com + * Generated by: Swagger Codegen (https://github.com/swagger-api/swagger-codegen.git) + */ + +package swagger + +type ModelsDatacenterMemoryModel struct { + Ecc bool `json:"ecc,omitempty"` + // Units in MB + Size int32 `json:"size,omitempty"` +} diff --git a/selfapi/model_models_datacenter_model.go b/selfapi/model_models_datacenter_model.go new file mode 100644 index 0000000..a761ff8 --- /dev/null +++ b/selfapi/model_models_datacenter_model.go @@ -0,0 +1,29 @@ +/* + * oc-catalog API + * + * Backend of the oc-search project + * + * API version: 1.0.0 + * Contact: opencloud@irt-saintexupery.com + * Generated by: Swagger Codegen (https://github.com/swagger-api/swagger-codegen.git) + */ + +package swagger + +type ModelsDatacenterModel struct { + ID string `json:"ID"` + // id of the DC + Acronym string `json:"acronym,omitempty"` + BookingPrice int64 `json:"bookingPrice,omitempty"` + Cpu *ModelsDatacenterCpuModel `json:"cpu,omitempty"` + Description string `json:"description,omitempty"` + Gpu []ModelsDatacenterGpuModel `json:"gpu,omitempty"` + // list of host:port + Hosts []string `json:"hosts,omitempty"` + Logo string `json:"logo,omitempty"` + Name string `json:"name,omitempty"` + Owner string `json:"owner,omitempty"` + Ram *ModelsDatacenterMemoryModel `json:"ram,omitempty"` + ShortDescription string `json:"short_description,omitempty"` + Type_ string `json:"type,omitempty"` +} diff --git a/selfapi/model_models_datacenter_new_model.go b/selfapi/model_models_datacenter_new_model.go new file mode 100644 index 0000000..8b3f786 --- /dev/null +++ 
b/selfapi/model_models_datacenter_new_model.go @@ -0,0 +1,28 @@ +/* + * oc-catalog API + * + * Backend of the oc-search project + * + * API version: 1.0.0 + * Contact: opencloud@irt-saintexupery.com + * Generated by: Swagger Codegen (https://github.com/swagger-api/swagger-codegen.git) + */ + +package swagger + +type ModelsDatacenterNewModel struct { + // id of the DC + Acronym string `json:"acronym"` + BookingPrice int64 `json:"bookingPrice,omitempty"` + Cpu *ModelsDatacenterCpuModel `json:"cpu"` + Description string `json:"description"` + Gpu []ModelsDatacenterGpuModel `json:"gpu"` + // list of host:port + Hosts []string `json:"hosts"` + Logo string `json:"logo"` + Name string `json:"name"` + Owner string `json:"owner,omitempty"` + Ram *ModelsDatacenterMemoryModel `json:"ram"` + ShortDescription string `json:"short_description"` + Type_ string `json:"type"` +} diff --git a/selfapi/model_models_datacenter_object.go b/selfapi/model_models_datacenter_object.go new file mode 100644 index 0000000..e54a11c --- /dev/null +++ b/selfapi/model_models_datacenter_object.go @@ -0,0 +1,16 @@ +/* + * oc-catalog API + * + * Backend of the oc-search project + * + * API version: 1.0.0 + * Contact: opencloud@irt-saintexupery.com + * Generated by: Swagger Codegen (https://github.com/swagger-api/swagger-codegen.git) + */ + +package swagger + +type ModelsDatacenterObject struct { + // Data model ID + ReferenceID *PrimitiveObjectId `json:"referenceID,omitempty"` +} diff --git a/selfapi/model_models_execution_requirements_model.go b/selfapi/model_models_execution_requirements_model.go new file mode 100644 index 0000000..8272f80 --- /dev/null +++ b/selfapi/model_models_execution_requirements_model.go @@ -0,0 +1,22 @@ +/* + * oc-catalog API + * + * Backend of the oc-search project + * + * API version: 1.0.0 + * Contact: opencloud@irt-saintexupery.com + * Generated by: Swagger Codegen (https://github.com/swagger-api/swagger-codegen.git) + */ + +package swagger + +type 
ModelsExecutionRequirementsModel struct { + Cpus int32 `json:"cpus"` + DiskIo string `json:"disk_io,omitempty"` + // Amount of GPUs needed + Gpus int32 `json:"gpus,omitempty"` + Parallel bool `json:"parallel,omitempty"` + // Units in MB + Ram int32 `json:"ram"` + ScalingModel int32 `json:"scaling_model,omitempty"` +} diff --git a/selfapi/model_models_repository_model.go b/selfapi/model_models_repository_model.go new file mode 100644 index 0000000..d541b06 --- /dev/null +++ b/selfapi/model_models_repository_model.go @@ -0,0 +1,16 @@ +/* + * oc-catalog API + * + * Backend of the oc-search project + * + * API version: 1.0.0 + * Contact: opencloud@irt-saintexupery.com + * Generated by: Swagger Codegen (https://github.com/swagger-api/swagger-codegen.git) + */ + +package swagger + +type ModelsRepositoryModel struct { + Credentials string `json:"credentials,omitempty"` + Url string `json:"url,omitempty"` +} diff --git a/selfapi/model_models_schedule_db.go b/selfapi/model_models_schedule_db.go new file mode 100644 index 0000000..c5d252c --- /dev/null +++ b/selfapi/model_models_schedule_db.go @@ -0,0 +1,18 @@ +/* + * oc-catalog API + * + * Backend of the oc-search project + * + * API version: 1.0.0 + * Contact: opencloud@irt-saintexupery.com + * Generated by: Swagger Codegen (https://github.com/swagger-api/swagger-codegen.git) + */ + +package swagger + +type ModelsScheduleDb struct { + ResourceQty *ModelsExecutionRequirementsModel `json:"ResourceQty,omitempty"` + StartDate string `json:"StartDate,omitempty"` + StopDate string `json:"StopDate,omitempty"` + Workflow string `json:"Workflow,omitempty"` +} diff --git a/selfapi/model_models_schedule_info.go b/selfapi/model_models_schedule_info.go new file mode 100644 index 0000000..da3c5bb --- /dev/null +++ b/selfapi/model_models_schedule_info.go @@ -0,0 +1,16 @@ +/* + * oc-catalog API + * + * Backend of the oc-search project + * + * API version: 1.0.0 + * Contact: opencloud@irt-saintexupery.com + * Generated by: Swagger Codegen 
// ModelsScheduleInfo summarises a schedule: the next planned execution
// timestamps and the total number of executions. Generated by Swagger Codegen.
type ModelsScheduleInfo struct {
	NextExecutions []string `json:"NextExecutions,omitempty"`
	Total          int64    `json:"Total,omitempty"`
}

// ModelsScheduleTime is an empty placeholder the generator emits for an
// opaque time type used by the API.
type ModelsScheduleTime struct {
}

// ModelsSearchResult aggregates the results of a catalog search, one
// slice per resource category. Generated by Swagger Codegen.
type ModelsSearchResult struct {
	Computing  []ModelsComputingModel  `json:"computing"`
	Data       []ModelsDataModel       `json:"data,omitempty"`
	Datacenter []ModelsDatacenterModel `json:"datacenter,omitempty"`
	Storage    []ModelsStorageModel    `json:"storage,omitempty"`
}

// ModelsStorageModel is a storage resource as returned by the catalog.
// Generated by Swagger Codegen.
type ModelsStorageModel struct {
	// Unique ID of the DC where the storage is located
	DCacronym        string `json:"DCacronym,omitempty"`
	ID               string `json:"ID"`
	BookingPrice     int32  `json:"bookingPrice,omitempty"`
	Description      string `json:"description,omitempty"`
	Encryption       bool   `json:"encryption,omitempty"`
	Logo             string `json:"logo,omitempty"`
	Name             string `json:"name,omitempty"`
	Redundancy       string `json:"redundancy,omitempty"`
	ShortDescription string `json:"short_description,omitempty"`
	Size             int32  `json:"size,omitempty"`
	Throughput       string `json:"throughput,omitempty"`
	Type_            string `json:"type,omitempty"`
}

// ModelsStorageNewModel is the request payload used to register a new
// storage resource; unlike ModelsStorageModel most fields are required
// (no omitempty, no server-assigned ID). Generated by Swagger Codegen.
type ModelsStorageNewModel struct {
	// Unique ID of the DC where the storage is located
	DCacronym        string `json:"DCacronym"`
	BookingPrice     int32  `json:"bookingPrice,omitempty"`
	Description      string `json:"description"`
	Encryption       bool   `json:"encryption,omitempty"`
	Logo             string `json:"logo"`
	Name             string `json:"name"`
	Redundancy       string `json:"redundancy,omitempty"`
	ShortDescription string `json:"short_description"`
	Size             int32  `json:"size"`
	Throughput       string `json:"throughput,omitempty"`
	Type_            string `json:"type"`
}
// ModelsStorageObject links a storage resource instance into a workflow
// graph via its input/output connections. Generated by Swagger Codegen.
type ModelsStorageObject struct {
	Inputs  []string `json:"inputs,omitempty"`
	Outputs []string `json:"outputs,omitempty"`
	// Storage model ID
	ReferenceID *PrimitiveObjectId `json:"referenceID,omitempty"`
}

// ModelsWorkflow is a user-designed workflow: the serialized mxgraph
// state plus the resource objects and schedules attached to it.
// Generated by Swagger Codegen.
type ModelsWorkflow struct {
	// State of the mxgraph
	MxgraphXML string                  `json:"MxgraphXML,omitempty"`
	Computing  *ModelsComputingObject  `json:"computing,omitempty"`
	Data       *ModelsDataObject       `json:"data,omitempty"`
	Datacenter *ModelsDatacenterObject `json:"datacenter,omitempty"`
	Schedules  *ModelsWorkflowSchedule `json:"schedules,omitempty"`
	Storage    *ModelsStorageObject    `json:"storage,omitempty"`
}

// ModelsWorkflowSchedule describes when and how a workflow runs: a time
// window, an optional cron expression, and booking state. Generated by
// Swagger Codegen.
type ModelsWorkflowSchedule struct {
	// Service: true, Task: false
	IsService bool   `json:"IsService,omitempty"`
	StartDate string `json:"StartDate,omitempty"`
	StopDate  string `json:"StopDate,omitempty"`
	Cron      string `json:"cron,omitempty"`
	// Duration in seconds
	Duration int32  `json:"duration,omitempty"`
	Events   string `json:"events,omitempty"`
	IsBooked bool   `json:"isBooked,omitempty"`
}
// ModelsWorkspace is a user's workspace: the IDs of the resources they
// have selected, plus their workflows. Generated by Swagger Codegen.
type ModelsWorkspace struct {
	Workflows  *ModelsWorkflow `json:"Workflows,omitempty"`
	Computing  []string        `json:"computing,omitempty"`
	Data       []string        `json:"data,omitempty"`
	Datacenter []string        `json:"datacenter,omitempty"`
	Storage    []string        `json:"storage,omitempty"`
	UserId     string          `json:"user_id,omitempty"`
}

// ModelsWorkspaceModel is the expanded form of a workspace, with the
// full resource models inlined instead of just their IDs. Generated by
// Swagger Codegen.
type ModelsWorkspaceModel struct {
	Computing  []ModelsComputingModel  `json:"computing,omitempty"`
	Data       []ModelsDataModel       `json:"data,omitempty"`
	Datacenter []ModelsDatacenterModel `json:"datacenter,omitempty"`
	Storage    []ModelsStorageModel    `json:"storage,omitempty"`
	UserId     string                  `json:"user_id,omitempty"`
}

// PrimitiveObjectId is an empty placeholder the generator emits for
// MongoDB's primitive.ObjectID type.
type PrimitiveObjectId struct {
}
// TimeTime is an empty placeholder the generator emits for Go's
// time.Time type. Generated by Swagger Codegen.
type TimeTime struct {
}

// APIResponse wraps an *http.Response together with request metadata and
// the already-drained body payload. Generated by Swagger Codegen.
type APIResponse struct {
	*http.Response `json:"-"`
	Message        string `json:"message,omitempty"`
	// Operation is the name of the swagger operation.
	Operation string `json:"operation,omitempty"`
	// RequestURL is the request URL. This value is always available, even if the
	// embedded *http.Response is nil.
	RequestURL string `json:"url,omitempty"`
	// Method is the HTTP method used for the request. This value is always
	// available, even if the embedded *http.Response is nil.
	Method string `json:"method,omitempty"`
	// Payload holds the contents of the response body (which may be nil or empty).
	// This is provided here as the raw response.Body() reader will have already
	// been drained.
	Payload []byte `json:"-"`
}

// NewAPIResponse wraps an *http.Response in an APIResponse.
func NewAPIResponse(r *http.Response) *APIResponse {

	response := &APIResponse{Response: r}
	return response
}

// NewAPIResponseWithError builds an APIResponse that carries only an
// error message (no underlying *http.Response).
func NewAPIResponseWithError(errorMessage string) *APIResponse {

	response := &APIResponse{Message: errorMessage}
	return response
}

// DC_NAME is the name of the datacenter this catalog instance belongs
// to, resolved once by Discoveryinit.
var DC_NAME string

// GetSelfAPI returns a client for the catalog API of another instance
// reachable at host (plain HTTP, path prefix /v1).
func GetSelfAPI(host string) *SelfAPI.APIClient {
	return SelfAPI.NewAPIClient(&SelfAPI.Configuration{BasePath: "http://" + host + "/v1"})
}

// Discoveryinit resolves DC_NAME. Resolution order: the DOCKER_DCNAME
// environment variable, then the DCNAME app-config key, then the
// hard-coded fallback "DC_DEFAULT" (logged as a warning).
func Discoveryinit() {

	dcNameOS := os.Getenv("DOCKER_DCNAME")
	if len(dcNameOS) != 0 {
		DC_NAME = dcNameOS
		return
	}

	//FIXME: Beego doesn't retrieve the dcname
	beegoDC, err := beego.AppConfig.String("DCNAME")
	if err == nil && len(beegoDC) != 0 {
		DC_NAME = beegoDC
		return
	}

	DC_NAME = "DC_DEFAULT"
	logs.Warning("Default DC name is used")

}

// Init wires up the backing services: first the DC name, then the
// MongoDB connection. The database name is "<DC name>-<DB point>", where
// the DB point comes from DOCKER_DBPOINT or the DBPOINT app-config key.
// Panics if no DB point can be resolved.
func Init() {
	Discoveryinit() //First init DC name

	var DBpoint string
	var err error

	DBpoint = os.Getenv("DOCKER_DBPOINT")
	if len(DBpoint) == 0 {
		DBpoint, err = beego.AppConfig.String("DBPOINT")
		if err != nil {
			logs.Critical("DBPOINT URI error: %v", err)
			panic(err)
		}

	}

	Mongoinit(DC_NAME + "-" + DBpoint)

}
"time" + + "github.com/beego/beego/v2/core/logs" + beego "github.com/beego/beego/v2/server/web" + + "go.mongodb.org/mongo-driver/mongo" + "go.mongodb.org/mongo-driver/mongo/options" + "go.mongodb.org/mongo-driver/x/bsonx" +) + +type MongoCollectionNames struct { + DATA string + COMPUTING string + STORAGE string + DATACENTER string + WORKSPACE string + SCHEDULE string +} + +var ( + mngoClient *mongo.Client + mngoDB *mongo.Database + MngoCtx context.Context + + MngoNamesCollection = MongoCollectionNames{ + DATA: "datas", + COMPUTING: "computings", + STORAGE: "storages", + DATACENTER: "datacenters", + WORKSPACE: "WORKSPACE", + SCHEDULE: "SCHEDULE", + } + + MngoCollData *mongo.Collection + MngoCollComputing *mongo.Collection + MngoCollStorage *mongo.Collection + MngoCollDatacenter *mongo.Collection + MngoCollWorkspace *mongo.Collection + MngoCollSchedule *mongo.Collection +) + +// func GetMongoDBclient() *mongo.Client { +// return mongoDBclient +// } + +func MongoDisconnect() { + if err := mngoClient.Disconnect(MngoCtx); err != nil { + panic(err) + } +} + +func Mongoinit(DBname string) { + + var baseConfig string + + if len(os.Getenv("DOCKER_ENVIRONMENT")) == 0 { + baseConfig = "mongodb" + } else { + baseConfig = "mongodb_docker" + } + + mongoURI, err := beego.AppConfig.String(baseConfig + "::url") + if err != nil { + logs.Critical("MongoDB URI error: %v", err) + panic(err) + } + + logs.Info("Connecting to %v", mongoURI) + + clientOptions := options.Client().ApplyURI(mongoURI) + // mngoClient, err = mongo.NewClient(options.Client().ApplyURI(mongoURI)) + // if err = mngoClient.Connect(MngoCtx); err != nil { + // logs.Critical("Mongodb NewClient %v: %v", mongoURI, err) + // panic(err) + // } + MngoCtx, cancel := context.WithTimeout(context.Background(), 10*time.Second) + + defer cancel() + + // Ping the primary + if mngoClient, err = mongo.Connect(MngoCtx, clientOptions); err != nil { + logs.Critical("Mongodb Connect %v: %v", mongoURI, err) + panic(err) + } + + if err = 
mngoClient.Ping(MngoCtx, nil); err != nil { + logs.Critical("Mongodb Ping %v: %v", mongoURI, err) + panic(err) + } + + mngoDB = mngoClient.Database(DBname) + + MngoCollData = mngoDB.Collection(MngoNamesCollection.DATA) + MngoCollComputing = mngoDB.Collection(MngoNamesCollection.COMPUTING) + MngoCollStorage = mngoDB.Collection(MngoNamesCollection.STORAGE) + MngoCollDatacenter = mngoDB.Collection(MngoNamesCollection.DATACENTER) + MngoCollWorkspace = mngoDB.Collection(MngoNamesCollection.WORKSPACE) + MngoCollSchedule = mngoDB.Collection(MngoNamesCollection.SCHEDULE) + + if _, err = MngoCollData.Indexes().CreateMany(MngoCtx, []mongo.IndexModel{ + { + Keys: bsonx.Doc{ + {Key: "description", Value: bsonx.String("text")}, + {Key: "example", Value: bsonx.String("text")}, + }, + }, + }); err != nil && err.(mongo.CommandError).Code != 85 { + logs.Critical(err) + panic(err) + } + + if _, err = MngoCollComputing.Indexes().CreateMany(MngoCtx, []mongo.IndexModel{ + { + Keys: bsonx.Doc{ + {Key: "description", Value: bsonx.String("text")}, + {Key: "owner", Value: bsonx.String("text")}, + {Key: "license", Value: bsonx.String("text")}, + }, + }, + }); err != nil && err.(mongo.CommandError).Code != 85 { + logs.Critical(err) + panic(err) + } + + if _, err = MngoCollStorage.Indexes().CreateMany(MngoCtx, []mongo.IndexModel{ + { + Keys: bsonx.Doc{ + {Key: "name", Value: bsonx.String("text")}, + {Key: "description", Value: bsonx.String("text")}, + }, + }, + }); err != nil && err.(mongo.CommandError).Code != 85 { + logs.Critical(err) + panic(err) + } + + if _, err = MngoCollDatacenter.Indexes().CreateMany(MngoCtx, []mongo.IndexModel{ + { + Keys: bsonx.Doc{ + {Key: "name", Value: bsonx.String("text")}, + {Key: "description", Value: bsonx.String("text")}, + {Key: "owner", Value: bsonx.String("text")}, + }, + }, + }); err != nil && err.(mongo.CommandError).Code != 85 { + logs.Critical(err) + panic(err) + } + + logs.Info("Database is READY") + +}