improved graph/workflow retrieval

parent 0856c90930
commit 559bac5eb9
@@ -1,12 +1,68 @@
 package daemons
 
-import "oc-scheduler/models"
+import (
+	"oc-scheduler/logger"
+	"oc-scheduler/models"
+	"oc-scheduler/workflow_builder"
+	"time"
+)
 
 type ExecutionManager struct {
-	Bookings models.ScheduledBooking
+	bookings   *models.ScheduledBooking
+	executions []models.Booking
 }
 
-func (em *ExecutionManager) test(){
-	em.Bookings.Mu.Lock()
-	defer em.Bookings.Mu.Unlock()
-}
+func (em *ExecutionManager) SetBookings(b *models.ScheduledBooking) {
+	em.bookings = b
+}
+
+// Loop every second over the bookings list and move the bookings that must start
+// to a new list that will be looped over to start them.
+func (em *ExecutionManager) RetrieveNextExecutions() {
+	if em.bookings == nil {
+		logger.Logger.Fatal().Msg("booking has not been set in the execution manager")
+	}
+
+	for {
+		logger.Logger.Debug().Msg("New loop")
+		em.bookings.Mu.Lock()
+		bookings := em.bookings.Bookings
+		if len(bookings) > 0 {
+			for i := len(bookings) - 1; i >= 0; i-- {
+				logger.Logger.Debug().Msg("It should start at " + bookings[i].Start.String() + " and it is now " + time.Now().UTC().String())
+				if bookings[i].Start.Before(time.Now().UTC()) {
+					logger.Logger.Info().Msg("Will execute " + bookings[i].Workflow + " soon")
+					go em.executeBooking(bookings[i])
+					bookings = append(bookings[:i], bookings[i+1:]...)
+					em.bookings.Bookings = bookings
+				}
+			}
+		}
+		em.bookings.Mu.Unlock()
+		time.Sleep(time.Second)
+	}
+}
+
+func (em *ExecutionManager) executeBooking(booking models.Booking) {
+	// create argo
+	new_graph := workflow_builder.Graph{}
+
+	err := new_graph.LoadFrom(booking.Workflow)
+	if err != nil {
+		logger.Logger.Error().Msg("Could not retrieve workflow " + booking.Workflow + " from the oc-catalog API")
+	}
+
+	argo_file, err := new_graph.ExportToArgo()
+	if err != nil {
+		logger.Logger.Error().Msg("Could not create the Argo file for " + booking.Workflow)
+		logger.Logger.Error().Msg(err.Error())
+	}
+
+	_ = argo_file
+	// start execution
+	// locally launch a pod that contains oc-monitor, give it the name of the workflow
+	// create the yaml that describes the pod: filename, path/url to Loki
+	// locally launch an argo workflow with the filename: `argo submit PATH_TO_YAML --watch --serviceaccount=argo -n argo`
+}
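The execution step of executeBooking is still only sketched in comments. Below is a minimal illustration, not part of this commit, of how that step could shell out to the argo CLI command quoted in the last comment (`argo submit PATH_TO_YAML --watch --serviceaccount=argo -n argo`); the helper name and error handling are assumptions:

package daemons

import (
	"os/exec"

	"oc-scheduler/logger"
)

// submitArgoWorkflow is a hypothetical helper: it runs the argo CLI command
// quoted in the executeBooking comments against the generated file and logs
// any failure.
func submitArgoWorkflow(argoFilePath string) error {
	cmd := exec.Command("argo", "submit", argoFilePath,
		"--watch", "--serviceaccount=argo", "-n", "argo")
	output, err := cmd.CombinedOutput()
	if err != nil {
		logger.Logger.Error().Msg("argo submit failed: " + err.Error())
		logger.Logger.Error().Msg(string(output))
		return err
	}
	return nil
}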
@@ -18,12 +18,14 @@ import (
 
 type ScheduleManager struct {
 	Api_url string
-	list models.ScheduledBooking
-	ws HttpQuery
+	bookings *models.ScheduledBooking
+	ws models.HttpQuery
 
 }
 
+func (s *ScheduleManager) SetBookings(b *models.ScheduledBooking) {
+	s.bookings = b
+}
+
 // Goroutine listening to a NATS server for updates
 // on workflows' scheduling. Messages must contain
@@ -31,6 +33,10 @@ type ScheduleManager struct {
 // is no way to get scheduling infos for a specific workflow
 func (s *ScheduleManager) ListenWorkflowSubmissions() {
 
+	if s.bookings == nil {
+		logger.Logger.Fatal().Msg("booking has not been set in the schedule manager")
+	}
+
 	nc, _ := nats.Connect(nats.DefaultURL)
 	defer nc.Close()
 
@@ -48,7 +54,7 @@ func (s *ScheduleManager) ListenWorkflowSubmissions(){
 
 		map_mess := retrieveMapFromSub(msg.Data)
 
-		s.list.Mu.Lock()
+		s.bookings.Mu.Lock()
 
 		start, err := time.Parse(time.RFC3339, map_mess["start_date"])
 		if err != nil {
@@ -59,8 +65,8 @@ func (s *ScheduleManager) ListenWorkflowSubmissions(){
 			logger.Logger.Error().Msg(err.Error())
 		}
 
-		s.list.AddSchedule(models.Booking{Workflow: map_mess["workflow"], Start: start, Stop: stop})
-		s.list.Mu.Unlock()
+		s.bookings.AddSchedule(models.Booking{Workflow: map_mess["workflow"], Start: start, Stop: stop})
+		s.bookings.Mu.Unlock()
 
 	}
 }
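The two hunks above read the workflow, start_date and stop_date keys from the map returned by retrieveMapFromSub and parse the dates as RFC3339. A hypothetical example of a submission payload that would satisfy those keys (the actual NATS message format is not shown in this commit):

package main

import (
	"encoding/json"
	"fmt"
	"time"
)

func main() {
	// Hypothetical payload; only the three keys read by ListenWorkflowSubmissions
	// are taken from the diff, the values are made up.
	data := []byte(`{"workflow":"my_workflow","start_date":"2024-07-01T10:00:00Z","stop_date":"2024-07-01T11:00:00Z"}`)

	var mapMess map[string]string
	if err := json.Unmarshal(data, &mapMess); err != nil {
		panic(err)
	}

	start, _ := time.Parse(time.RFC3339, mapMess["start_date"])
	stop, _ := time.Parse(time.RFC3339, mapMess["stop_date"])
	fmt.Println(mapMess["workflow"], start, stop)
}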
@@ -81,7 +87,7 @@ func (s *ScheduleManager) RetrieveScheduling (){
 		}
 
 		logger.Logger.Info().Msg("Current list of schedules")
-		fmt.Println(s.list.Bookings)
+		fmt.Println(s.bookings.Bookings)
 
 		time.Sleep(time.Minute * 5)
 	}
@@ -102,14 +108,14 @@ func (s *ScheduleManager) getNextScheduledWorkflows(apiurl string, hours float64
 	var workflows []map[string]string
 	json.Unmarshal(body, &workflows)
 
-	s.list.Mu.Lock()
-	defer s.list.Mu.Unlock()
+	s.bookings.Mu.Lock()
+	defer s.bookings.Mu.Unlock()
 
 	for _, workflow := range workflows {
 		start, _ := time.Parse(time.RFC3339, workflow["start_date"])
 		stop, _ := time.Parse(time.RFC3339, workflow["stop_date"])
 
-		s.list.AddSchedule(models.Booking{Workflow: workflow["Workflow"], Start: start, Stop: stop})
+		s.bookings.AddSchedule(models.Booking{Workflow: workflow["Workflow"], Start: start, Stop: stop})
 	}
 
 	return nil
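Both the schedule manager and the execution manager now share a single *models.ScheduledBooking injected through SetBookings and guarded by its mutex. A sketch of the shape that type is assumed to have, inferred from the Mu.Lock(), Bookings and AddSchedule usage above (the real models package may differ):

package models

import (
	"sync"
	"time"
)

// Booking as used above: a workflow name plus its start/stop window.
type Booking struct {
	Workflow string
	Start    time.Time
	Stop     time.Time
}

// ScheduledBooking holds the shared booking list; callers in this commit take
// Mu themselves before reading Bookings or calling AddSchedule.
type ScheduledBooking struct {
	Mu       sync.Mutex
	Bookings []Booking
}

func (sb *ScheduledBooking) AddSchedule(b Booking) {
	sb.Bookings = append(sb.Bookings, b)
}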
@@ -1,4 +1,4 @@
-package daemons
+package models
 
 import (
 	"io"
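This hunk moves the HttpQuery helper from the daemons package to models, so that both the daemons and the workflow_builder packages can import it. A sketch of the API it is assumed to expose, based only on the g.ws.Init(apiurl) and g.ws.Get("v1/...") calls in the graph code below (the real implementation may differ):

package models

import (
	"io"
	"net/http"
)

// Assumed shape of HttpQuery; only Init and Get are inferred from the callers.
type HttpQuery struct {
	baseurl string
}

func (h *HttpQuery) Init(url string) {
	h.baseurl = url
}

// Get performs a GET on baseurl/route and returns the raw response body.
func (h *HttpQuery) Get(route string) ([]byte, error) {
	resp, err := http.Get(h.baseurl + "/" + route)
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()
	return io.ReadAll(resp.Body)
}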
@@ -2,7 +2,7 @@
 // via its lists of components into an argo file, using a list of
 // link IDs to build the DAG
 
-package main
+package workflow_builder
 
 import (
 	"fmt"
@@ -40,7 +40,7 @@ type Spec struct {
 	Templates []Template `yaml:"templates"`
 }
 
-func (b *ArgoBuilder) CreateDAG() bool {
+func (b *ArgoBuilder) CreateDAG() (string, error) {
 	fmt.Println("list of branches : ", b.branches)
 
 	b.createTemplates()
@@ -54,10 +54,9 @@ func (b *ArgoBuilder) CreateDAG() bool {
 	b.Workflow.Metadata.GenerateName = "oc-test-" + random_name
 
 	yamlified, err := yaml.Marshal(b.Workflow)
-
 	if err != nil {
 		logs.Error("Could not transform object to yaml file")
-		return false
+		return "", err
 	}
 
 	// Give a unique name to each argo file with its timestamp DD:MM:YYYY_hhmmss
@@ -67,11 +66,10 @@ func (b *ArgoBuilder) CreateDAG() bool {
 	err = os.WriteFile(workflows_dir + file_name, []byte(yamlified), 0660)
 	if err != nil {
 		logs.Error("Could not write the yaml file")
-		return false
+		return "", err
 	}
 
-	fmt.Println("Created " + file_name)
-	return true
+	return file_name, nil
 }
 
 func (b *ArgoBuilder) createTemplates() {
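CreateDAG now returns the generated file name together with an error instead of a bool. The comment above mentions a unique name built from a DD:MM:YYYY_hhmmss timestamp; the naming code itself is outside these hunks, so the snippet below is only a hypothetical sketch of how that format maps onto Go's reference time (the prefix and extension are assumptions):

package main

import (
	"fmt"
	"time"
)

func main() {
	// "02:01:2006_150405" is Go's reference layout for DD:MM:YYYY_hhmmss;
	// the "argo_" prefix and ".yaml" suffix are not taken from the diff.
	file_name := "argo_" + time.Now().Format("02:01:2006_150405") + ".yaml"
	fmt.Println(file_name)
}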
@@ -1,13 +1,15 @@
-package main
+package workflow_builder
 
 import (
 	"encoding/json"
 	"fmt"
 	"maps"
-	"net/url"
-	"oc-scheduler/daemons"
 
-	"cloud.o-forge.io/core/oc-catalog/models"
+	"oc-scheduler/conf"
+	"oc-scheduler/logger"
+	models "oc-scheduler/models"
+
+	catalog_models "cloud.o-forge.io/core/oc-catalog/models" // this will be replaced with oc-lib
 
 	"github.com/beego/beego/v2/core/logs"
 	"github.com/tidwall/gjson"
@@ -16,12 +18,13 @@ import (
 
 
 type Graph struct {
-	Datas       []models.DataModel
-	Computings  []models.ComputingModel
-	Datacenters []models.DatacenterModel
-	Storages    []models.StorageModel
-	Links       map[string]models.Link
-	ws          daemons.HttpQuery
+	workflow_name string // used to test if the graph has been instantiated; private so it can only be set by a graph's method
+	Datas         []catalog_models.DataModel
+	Computings    []catalog_models.ComputingModel
+	Datacenters   []catalog_models.DatacenterModel
+	Storages      []catalog_models.StorageModel
+	Links         map[string]catalog_models.Link
+	ws            models.HttpQuery
 }
 
 // Create a dictionary with each existing workflow from a workspace, associated to the JSON representation of its content
@@ -40,21 +43,38 @@ func (g *Graph) GetGraphList(apiurl string) (map[string]string, error) {
 	return workspaces, nil
 }
 
+// Should the parameter be removed, since we have the oc-catalog url in the conf?
+func (g *Graph) GetGraph(apiurl string, workflow string) (string, error) {
+	g.ws.Init(apiurl)
+	body, err := g.ws.Get("v1/workflow/" + workflow)
+	if err != nil {
+		return "", err
+	}
+
+	graph := string(body)
+
+	// result := gjson.Get(string(body), "Workflows")
+	// result.ForEach(func(key, value gjson.Result) bool {
+	// 	workspaces[key.Str] = value.String()
+	// 	return true // keep iterating
+	// })
+	return graph, nil
+}
+
 // Create the objects from the mxgraphxml stored in the workflow given as a parameter
-func (g *Graph) LoadFrom(workspace string) error {
+func (g *Graph) LoadFrom(workflow_name string) error {
 	// Extract the xmlgraph from the given workspace
-	xml := gjson.Get(workspace, "MxgraphXML").String()
-	decodedValue, err := url.QueryUnescape(xml)
+	graph, err := g.GetGraph(conf.GetConfig().OcCatalogUrl, workflow_name)
 	if err != nil {
 		return err
 	}
-	_ = decodedValue
 
 	// os.WriteFile("graph.xml", []byte(decodedValue), 0660)
 
-	g.GetWorkflowComponents(workspace)
-	g.GetLinks(workspace)
+	g.GetWorkflowComponents(graph)
+	g.GetLinks(graph)
+
+	g.workflow_name = workflow_name
 
 	return nil
 }
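LoadFrom now fetches the workflow through GetGraph using conf.GetConfig().OcCatalogUrl rather than decoding an MxgraphXML field from a workspace string. A sketch of the minimal conf package shape assumed by that call (the field name comes from the diff, everything else is a placeholder):

package conf

// Config holds scheduler settings; only OcCatalogUrl is referenced in graph.go.
type Config struct {
	OcCatalogUrl string
}

var config = &Config{
	OcCatalogUrl: "http://localhost:8080", // placeholder default, not from the diff
}

func GetConfig() *Config {
	return config
}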
@@ -94,12 +114,12 @@ func (g *Graph) GetWorkflowComponents(workflow string){
 }
 
 func (g *Graph) GetLinks(workflow string) {
-	g.Links = make(map[string]models.Link)
+	g.Links = make(map[string]catalog_models.Link)
 	result := gjson.Get(workflow, "link")
 
 	if result.Type != gjson.Null {
 		result.ForEach(func(id, value gjson.Result) bool {
-			var l models.Link
+			var l catalog_models.Link
 
 			json.Unmarshal([]byte(value.Raw), &l)
 			g.Links[id.Str] = l
@@ -109,7 +129,7 @@ func (g *Graph) GetLinks(workflow string){
 }
 
 func (g *Graph) AddDataModel(id string, user_input gjson.Result, wf_id string) error {
-	var d models.DataModel
+	var d catalog_models.DataModel
 	resp, err := g.ws.Get("v1/data/" + id)
 	if err != nil {
 		return err
@@ -122,7 +142,7 @@ func (g *Graph) AddDataModel(id string, user_input gjson.Result, wf_id string) e
 }
 
 func (g *Graph) AddDatacenterModel(id string, user_input gjson.Result, wf_id string) error {
-	var d models.DatacenterModel
+	var d catalog_models.DatacenterModel
 	resp, err := g.ws.Get("v1/datacenter/" + id)
 	if err != nil {
 		return err
@@ -135,7 +155,7 @@ func (g *Graph) AddDatacenterModel(id string, user_input gjson.Result, wf_id str
 }
 
 func (g *Graph) AddComputingModel(id string, user_input gjson.Result, wf_id string) error {
-	var c models.ComputingModel
+	var c catalog_models.ComputingModel
 	resp, err := g.ws.Get("v1/computing/" + id)
 	if err != nil {
 		return err
@@ -148,7 +168,7 @@ func (g *Graph) AddComputingModel(id string, user_input gjson.Result, wf_id stri
 }
 
 func (g *Graph) AddStorageModel(id string, user_input gjson.Result, wf_id string) error {
-	var s models.StorageModel
+	var s catalog_models.StorageModel
 	resp, err := g.ws.Get("v1/storage/" + id)
 	if err != nil {
 		return err
@@ -160,8 +180,11 @@ func (g *Graph) AddStorageModel(id string, user_input gjson.Result, wf_id string
 	return nil
 }
 
-func (g *Graph) ExportToArgo(id string) error {
-	end_links := make(map[string]models.Link)
+func (g *Graph) ExportToArgo() (string, error) {
+	if len(g.workflow_name) == 0 {
+		return "", fmt.Errorf("can't export a graph that has not been loaded yet")
+	}
+	end_links := make(map[string]catalog_models.Link)
 
 	for i, link := range g.Links {
 		if !link.DCLink && !g.isSource(link.Destination, i) {
@@ -184,20 +207,23 @@ func (g *Graph) ExportToArgo(id string) error {
 
 	fmt.Println("Identified branches : ", list_branches)
 	argo_builder := ArgoBuilder{graph: *g, branches: list_branches}
-	argo_builder.CreateDAG()
-
-	return nil
+	filename, err := argo_builder.CreateDAG()
+	if err != nil {
+		logger.Logger.Error().Msg("Could not create the argo file for " + g.workflow_name)
+		return "", err
+	}
+	return filename, nil
 }
 
 // Return a list containing the IDs of each link that make up a branch in the graph
-func (g *Graph) getListBranches(end_links map[string]models.Link, unvisited_links_list map[string]models.Link, current_branch []string) (list_branches [][]string) {
+func (g *Graph) getListBranches(end_links map[string]catalog_models.Link, unvisited_links_list map[string]catalog_models.Link, current_branch []string) (list_branches [][]string) {
 
 	if current_branch == nil {
 		current_branch = make([]string, 0)
 	}
 
 	if unvisited_links_list == nil {
-		unvisited_links_list = make(map[string]models.Link, len(g.Links))
+		unvisited_links_list = make(map[string]catalog_models.Link, len(g.Links))
 		maps.Copy(unvisited_links_list, g.Links)
 		fmt.Println(unvisited_links_list)
 	}
@@ -215,7 +241,7 @@ func (g *Graph) getListBranches(end_links map[string]models.Link, unvisited_link
 			current_branch = append([]string{link_id}, current_branch...)
 			delete(unvisited_links_list, link_id)
 			// create a new branch for each previous link, appending the current path to this node to the created branch
-			new_end_link := make(map[string]models.Link, 0)
+			new_end_link := make(map[string]catalog_models.Link, 0)
 			new_end_link[id_link] = g.Links[id_link]
 			new_branches = g.getListBranches(new_end_link, unvisited_links_list, current_branch)
 
@@ -264,7 +290,7 @@ func (g *Graph) isSource(comp_id string,link_id string) bool {
 // with the same Destination id as the Source id in g.Links[linkIndex]
 // or nil if not
 
-func (g *Graph) getPreviousLink(link_id string, map_link map[string]models.Link) (previous_id []string) {
+func (g *Graph) getPreviousLink(link_id string, map_link map[string]catalog_models.Link) (previous_id []string) {
 	for k, link := range map_link {
 		if k != link_id && link.Destination == g.Links[link_id].Source {
 			previous_id = append(previous_id, k)
@@ -320,7 +346,7 @@ func (g *Graph) getComponentType(component_id string) string {
 
 // Returns a slice of ids, in case the link is made of twice the same type of component
 
-func (g *Graph) getComponentByType(compType string, link models.Link) (ids []string) {
+func (g *Graph) getComponentByType(compType string, link catalog_models.Link) (ids []string) {
 	if g.getComponentType(link.Source) == compType {
 		ids = append(ids, link.Source)
 	}