Mirror of https://github.com/GlueOps/autoglue.git (synced 2026-02-14 13:20:05 +01:00)
Compare commits
11 Commits
- 4d37a6363f
- 1dbdd04808
- 45b55015ac
- 6b191089a5
- d2e6ff9812
- 98a6cf7e51
- fb4af74e3c
- 1021e06655
- c6be7bf8eb
- 1429c40b2b
- 73c4904a42
cmd/serve.go (41 changed lines)

@@ -115,6 +115,47 @@ var serveCmd = &cobra.Command{
             if err != nil {
                 log.Printf("failed to enqueue bootstrap_bastion: %v", err)
             }
+
+            _, err = jobs.Enqueue(
+                context.Background(),
+                uuid.NewString(),
+                "prepare_cluster",
+                bg.ClusterPrepareArgs{IntervalS: 120},
+                archer.WithMaxRetries(3),
+                archer.WithScheduleTime(time.Now().Add(60*time.Second)),
+            )
+            if err != nil {
+                log.Printf("failed to enqueue prepare_cluster: %v", err)
+            }
+
+            _, err = jobs.Enqueue(
+                context.Background(),
+                uuid.NewString(),
+                "cluster_setup",
+                bg.ClusterSetupArgs{
+                    IntervalS: 120,
+                },
+                archer.WithMaxRetries(3),
+                archer.WithScheduleTime(time.Now().Add(60*time.Second)),
+            )
+
+            if err != nil {
+                log.Printf("failed to enqueue cluster setup: %v", err)
+            }
+
+            _, err = jobs.Enqueue(
+                context.Background(),
+                uuid.NewString(),
+                "cluster_bootstrap",
+                bg.ClusterBootstrapArgs{
+                    IntervalS: 120,
+                },
+                archer.WithMaxRetries(3),
+                archer.WithScheduleTime(time.Now().Add(60*time.Second)),
+            )
+            if err != nil {
+                log.Printf("failed to enqueue cluster bootstrap: %v", err)
+            }
         }
 
         _ = auth.Refresh(rt.DB, rt.Cfg.JWTPrivateEncKey)
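Together with the workers registered further down, these enqueues start a status-driven reconcile loop: prepare_cluster moves clusters from pre_pending to pending after pushing SSH assets to the bastion, cluster_setup moves them from pending to provisioning by running `make setup`, and cluster_bootstrap moves them from provisioning to ready by running `make bootstrap`. A minimal sketch of that progression, assuming the status aliases declared in internal/bg/prepare_cluster.go are plain string constants (their use with setClusterStatus suggests they are):

// Sketch only, not part of the diff. It restates the transition each worker
// applies, using the constant aliases from internal/bg/prepare_cluster.go.
func nextClusterStatus(status string) string {
    switch status {
    case clusterStatusPrePending: // prepare_cluster: push ssh-config, keys, payload.json
        return clusterStatusPending
    case clusterStatusPending: // cluster_setup: run `make setup` on the bastion
        return clusterStatusProvisioning
    case clusterStatusProvisioning: // cluster_bootstrap: run `make bootstrap` on the bastion
        return clusterStatusReady
    default:
        return status // ready and failed are terminal for this loop
    }
}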
File diff suppressed because one or more lines are too long
@@ -96,6 +96,10 @@ components:
         $ref: '#/components/schemas/dto.RecordSetResponse'
       created_at:
         type: string
+      docker_image:
+        type: string
+      docker_tag:
+        type: string
       glueops_load_balancer:
         $ref: '#/components/schemas/dto.LoadBalancerResponse'
       id:
@@ -129,6 +133,10 @@ components:
     properties:
       cluster_provider:
         type: string
+      docker_image:
+        type: string
+      docker_tag:
+        type: string
       name:
         type: string
       region:
@@ -713,6 +721,10 @@ components:
     properties:
       cluster_provider:
         type: string
+      docker_image:
+        type: string
+      docker_tag:
+        type: string
       name:
         type: string
       region:
go.mod (2 changed lines)

@@ -26,7 +26,7 @@ require (
     github.com/spf13/viper v1.21.0
     github.com/swaggo/swag/v2 v2.0.0-rc4
     golang.org/x/crypto v0.45.0
-    golang.org/x/oauth2 v0.33.0
+    golang.org/x/oauth2 v0.34.0
     gopkg.in/yaml.v3 v3.0.1
     gorm.io/datatypes v1.2.7
     gorm.io/driver/postgres v1.6.0

go.sum (2 changed lines)

@@ -306,6 +306,8 @@ golang.org/x/net v0.47.0 h1:Mx+4dIFzqraBXUugkia1OOvlD6LemFo1ALMHjrXDOhY=
 golang.org/x/net v0.47.0/go.mod h1:/jNxtkgq5yWUGYkaZGqo27cfGZ1c5Nen03aYrrKpVRU=
 golang.org/x/oauth2 v0.33.0 h1:4Q+qn+E5z8gPRJfmRy7C2gGG3T4jIprK6aSYgTXGRpo=
 golang.org/x/oauth2 v0.33.0/go.mod h1:lzm5WQJQwKZ3nwavOZ3IS5Aulzxi68dUSgRHujetwEA=
+golang.org/x/oauth2 v0.34.0 h1:hqK/t4AKgbqWkdkcAeI8XLmbK+4m4G5YeQRrmiotGlw=
+golang.org/x/oauth2 v0.34.0/go.mod h1:lzm5WQJQwKZ3nwavOZ3IS5Aulzxi68dUSgRHujetwEA=
 golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
 golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
 golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
@@ -107,6 +107,28 @@ func NewJobs(gdb *gorm.DB, dbUrl string) (*Jobs, error) {
         archer.WithInstances(1),
         archer.WithTimeout(2*time.Minute),
     )
+
+    c.Register(
+        "prepare_cluster",
+        ClusterPrepareWorker(gdb, jobs),
+        archer.WithInstances(1),
+        archer.WithTimeout(2*time.Minute),
+    )
+
+    c.Register(
+        "cluster_setup",
+        ClusterSetupWorker(gdb, jobs),
+        archer.WithInstances(1),
+        archer.WithTimeout(2*time.Minute),
+    )
+
+    c.Register(
+        "cluster_bootstrap",
+        ClusterBootstrapWorker(gdb, jobs),
+        archer.WithInstances(1),
+        archer.WithTimeout(60*time.Minute),
+    )
+
     return jobs, nil
 }
 
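The names registered here must match the names passed to jobs.Enqueue in cmd/serve.go ("prepare_cluster", "cluster_setup", "cluster_bootstrap"), since archer routes a queued job to its worker by that string. A minimal sketch of the worker shape these registrations expect, following the new files below (the body is illustrative only):

// Illustrative sketch of the archer.WorkerFn returned by ClusterPrepareWorker,
// ClusterSetupWorker and ClusterBootstrapWorker; the real bodies are in the new files.
func exampleWorker(gdb *gorm.DB, jobs *Jobs) archer.WorkerFn {
    return func(ctx context.Context, j job.Job) (any, error) {
        // 1. parse the job arguments, 2. reconcile matching clusters,
        // 3. re-enqueue itself via jobs.Enqueue with archer.WithScheduleTime,
        // 4. return a result struct for the job record.
        return nil, nil
    }
}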
internal/bg/cluster_bootstrap.go (new file, 121 lines)

@@ -0,0 +1,121 @@
package bg

import (
    "context"
    "fmt"
    "time"

    "github.com/dyaksa/archer"
    "github.com/dyaksa/archer/job"
    "github.com/glueops/autoglue/internal/models"
    "github.com/google/uuid"
    "github.com/rs/zerolog/log"
    "gorm.io/gorm"
)

type ClusterBootstrapArgs struct {
    IntervalS int `json:"interval_seconds,omitempty"`
}

type ClusterBootstrapResult struct {
    Status    string      `json:"status"`
    Processed int         `json:"processed"`
    Ready     int         `json:"ready"`
    Failed    int         `json:"failed"`
    ElapsedMs int         `json:"elapsed_ms"`
    FailedIDs []uuid.UUID `json:"failed_cluster_ids"`
}

func ClusterBootstrapWorker(db *gorm.DB, jobs *Jobs) archer.WorkerFn {
    return func(ctx context.Context, j job.Job) (any, error) {
        args := ClusterBootstrapArgs{IntervalS: 120}
        jobID := j.ID
        start := time.Now()

        _ = j.ParseArguments(&args)
        if args.IntervalS <= 0 {
            args.IntervalS = 120
        }

        var clusters []models.Cluster
        if err := db.
            Preload("BastionServer.SshKey").
            Where("status = ?", clusterStatusProvisioning).
            Find(&clusters).Error; err != nil {
            log.Error().Err(err).Msg("[cluster_bootstrap] query clusters failed")
            return nil, err
        }

        proc, ready, failCount := 0, 0, 0
        var failedIDs []uuid.UUID

        perClusterTimeout := 60 * time.Minute

        for i := range clusters {
            c := &clusters[i]
            proc++

            if c.BastionServer.ID == uuid.Nil || c.BastionServer.Status != "ready" {
                continue
            }

            logger := log.With().
                Str("job", jobID).
                Str("cluster_id", c.ID.String()).
                Str("cluster_name", c.Name).
                Logger()

            logger.Info().Msg("[cluster_bootstrap] running make bootstrap")

            runCtx, cancel := context.WithTimeout(ctx, perClusterTimeout)
            out, err := runMakeOnBastion(runCtx, db, c, "bootstrap")
            cancel()

            if err != nil {
                failCount++
                failedIDs = append(failedIDs, c.ID)
                logger.Error().Err(err).Str("output", out).Msg("[cluster_bootstrap] make bootstrap failed")
                _ = setClusterStatus(db, c.ID, clusterStatusFailed, fmt.Sprintf("make bootstrap: %v", err))
                continue
            }

            // you can choose a different terminal status here if you like
            if err := setClusterStatus(db, c.ID, clusterStatusReady, ""); err != nil {
                failCount++
                failedIDs = append(failedIDs, c.ID)
                logger.Error().Err(err).Msg("[cluster_bootstrap] failed to mark cluster ready")
                continue
            }

            ready++
            logger.Info().Msg("[cluster_bootstrap] cluster marked ready")
        }

        res := ClusterBootstrapResult{
            Status:    "ok",
            Processed: proc,
            Ready:     ready,
            Failed:    failCount,
            ElapsedMs: int(time.Since(start).Milliseconds()),
            FailedIDs: failedIDs,
        }

        log.Info().
            Int("processed", proc).
            Int("ready", ready).
            Int("failed", failCount).
            Msg("[cluster_bootstrap] reconcile tick ok")

        // self-reschedule
        next := time.Now().Add(time.Duration(args.IntervalS) * time.Second)
        _, _ = jobs.Enqueue(
            ctx,
            uuid.NewString(),
            "cluster_bootstrap",
            args,
            archer.WithScheduleTime(next),
            archer.WithMaxRetries(1),
        )
        return res, nil
    }
}
internal/bg/cluster_setup.go (new file, 120 lines)

@@ -0,0 +1,120 @@
package bg

import (
    "context"
    "fmt"
    "time"

    "github.com/dyaksa/archer"
    "github.com/dyaksa/archer/job"
    "github.com/glueops/autoglue/internal/models"
    "github.com/google/uuid"
    "github.com/rs/zerolog/log"
    "gorm.io/gorm"
)

type ClusterSetupArgs struct {
    IntervalS int `json:"interval_seconds,omitempty"`
}

type ClusterSetupResult struct {
    Status        string      `json:"status"`
    Processed     int         `json:"processed"`
    Provisioning  int         `json:"provisioning"`
    Failed        int         `json:"failed"`
    ElapsedMs     int         `json:"elapsed_ms"`
    FailedCluster []uuid.UUID `json:"failed_cluster_ids"`
}

func ClusterSetupWorker(db *gorm.DB, jobs *Jobs) archer.WorkerFn {
    return func(ctx context.Context, j job.Job) (any, error) {
        args := ClusterSetupArgs{IntervalS: 120}
        jobID := j.ID
        start := time.Now()

        _ = j.ParseArguments(&args)
        if args.IntervalS <= 0 {
            args.IntervalS = 120
        }

        var clusters []models.Cluster
        if err := db.
            Preload("BastionServer.SshKey").
            Where("status = ?", clusterStatusPending).
            Find(&clusters).Error; err != nil {
            log.Error().Err(err).Msg("[cluster_setup] query clusters failed")
            return nil, err
        }

        proc, prov, failCount := 0, 0, 0
        var failedIDs []uuid.UUID

        perClusterTimeout := 30 * time.Minute

        for i := range clusters {
            c := &clusters[i]
            proc++

            if c.BastionServer.ID == uuid.Nil || c.BastionServer.Status != "ready" {
                continue
            }

            logger := log.With().
                Str("job", jobID).
                Str("cluster_id", c.ID.String()).
                Str("cluster_name", c.Name).
                Logger()

            logger.Info().Msg("[cluster_setup] running make setup")

            runCtx, cancel := context.WithTimeout(ctx, perClusterTimeout)
            out, err := runMakeOnBastion(runCtx, db, c, "setup")
            cancel()

            if err != nil {
                failCount++
                failedIDs = append(failedIDs, c.ID)
                logger.Error().Err(err).Str("output", out).Msg("[cluster_setup] make setup failed")
                _ = setClusterStatus(db, c.ID, clusterStatusFailed, fmt.Sprintf("make setup: %v", err))
                continue
            }

            if err := setClusterStatus(db, c.ID, clusterStatusProvisioning, ""); err != nil {
                failCount++
                failedIDs = append(failedIDs, c.ID)
                logger.Error().Err(err).Msg("[cluster_setup] failed to mark cluster provisioning")
                continue
            }

            prov++
            logger.Info().Msg("[cluster_setup] cluster moved to provisioning")
        }

        res := ClusterSetupResult{
            Status:        "ok",
            Processed:     proc,
            Provisioning:  prov,
            Failed:        failCount,
            ElapsedMs:     int(time.Since(start).Milliseconds()),
            FailedCluster: failedIDs,
        }

        log.Info().
            Int("processed", proc).
            Int("provisioning", prov).
            Int("failed", failCount).
            Msg("[cluster_setup] reconcile tick ok")

        // self-reschedule
        next := time.Now().Add(time.Duration(args.IntervalS) * time.Second)
        _, _ = jobs.Enqueue(
            ctx,
            uuid.NewString(),
            "cluster_setup",
            args,
            archer.WithScheduleTime(next),
            archer.WithMaxRetries(1),
        )
        return res, nil
    }
}
internal/bg/prepare_cluster.go (new file, 510 lines)

@@ -0,0 +1,510 @@
package bg

import (
    "bytes"
    "context"
    "encoding/base64"
    "encoding/json"
    "fmt"
    "net"
    "strings"
    "time"

    "github.com/dyaksa/archer"
    "github.com/dyaksa/archer/job"
    "github.com/glueops/autoglue/internal/models"
    "github.com/glueops/autoglue/internal/utils"
    "github.com/google/uuid"
    "github.com/rs/zerolog/log"
    "golang.org/x/crypto/ssh"
    "gorm.io/gorm"
)

type ClusterPrepareArgs struct {
    IntervalS int `json:"interval_seconds,omitempty"`
}

type ClusterPrepareFailure struct {
    ClusterID uuid.UUID `json:"cluster_id"`
    Step      string    `json:"step"`
    Reason    string    `json:"reason"`
}

type ClusterPrepareResult struct {
    Status        string                  `json:"status"`
    Processed     int                     `json:"processed"`
    MarkedPending int                     `json:"marked_pending"`
    Failed        int                     `json:"failed"`
    ElapsedMs     int                     `json:"elapsed_ms"`
    FailedIDs     []uuid.UUID             `json:"failed_cluster_ids"`
    Failures      []ClusterPrepareFailure `json:"failures"`
}

// Alias the status constants from models to avoid string drift.
const (
    clusterStatusPrePending   = models.ClusterStatusPrePending
    clusterStatusPending      = models.ClusterStatusPending
    clusterStatusProvisioning = models.ClusterStatusProvisioning
    clusterStatusReady        = models.ClusterStatusReady
    clusterStatusFailed       = models.ClusterStatusFailed
)

func ClusterPrepareWorker(db *gorm.DB, jobs *Jobs) archer.WorkerFn {
    return func(ctx context.Context, j job.Job) (any, error) {
        args := ClusterPrepareArgs{IntervalS: 120}
        jobID := j.ID
        start := time.Now()

        _ = j.ParseArguments(&args)
        if args.IntervalS <= 0 {
            args.IntervalS = 120
        }

        // Load all clusters that are pre_pending; we'll filter for bastion.ready in memory.
        var clusters []models.Cluster
        if err := db.
            Preload("BastionServer.SshKey").
            Preload("CaptainDomain").
            Preload("ControlPlaneRecordSet").
            Preload("NodePools.Servers.SshKey").
            Where("status = ?", clusterStatusPrePending).
            Find(&clusters).Error; err != nil {
            log.Error().Err(err).Msg("[cluster_prepare] query clusters failed")
            return nil, err
        }

        proc, ok, fail := 0, 0, 0
        var failedIDs []uuid.UUID
        var failures []ClusterPrepareFailure

        perClusterTimeout := 8 * time.Minute

        for i := range clusters {
            c := &clusters[i]
            proc++

            // bastion must exist and be ready
            if c.BastionServer == nil || c.BastionServerID == nil || *c.BastionServerID == uuid.Nil || c.BastionServer.Status != "ready" {
                continue
            }

            clusterLog := log.With().
                Str("job", jobID).
                Str("cluster_id", c.ID.String()).
                Str("cluster_name", c.Name).
                Logger()

            clusterLog.Info().Msg("[cluster_prepare] starting")

            if err := validateClusterForPrepare(c); err != nil {
                fail++
                failedIDs = append(failedIDs, c.ID)
                failures = append(failures, ClusterPrepareFailure{
                    ClusterID: c.ID,
                    Step:      "validate",
                    Reason:    err.Error(),
                })
                clusterLog.Error().Err(err).Msg("[cluster_prepare] validation failed")
                _ = setClusterStatus(db, c.ID, clusterStatusFailed, err.Error())
                continue
            }

            allServers := flattenClusterServers(c)
            keyPayloads, sshConfig, err := buildSSHAssetsForCluster(db, c, allServers)
            if err != nil {
                fail++
                failedIDs = append(failedIDs, c.ID)
                failures = append(failures, ClusterPrepareFailure{
                    ClusterID: c.ID,
                    Step:      "build_ssh_assets",
                    Reason:    err.Error(),
                })
                clusterLog.Error().Err(err).Msg("[cluster_prepare] build ssh assets failed")
                _ = setClusterStatus(db, c.ID, clusterStatusFailed, err.Error())
                continue
            }

            payloadJSON, err := json.MarshalIndent(c, "", " ")
            if err != nil {
                fail++
                failedIDs = append(failedIDs, c.ID)
                failures = append(failures, ClusterPrepareFailure{
                    ClusterID: c.ID,
                    Step:      "marshal_payload",
                    Reason:    err.Error(),
                })
                clusterLog.Error().Err(err).Msg("[cluster_prepare] json marshal failed")
                _ = setClusterStatus(db, c.ID, clusterStatusFailed, err.Error())
                continue
            }

            runCtx, cancel := context.WithTimeout(ctx, perClusterTimeout)
            err = pushAssetsToBastion(runCtx, db, c, sshConfig, keyPayloads, payloadJSON)
            cancel()

            if err != nil {
                fail++
                failedIDs = append(failedIDs, c.ID)
                failures = append(failures, ClusterPrepareFailure{
                    ClusterID: c.ID,
                    Step:      "ssh_push",
                    Reason:    err.Error(),
                })
                clusterLog.Error().Err(err).Msg("[cluster_prepare] failed to push assets to bastion")
                _ = setClusterStatus(db, c.ID, clusterStatusFailed, err.Error())
                continue
            }

            if err := setClusterStatus(db, c.ID, clusterStatusPending, ""); err != nil {
                fail++
                failedIDs = append(failedIDs, c.ID)
                failures = append(failures, ClusterPrepareFailure{
                    ClusterID: c.ID,
                    Step:      "set_pending",
                    Reason:    err.Error(),
                })
                clusterLog.Error().Err(err).Msg("[cluster_prepare] failed to mark cluster pending")
                continue
            }

            ok++
            clusterLog.Info().Msg("[cluster_prepare] cluster marked pending")
        }

        res := ClusterPrepareResult{
            Status:        "ok",
            Processed:     proc,
            MarkedPending: ok,
            Failed:        fail,
            ElapsedMs:     int(time.Since(start).Milliseconds()),
            FailedIDs:     failedIDs,
            Failures:      failures,
        }

        log.Info().
            Int("processed", proc).
            Int("pending", ok).
            Int("failed", fail).
            Msg("[cluster_prepare] reconcile tick ok")

        next := time.Now().Add(time.Duration(args.IntervalS) * time.Second)
        _, _ = jobs.Enqueue(
            ctx,
            uuid.NewString(),
            "prepare_cluster",
            args,
            archer.WithScheduleTime(next),
            archer.WithMaxRetries(1),
        )
        return res, nil
    }
}

// ---------- helpers ----------

func validateClusterForPrepare(c *models.Cluster) error {
    if c.BastionServer == nil || c.BastionServerID == nil || *c.BastionServerID == uuid.Nil {
        return fmt.Errorf("missing bastion server")
    }
    if c.BastionServer.Status != "ready" {
        return fmt.Errorf("bastion server not ready (status=%s)", c.BastionServer.Status)
    }

    // CaptainDomain is a value type; presence is via *ID
    if c.CaptainDomainID == nil || *c.CaptainDomainID == uuid.Nil {
        return fmt.Errorf("missing captain domain for cluster")
    }

    // ControlPlaneRecordSet is a pointer; presence is via *ID + non-nil struct
    if c.ControlPlaneRecordSetID == nil || *c.ControlPlaneRecordSetID == uuid.Nil || c.ControlPlaneRecordSet == nil {
        return fmt.Errorf("missing control_plane_record_set for cluster")
    }

    if len(c.NodePools) == 0 {
        return fmt.Errorf("cluster has no node pools")
    }

    hasServer := false
    for i := range c.NodePools {
        if len(c.NodePools[i].Servers) > 0 {
            hasServer = true
            break
        }
    }
    if !hasServer {
        return fmt.Errorf("cluster has no servers attached to node pools")
    }

    return nil
}

func flattenClusterServers(c *models.Cluster) []*models.Server {
    var out []*models.Server
    for i := range c.NodePools {
        for j := range c.NodePools[i].Servers {
            s := &c.NodePools[i].Servers[j]
            out = append(out, s)
        }
    }
    return out
}

type keyPayload struct {
    FileName      string
    PrivateKeyB64 string
}

// build ssh-config for all servers + decrypt keys.
// ssh-config is intended to live on the bastion and connect via *private* IPs.
func buildSSHAssetsForCluster(db *gorm.DB, c *models.Cluster, servers []*models.Server) (map[uuid.UUID]keyPayload, string, error) {
    var sb strings.Builder
    keys := make(map[uuid.UUID]keyPayload)

    for _, s := range servers {
        // Defensive checks
        if strings.TrimSpace(s.PrivateIPAddress) == "" {
            return nil, "", fmt.Errorf("server %s missing private ip", s.ID)
        }
        if s.SshKeyID == uuid.Nil {
            return nil, "", fmt.Errorf("server %s missing ssh key relation", s.ID)
        }

        // de-dupe keys: many servers may share the same ssh key
        if _, ok := keys[s.SshKeyID]; !ok {
            priv, err := utils.DecryptForOrg(
                s.OrganizationID,
                s.SshKey.EncryptedPrivateKey,
                s.SshKey.PrivateIV,
                s.SshKey.PrivateTag,
                db,
            )
            if err != nil {
                return nil, "", fmt.Errorf("decrypt key for server %s: %w", s.ID, err)
            }

            fname := fmt.Sprintf("%s.pem", s.SshKeyID.String())
            keys[s.SshKeyID] = keyPayload{
                FileName:      fname,
                PrivateKeyB64: base64.StdEncoding.EncodeToString([]byte(priv)),
            }
        }

        // ssh config entry per server
        keyFile := keys[s.SshKeyID].FileName

        hostAlias := s.Hostname
        if hostAlias == "" {
            hostAlias = s.ID.String()
        }

        sb.WriteString(fmt.Sprintf("Host %s\n", hostAlias))
        sb.WriteString(fmt.Sprintf("  HostName %s\n", s.PrivateIPAddress))
        sb.WriteString(fmt.Sprintf("  User %s\n", s.SSHUser))
        sb.WriteString(fmt.Sprintf("  IdentityFile ~/.ssh/autoglue/keys/%s\n", keyFile))
        sb.WriteString("  IdentitiesOnly yes\n")
        sb.WriteString("  StrictHostKeyChecking accept-new\n\n")
    }

    return keys, sb.String(), nil
}

func pushAssetsToBastion(
    ctx context.Context,
    db *gorm.DB,
    c *models.Cluster,
    sshConfig string,
    keyPayloads map[uuid.UUID]keyPayload,
    payloadJSON []byte,
) error {
    bastion := c.BastionServer
    if bastion == nil {
        return fmt.Errorf("bastion server is nil")
    }

    if bastion.PublicIPAddress == nil || strings.TrimSpace(*bastion.PublicIPAddress) == "" {
        return fmt.Errorf("bastion server missing public ip")
    }

    privKey, err := utils.DecryptForOrg(
        bastion.OrganizationID,
        bastion.SshKey.EncryptedPrivateKey,
        bastion.SshKey.PrivateIV,
        bastion.SshKey.PrivateTag,
        db,
    )
    if err != nil {
        return fmt.Errorf("decrypt bastion key: %w", err)
    }

    signer, err := ssh.ParsePrivateKey([]byte(privKey))
    if err != nil {
        return fmt.Errorf("parse bastion private key: %w", err)
    }

    hkcb := makeDBHostKeyCallback(db, bastion)

    config := &ssh.ClientConfig{
        User:            bastion.SSHUser,
        Auth:            []ssh.AuthMethod{ssh.PublicKeys(signer)},
        HostKeyCallback: hkcb,
        Timeout:         30 * time.Second,
    }

    host := net.JoinHostPort(*bastion.PublicIPAddress, "22")

    dialer := &net.Dialer{}
    conn, err := dialer.DialContext(ctx, "tcp", host)
    if err != nil {
        return fmt.Errorf("dial bastion: %w", err)
    }
    defer conn.Close()

    cconn, chans, reqs, err := ssh.NewClientConn(conn, host, config)
    if err != nil {
        return fmt.Errorf("ssh handshake bastion: %w", err)
    }
    client := ssh.NewClient(cconn, chans, reqs)
    defer client.Close()

    sess, err := client.NewSession()
    if err != nil {
        return fmt.Errorf("ssh session: %w", err)
    }
    defer sess.Close()

    // build one shot script to:
    // - mkdir ~/.ssh/autoglue/keys
    // - write cluster-specific ssh-config
    // - write all private keys
    // - write payload.json
    clusterDir := fmt.Sprintf("$HOME/autoglue/clusters/%s", c.ID.String())
    configPath := fmt.Sprintf("$HOME/.ssh/autoglue/cluster-%s.config", c.ID.String())

    var script bytes.Buffer

    script.WriteString("set -euo pipefail\n")
    script.WriteString("mkdir -p \"$HOME/.ssh/autoglue/keys\"\n")
    script.WriteString("mkdir -p " + clusterDir + "\n")
    script.WriteString("chmod 700 \"$HOME/.ssh\" || true\n")

    // ssh-config
    script.WriteString("cat > " + configPath + " <<'EOF_CFG'\n")
    script.WriteString(sshConfig)
    script.WriteString("EOF_CFG\n")
    script.WriteString("chmod 600 " + configPath + "\n")

    // keys
    for id, kp := range keyPayloads {
        tag := "KEY_" + id.String()
        target := fmt.Sprintf("$HOME/.ssh/autoglue/keys/%s", kp.FileName)

        script.WriteString("cat <<'" + tag + "' | base64 -d > " + target + "\n")
        script.WriteString(kp.PrivateKeyB64 + "\n")
        script.WriteString(tag + "\n")
        script.WriteString("chmod 600 " + target + "\n")
    }

    // payload.json
    payloadPath := clusterDir + "/payload.json"
    script.WriteString("cat > " + payloadPath + " <<'EOF_PAYLOAD'\n")
    script.Write(payloadJSON)
    script.WriteString("\nEOF_PAYLOAD\n")
    script.WriteString("chmod 600 " + payloadPath + "\n")

    // If you later want to always include cluster configs automatically, you can
    // optionally manage ~/.ssh/config here (kept simple for now).

    sess.Stdin = strings.NewReader(script.String())
    out, runErr := sess.CombinedOutput("bash -s")

    if runErr != nil {
        return wrapSSHError(runErr, string(out))
    }
    return nil
}

func setClusterStatus(db *gorm.DB, id uuid.UUID, status, lastError string) error {
    updates := map[string]any{
        "status":     status,
        "updated_at": time.Now(),
    }
    if lastError != "" {
        updates["last_error"] = lastError
    }
    return db.Model(&models.Cluster{}).
        Where("id = ?", id).
        Updates(updates).Error
}

// runMakeOnBastion runs `make <target>` from the cluster's directory on the bastion.
func runMakeOnBastion(
    ctx context.Context,
    db *gorm.DB,
    c *models.Cluster,
    target string,
) (string, error) {
    bastion := c.BastionServer
    if bastion == nil {
        return "", fmt.Errorf("bastion server is nil")
    }

    if bastion.PublicIPAddress == nil || strings.TrimSpace(*bastion.PublicIPAddress) == "" {
        return "", fmt.Errorf("bastion server missing public ip")
    }

    privKey, err := utils.DecryptForOrg(
        bastion.OrganizationID,
        bastion.SshKey.EncryptedPrivateKey,
        bastion.SshKey.PrivateIV,
        bastion.SshKey.PrivateTag,
        db,
    )
    if err != nil {
        return "", fmt.Errorf("decrypt bastion key: %w", err)
    }

    signer, err := ssh.ParsePrivateKey([]byte(privKey))
    if err != nil {
        return "", fmt.Errorf("parse bastion private key: %w", err)
    }

    hkcb := makeDBHostKeyCallback(db, bastion)

    config := &ssh.ClientConfig{
        User:            bastion.SSHUser,
        Auth:            []ssh.AuthMethod{ssh.PublicKeys(signer)},
        HostKeyCallback: hkcb,
        Timeout:         30 * time.Second,
    }

    host := net.JoinHostPort(*bastion.PublicIPAddress, "22")

    dialer := &net.Dialer{}
    conn, err := dialer.DialContext(ctx, "tcp", host)
    if err != nil {
        return "", fmt.Errorf("dial bastion: %w", err)
    }
    defer conn.Close()

    cconn, chans, reqs, err := ssh.NewClientConn(conn, host, config)
    if err != nil {
        return "", fmt.Errorf("ssh handshake bastion: %w", err)
    }
    client := ssh.NewClient(cconn, chans, reqs)
    defer client.Close()

    sess, err := client.NewSession()
    if err != nil {
        return "", fmt.Errorf("ssh session: %w", err)
    }
    defer sess.Close()

    clusterDir := fmt.Sprintf("$HOME/autoglue/clusters/%s", c.ID.String())
    cmd := fmt.Sprintf("cd %s && make %s", clusterDir, target)

    out, runErr := sess.CombinedOutput(cmd)
    if runErr != nil {
        return string(out), wrapSSHError(runErr, string(out))
    }
    return string(out), nil
}
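For reference, each server processed by buildSSHAssetsForCluster contributes one entry of the following shape to the generated ssh-config pushed onto the bastion (the host alias, address, user, and key file name below are illustrative placeholders, not values from the diff):

Host worker-1
  HostName 10.0.1.23
  User ubuntu
  IdentityFile ~/.ssh/autoglue/keys/<ssh-key-id>.pem
  IdentitiesOnly yes
  StrictHostKeyChecking accept-new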
@@ -189,6 +189,8 @@ func CreateCluster(db *gorm.DB) http.HandlerFunc {
             LastError: "",
             CertificateKey: certificateKey,
             RandomToken: randomToken,
+            DockerImage: in.DockerImage,
+            DockerTag: in.DockerTag,
         }
 
         if err := db.Create(&c).Error; err != nil {
@@ -262,6 +264,14 @@ func UpdateCluster(db *gorm.DB) http.HandlerFunc {
             cluster.Region = *in.Region
         }
 
+        if in.DockerImage != nil {
+            cluster.DockerImage = *in.DockerImage
+        }
+
+        if in.DockerTag != nil {
+            cluster.DockerTag = *in.DockerTag
+        }
+
         if err := db.Save(&cluster).Error; err != nil {
             utils.WriteError(w, http.StatusInternalServerError, "db_error", "db error")
             return
@@ -1547,6 +1557,8 @@ func clusterToDTO(c models.Cluster) dto.ClusterResponse {
         RandomToken: c.RandomToken,
         CertificateKey: c.CertificateKey,
         NodePools: nps,
+        DockerImage: c.DockerImage,
+        DockerTag: c.DockerTag,
         CreatedAt: c.CreatedAt,
         UpdatedAt: c.UpdatedAt,
     }
@@ -22,6 +22,8 @@ type ClusterResponse struct {
     RandomToken string `json:"random_token"`
     CertificateKey string `json:"certificate_key"`
     NodePools []NodePoolResponse `json:"node_pools,omitempty"`
+    DockerImage string `json:"docker_image"`
+    DockerTag string `json:"docker_tag"`
     CreatedAt time.Time `json:"created_at"`
     UpdatedAt time.Time `json:"updated_at"`
 }
@@ -30,12 +32,16 @@ type CreateClusterRequest struct {
     Name string `json:"name"`
     ClusterProvider string `json:"cluster_provider"`
     Region string `json:"region"`
+    DockerImage string `json:"docker_image"`
+    DockerTag string `json:"docker_tag"`
 }
 
 type UpdateClusterRequest struct {
     Name *string `json:"name,omitempty"`
     ClusterProvider *string `json:"cluster_provider,omitempty"`
     Region *string `json:"region,omitempty"`
+    DockerImage *string `json:"docker_image,omitempty"`
+    DockerTag *string `json:"docker_tag,omitempty"`
 }
 
 type AttachCaptainDomainRequest struct {
@@ -40,6 +40,8 @@ type Cluster struct {
     EncryptedKubeconfig string `gorm:"type:text" json:"-"`
     KubeIV string `json:"-"`
     KubeTag string `json:"-"`
+    DockerImage string `json:"docker_image"`
+    DockerTag string `json:"docker_tag"`
     CreatedAt time.Time `json:"created_at,omitempty" gorm:"type:timestamptz;column:created_at;not null;default:now()"`
     UpdatedAt time.Time `json:"updated_at,omitempty" gorm:"type:timestamptz;autoUpdateTime;column:updated_at;not null;default:now()"`
 }
|||||||
@@ -12,6 +12,8 @@
|
|||||||
| `control_plane_fqdn` | string |
|
| `control_plane_fqdn` | string |
|
||||||
| `control_plane_record_set` | [DtoRecordSetResponse](DtoRecordSetResponse.md) |
|
| `control_plane_record_set` | [DtoRecordSetResponse](DtoRecordSetResponse.md) |
|
||||||
| `created_at` | string |
|
| `created_at` | string |
|
||||||
|
| `docker_image` | string |
|
||||||
|
| `docker_tag` | string |
|
||||||
| `glueops_load_balancer` | [DtoLoadBalancerResponse](DtoLoadBalancerResponse.md) |
|
| `glueops_load_balancer` | [DtoLoadBalancerResponse](DtoLoadBalancerResponse.md) |
|
||||||
| `id` | string |
|
| `id` | string |
|
||||||
| `last_error` | string |
|
| `last_error` | string |
|
||||||
@@ -37,6 +39,8 @@ const example = {
|
|||||||
control_plane_fqdn: null,
|
control_plane_fqdn: null,
|
||||||
control_plane_record_set: null,
|
control_plane_record_set: null,
|
||||||
created_at: null,
|
created_at: null,
|
||||||
|
docker_image: null,
|
||||||
|
docker_tag: null,
|
||||||
glueops_load_balancer: null,
|
glueops_load_balancer: null,
|
||||||
id: null,
|
id: null,
|
||||||
last_error: null,
|
last_error: null,
|
||||||
|
|||||||
@@ -5,6 +5,8 @@
 | Name | Type |
 | ------------------ | ------ |
 | `cluster_provider` | string |
+| `docker_image` | string |
+| `docker_tag` | string |
 | `name` | string |
 | `region` | string |
 
@@ -16,6 +18,8 @@ import type { DtoCreateClusterRequest } from "@glueops/autoglue-sdk-go";
 // TODO: Update the object below with actual values
 const example = {
   cluster_provider: null,
+  docker_image: null,
+  docker_tag: null,
   name: null,
   region: null,
 } satisfies DtoCreateClusterRequest;
@@ -5,6 +5,8 @@
 | Name | Type |
 | ------------------ | ------ |
 | `cluster_provider` | string |
+| `docker_image` | string |
+| `docker_tag` | string |
 | `name` | string |
 | `region` | string |
 
@@ -16,6 +18,8 @@ import type { DtoUpdateClusterRequest } from "@glueops/autoglue-sdk-go";
 // TODO: Update the object below with actual values
 const example = {
   cluster_provider: null,
+  docker_image: null,
+  docker_tag: null,
   name: null,
   region: null,
 } satisfies DtoUpdateClusterRequest;
@@ -16,6 +16,6 @@
     "prepare": "npm run build"
   },
   "devDependencies": {
-    "typescript": "5.9.3"
+    "typescript": "^4.0 || ^5.0"
   }
 }
@@ -13,10 +13,17 @@
|
|||||||
*/
|
*/
|
||||||
|
|
||||||
import * as runtime from "../runtime";
|
import * as runtime from "../runtime";
|
||||||
import type {DtoAnnotationResponse, DtoCreateAnnotationRequest, DtoUpdateAnnotationRequest,} from "../models/index";
|
import type {
|
||||||
|
DtoAnnotationResponse,
|
||||||
|
DtoCreateAnnotationRequest,
|
||||||
|
DtoUpdateAnnotationRequest,
|
||||||
|
} from "../models/index";
|
||||||
import {
|
import {
|
||||||
DtoAnnotationResponseFromJSON,
|
DtoAnnotationResponseFromJSON,
|
||||||
|
DtoAnnotationResponseToJSON,
|
||||||
|
DtoCreateAnnotationRequestFromJSON,
|
||||||
DtoCreateAnnotationRequestToJSON,
|
DtoCreateAnnotationRequestToJSON,
|
||||||
|
DtoUpdateAnnotationRequestFromJSON,
|
||||||
DtoUpdateAnnotationRequestToJSON,
|
DtoUpdateAnnotationRequestToJSON,
|
||||||
} from "../models/index";
|
} from "../models/index";
|
||||||
|
|
||||||
|
|||||||
@@ -13,8 +13,22 @@
|
|||||||
*/
|
*/
|
||||||
|
|
||||||
import * as runtime from "../runtime";
|
import * as runtime from "../runtime";
|
||||||
import type {DtoEnqueueRequest, DtoJob, DtoPageJob, DtoQueueInfo,} from "../models/index";
|
import type {
|
||||||
import {DtoEnqueueRequestToJSON, DtoJobFromJSON, DtoPageJobFromJSON, DtoQueueInfoFromJSON,} from "../models/index";
|
DtoEnqueueRequest,
|
||||||
|
DtoJob,
|
||||||
|
DtoPageJob,
|
||||||
|
DtoQueueInfo,
|
||||||
|
} from "../models/index";
|
||||||
|
import {
|
||||||
|
DtoEnqueueRequestFromJSON,
|
||||||
|
DtoEnqueueRequestToJSON,
|
||||||
|
DtoJobFromJSON,
|
||||||
|
DtoJobToJSON,
|
||||||
|
DtoPageJobFromJSON,
|
||||||
|
DtoPageJobToJSON,
|
||||||
|
DtoQueueInfoFromJSON,
|
||||||
|
DtoQueueInfoToJSON,
|
||||||
|
} from "../models/index";
|
||||||
|
|
||||||
export interface AdminCancelArcherJobRequest {
|
export interface AdminCancelArcherJobRequest {
|
||||||
id: string;
|
id: string;
|
||||||
|
|||||||
@@ -13,13 +13,24 @@
|
|||||||
*/
|
*/
|
||||||
|
|
||||||
import * as runtime from "../runtime";
|
import * as runtime from "../runtime";
|
||||||
import type {DtoAuthStartResponse, DtoJWKS, DtoLogoutRequest, DtoRefreshRequest, DtoTokenPair,} from "../models/index";
|
import type {
|
||||||
|
DtoAuthStartResponse,
|
||||||
|
DtoJWKS,
|
||||||
|
DtoLogoutRequest,
|
||||||
|
DtoRefreshRequest,
|
||||||
|
DtoTokenPair,
|
||||||
|
} from "../models/index";
|
||||||
import {
|
import {
|
||||||
DtoAuthStartResponseFromJSON,
|
DtoAuthStartResponseFromJSON,
|
||||||
|
DtoAuthStartResponseToJSON,
|
||||||
DtoJWKSFromJSON,
|
DtoJWKSFromJSON,
|
||||||
|
DtoJWKSToJSON,
|
||||||
|
DtoLogoutRequestFromJSON,
|
||||||
DtoLogoutRequestToJSON,
|
DtoLogoutRequestToJSON,
|
||||||
|
DtoRefreshRequestFromJSON,
|
||||||
DtoRefreshRequestToJSON,
|
DtoRefreshRequestToJSON,
|
||||||
DtoTokenPairFromJSON,
|
DtoTokenPairFromJSON,
|
||||||
|
DtoTokenPairToJSON,
|
||||||
} from "../models/index";
|
} from "../models/index";
|
||||||
|
|
||||||
export interface AuthCallbackRequest {
|
export interface AuthCallbackRequest {
|
||||||
|
|||||||
@@ -25,14 +25,23 @@ import type {
|
|||||||
DtoUpdateClusterRequest,
|
DtoUpdateClusterRequest,
|
||||||
} from "../models/index";
|
} from "../models/index";
|
||||||
import {
|
import {
|
||||||
|
DtoAttachBastionRequestFromJSON,
|
||||||
DtoAttachBastionRequestToJSON,
|
DtoAttachBastionRequestToJSON,
|
||||||
|
DtoAttachCaptainDomainRequestFromJSON,
|
||||||
DtoAttachCaptainDomainRequestToJSON,
|
DtoAttachCaptainDomainRequestToJSON,
|
||||||
|
DtoAttachLoadBalancerRequestFromJSON,
|
||||||
DtoAttachLoadBalancerRequestToJSON,
|
DtoAttachLoadBalancerRequestToJSON,
|
||||||
|
DtoAttachNodePoolRequestFromJSON,
|
||||||
DtoAttachNodePoolRequestToJSON,
|
DtoAttachNodePoolRequestToJSON,
|
||||||
|
DtoAttachRecordSetRequestFromJSON,
|
||||||
DtoAttachRecordSetRequestToJSON,
|
DtoAttachRecordSetRequestToJSON,
|
||||||
DtoClusterResponseFromJSON,
|
DtoClusterResponseFromJSON,
|
||||||
|
DtoClusterResponseToJSON,
|
||||||
|
DtoCreateClusterRequestFromJSON,
|
||||||
DtoCreateClusterRequestToJSON,
|
DtoCreateClusterRequestToJSON,
|
||||||
|
DtoSetKubeconfigRequestFromJSON,
|
||||||
DtoSetKubeconfigRequestToJSON,
|
DtoSetKubeconfigRequestToJSON,
|
||||||
|
DtoUpdateClusterRequestFromJSON,
|
||||||
DtoUpdateClusterRequestToJSON,
|
DtoUpdateClusterRequestToJSON,
|
||||||
} from "../models/index";
|
} from "../models/index";
|
||||||
|
|
||||||
|
|||||||
@@ -13,10 +13,17 @@
|
|||||||
*/
|
*/
|
||||||
|
|
||||||
import * as runtime from "../runtime";
|
import * as runtime from "../runtime";
|
||||||
import type {DtoCreateCredentialRequest, DtoCredentialOut, DtoUpdateCredentialRequest,} from "../models/index";
|
import type {
|
||||||
|
DtoCreateCredentialRequest,
|
||||||
|
DtoCredentialOut,
|
||||||
|
DtoUpdateCredentialRequest,
|
||||||
|
} from "../models/index";
|
||||||
import {
|
import {
|
||||||
|
DtoCreateCredentialRequestFromJSON,
|
||||||
DtoCreateCredentialRequestToJSON,
|
DtoCreateCredentialRequestToJSON,
|
||||||
DtoCredentialOutFromJSON,
|
DtoCredentialOutFromJSON,
|
||||||
|
DtoCredentialOutToJSON,
|
||||||
|
DtoUpdateCredentialRequestFromJSON,
|
||||||
DtoUpdateCredentialRequestToJSON,
|
DtoUpdateCredentialRequestToJSON,
|
||||||
} from "../models/index";
|
} from "../models/index";
|
||||||
|
|
||||||
|
|||||||
@@ -22,11 +22,17 @@ import type {
|
|||||||
DtoUpdateRecordSetRequest,
|
DtoUpdateRecordSetRequest,
|
||||||
} from "../models/index";
|
} from "../models/index";
|
||||||
import {
|
import {
|
||||||
|
DtoCreateDomainRequestFromJSON,
|
||||||
DtoCreateDomainRequestToJSON,
|
DtoCreateDomainRequestToJSON,
|
||||||
|
DtoCreateRecordSetRequestFromJSON,
|
||||||
DtoCreateRecordSetRequestToJSON,
|
DtoCreateRecordSetRequestToJSON,
|
||||||
DtoDomainResponseFromJSON,
|
DtoDomainResponseFromJSON,
|
||||||
|
DtoDomainResponseToJSON,
|
||||||
DtoRecordSetResponseFromJSON,
|
DtoRecordSetResponseFromJSON,
|
||||||
|
DtoRecordSetResponseToJSON,
|
||||||
|
DtoUpdateDomainRequestFromJSON,
|
||||||
DtoUpdateDomainRequestToJSON,
|
DtoUpdateDomainRequestToJSON,
|
||||||
|
DtoUpdateRecordSetRequestFromJSON,
|
||||||
DtoUpdateRecordSetRequestToJSON,
|
DtoUpdateRecordSetRequestToJSON,
|
||||||
} from "../models/index";
|
} from "../models/index";
|
||||||
|
|
||||||
|
|||||||
@@ -13,8 +13,11 @@
|
|||||||
*/
|
*/
|
||||||
|
|
||||||
import * as runtime from "../runtime";
|
import * as runtime from "../runtime";
|
||||||
import type {HandlersHealthStatus} from "../models/index";
|
import type { HandlersHealthStatus } from "../models/index";
|
||||||
import {HandlersHealthStatusFromJSON,} from "../models/index";
|
import {
|
||||||
|
HandlersHealthStatusFromJSON,
|
||||||
|
HandlersHealthStatusToJSON,
|
||||||
|
} from "../models/index";
|
||||||
|
|
||||||
/**
|
/**
|
||||||
*
|
*
|
||||||
|
|||||||
@@ -13,8 +13,19 @@
|
|||||||
*/
|
*/
|
||||||
|
|
||||||
import * as runtime from "../runtime";
|
import * as runtime from "../runtime";
|
||||||
import type {DtoCreateLabelRequest, DtoLabelResponse, DtoUpdateLabelRequest,} from "../models/index";
|
import type {
|
||||||
import {DtoCreateLabelRequestToJSON, DtoLabelResponseFromJSON, DtoUpdateLabelRequestToJSON,} from "../models/index";
|
DtoCreateLabelRequest,
|
||||||
|
DtoLabelResponse,
|
||||||
|
DtoUpdateLabelRequest,
|
||||||
|
} from "../models/index";
|
||||||
|
import {
|
||||||
|
DtoCreateLabelRequestFromJSON,
|
||||||
|
DtoCreateLabelRequestToJSON,
|
||||||
|
DtoLabelResponseFromJSON,
|
||||||
|
DtoLabelResponseToJSON,
|
||||||
|
DtoUpdateLabelRequestFromJSON,
|
||||||
|
DtoUpdateLabelRequestToJSON,
|
||||||
|
} from "../models/index";
|
||||||
|
|
||||||
export interface CreateLabelRequest {
|
export interface CreateLabelRequest {
|
||||||
dtoCreateLabelRequest: DtoCreateLabelRequest;
|
dtoCreateLabelRequest: DtoCreateLabelRequest;
|
||||||
|
|||||||
@@ -19,8 +19,11 @@ import type {
|
|||||||
DtoUpdateLoadBalancerRequest,
|
DtoUpdateLoadBalancerRequest,
|
||||||
} from "../models/index";
|
} from "../models/index";
|
||||||
import {
|
import {
|
||||||
|
DtoCreateLoadBalancerRequestFromJSON,
|
||||||
DtoCreateLoadBalancerRequestToJSON,
|
DtoCreateLoadBalancerRequestToJSON,
|
||||||
DtoLoadBalancerResponseFromJSON,
|
DtoLoadBalancerResponseFromJSON,
|
||||||
|
DtoLoadBalancerResponseToJSON,
|
||||||
|
DtoUpdateLoadBalancerRequestFromJSON,
|
||||||
DtoUpdateLoadBalancerRequestToJSON,
|
DtoUpdateLoadBalancerRequestToJSON,
|
||||||
} from "../models/index";
|
} from "../models/index";
|
||||||
|
|
||||||
|
|||||||
@@ -13,8 +13,16 @@
|
|||||||
*/
|
*/
|
||||||
|
|
||||||
import * as runtime from "../runtime";
|
import * as runtime from "../runtime";
|
||||||
import type {HandlersCreateUserKeyRequest, HandlersUserAPIKeyOut,} from "../models/index";
|
import type {
|
||||||
import {HandlersCreateUserKeyRequestToJSON, HandlersUserAPIKeyOutFromJSON,} from "../models/index";
|
HandlersCreateUserKeyRequest,
|
||||||
|
HandlersUserAPIKeyOut,
|
||||||
|
} from "../models/index";
|
||||||
|
import {
|
||||||
|
HandlersCreateUserKeyRequestFromJSON,
|
||||||
|
HandlersCreateUserKeyRequestToJSON,
|
||||||
|
HandlersUserAPIKeyOutFromJSON,
|
||||||
|
HandlersUserAPIKeyOutToJSON,
|
||||||
|
} from "../models/index";
|
||||||
|
|
||||||
export interface CreateUserAPIKeyRequest {
|
export interface CreateUserAPIKeyRequest {
|
||||||
handlersCreateUserKeyRequest: HandlersCreateUserKeyRequest;
|
handlersCreateUserKeyRequest: HandlersCreateUserKeyRequest;
|
||||||
|
|||||||
@@ -13,8 +13,19 @@
|
|||||||
*/
|
*/
|
||||||
|
|
||||||
import * as runtime from "../runtime";
|
import * as runtime from "../runtime";
|
||||||
import type {HandlersMeResponse, HandlersUpdateMeRequest, ModelsUser,} from "../models/index";
|
import type {
|
||||||
import {HandlersMeResponseFromJSON, HandlersUpdateMeRequestToJSON, ModelsUserFromJSON,} from "../models/index";
|
HandlersMeResponse,
|
||||||
|
HandlersUpdateMeRequest,
|
||||||
|
ModelsUser,
|
||||||
|
} from "../models/index";
|
||||||
|
import {
|
||||||
|
HandlersMeResponseFromJSON,
|
||||||
|
HandlersMeResponseToJSON,
|
||||||
|
HandlersUpdateMeRequestFromJSON,
|
||||||
|
HandlersUpdateMeRequestToJSON,
|
||||||
|
ModelsUserFromJSON,
|
||||||
|
ModelsUserToJSON,
|
||||||
|
} from "../models/index";
|
||||||
|
|
||||||
export interface UpdateMeRequest {
|
export interface UpdateMeRequest {
|
||||||
handlersUpdateMeRequest: HandlersUpdateMeRequest;
|
handlersUpdateMeRequest: HandlersUpdateMeRequest;
|
||||||
|
|||||||
@@ -13,8 +13,11 @@
|
|||||||
*/
|
*/
|
||||||
|
|
||||||
import * as runtime from "../runtime";
|
import * as runtime from "../runtime";
|
||||||
import type {HandlersVersionResponse} from "../models/index";
|
import type { HandlersVersionResponse } from "../models/index";
|
||||||
import {HandlersVersionResponseFromJSON,} from "../models/index";
|
import {
|
||||||
|
HandlersVersionResponseFromJSON,
|
||||||
|
HandlersVersionResponseToJSON,
|
||||||
|
} from "../models/index";
|
||||||
|
|
||||||
/**
|
/**
|
||||||
*
|
*
|
||||||
|
|||||||
@@ -28,15 +28,26 @@ import type {
|
|||||||
} from "../models/index";
|
} from "../models/index";
|
||||||
import {
|
import {
|
||||||
DtoAnnotationResponseFromJSON,
|
DtoAnnotationResponseFromJSON,
|
||||||
|
DtoAnnotationResponseToJSON,
|
||||||
|
DtoAttachAnnotationsRequestFromJSON,
|
||||||
DtoAttachAnnotationsRequestToJSON,
|
DtoAttachAnnotationsRequestToJSON,
|
||||||
|
DtoAttachLabelsRequestFromJSON,
|
||||||
DtoAttachLabelsRequestToJSON,
|
DtoAttachLabelsRequestToJSON,
|
||||||
|
DtoAttachServersRequestFromJSON,
|
||||||
DtoAttachServersRequestToJSON,
|
DtoAttachServersRequestToJSON,
|
||||||
|
DtoAttachTaintsRequestFromJSON,
|
||||||
DtoAttachTaintsRequestToJSON,
|
DtoAttachTaintsRequestToJSON,
|
||||||
|
DtoCreateNodePoolRequestFromJSON,
|
||||||
DtoCreateNodePoolRequestToJSON,
|
DtoCreateNodePoolRequestToJSON,
|
||||||
DtoLabelResponseFromJSON,
|
DtoLabelResponseFromJSON,
|
||||||
|
DtoLabelResponseToJSON,
|
||||||
DtoNodePoolResponseFromJSON,
|
DtoNodePoolResponseFromJSON,
|
||||||
|
DtoNodePoolResponseToJSON,
|
||||||
DtoServerResponseFromJSON,
|
DtoServerResponseFromJSON,
|
||||||
|
DtoServerResponseToJSON,
|
||||||
DtoTaintResponseFromJSON,
|
DtoTaintResponseFromJSON,
|
||||||
|
DtoTaintResponseToJSON,
|
||||||
|
DtoUpdateNodePoolRequestFromJSON,
|
||||||
DtoUpdateNodePoolRequestToJSON,
|
DtoUpdateNodePoolRequestToJSON,
|
||||||
} from "../models/index";
|
} from "../models/index";
|
||||||
|
|
||||||
|
|||||||
@@ -22,16 +22,27 @@ import type {
|
|||||||
HandlersOrgUpdateReq,
|
HandlersOrgUpdateReq,
|
||||||
ModelsAPIKey,
|
ModelsAPIKey,
|
||||||
ModelsOrganization,
|
ModelsOrganization,
|
||||||
|
UtilsErrorResponse,
|
||||||
} from "../models/index";
|
} from "../models/index";
|
||||||
import {
|
import {
|
||||||
HandlersMemberOutFromJSON,
|
HandlersMemberOutFromJSON,
|
||||||
|
HandlersMemberOutToJSON,
|
||||||
|
HandlersMemberUpsertReqFromJSON,
|
||||||
HandlersMemberUpsertReqToJSON,
|
HandlersMemberUpsertReqToJSON,
|
||||||
|
HandlersOrgCreateReqFromJSON,
|
||||||
HandlersOrgCreateReqToJSON,
|
HandlersOrgCreateReqToJSON,
|
||||||
|
HandlersOrgKeyCreateReqFromJSON,
|
||||||
HandlersOrgKeyCreateReqToJSON,
|
HandlersOrgKeyCreateReqToJSON,
|
||||||
HandlersOrgKeyCreateRespFromJSON,
|
HandlersOrgKeyCreateRespFromJSON,
|
||||||
|
HandlersOrgKeyCreateRespToJSON,
|
||||||
|
HandlersOrgUpdateReqFromJSON,
|
||||||
HandlersOrgUpdateReqToJSON,
|
HandlersOrgUpdateReqToJSON,
|
||||||
ModelsAPIKeyFromJSON,
|
ModelsAPIKeyFromJSON,
|
||||||
|
ModelsAPIKeyToJSON,
|
||||||
ModelsOrganizationFromJSON,
|
ModelsOrganizationFromJSON,
|
||||||
|
ModelsOrganizationToJSON,
|
||||||
|
UtilsErrorResponseFromJSON,
|
||||||
|
UtilsErrorResponseToJSON,
|
||||||
} from "../models/index";
|
} from "../models/index";
|
||||||
|
|
||||||
export interface AddOrUpdateMemberRequest {
|
export interface AddOrUpdateMemberRequest {
|
||||||
|
|||||||
@@ -13,8 +13,19 @@
|
|||||||
*/
|
*/
|
||||||
|
|
||||||
import * as runtime from "../runtime";
|
import * as runtime from "../runtime";
|
||||||
import type {DtoCreateServerRequest, DtoServerResponse, DtoUpdateServerRequest,} from "../models/index";
|
import type {
|
||||||
import {DtoCreateServerRequestToJSON, DtoServerResponseFromJSON, DtoUpdateServerRequestToJSON,} from "../models/index";
|
DtoCreateServerRequest,
|
||||||
|
DtoServerResponse,
|
||||||
|
DtoUpdateServerRequest,
|
||||||
|
} from "../models/index";
|
||||||
|
import {
|
||||||
|
DtoCreateServerRequestFromJSON,
|
||||||
|
DtoCreateServerRequestToJSON,
|
||||||
|
DtoServerResponseFromJSON,
|
||||||
|
DtoServerResponseToJSON,
|
||||||
|
DtoUpdateServerRequestFromJSON,
|
||||||
|
DtoUpdateServerRequestToJSON,
|
||||||
|
} from "../models/index";
|
||||||
|
|
||||||
export interface CreateServerRequest {
|
export interface CreateServerRequest {
|
||||||
dtoCreateServerRequest: DtoCreateServerRequest;
|
dtoCreateServerRequest: DtoCreateServerRequest;
|
||||||
|
|||||||
@@ -13,8 +13,19 @@
  */
 
 import * as runtime from "../runtime";
-import type {DtoCreateSSHRequest, DtoSshResponse, GetSSHKey200Response,} from "../models/index";
-import {DtoCreateSSHRequestToJSON, DtoSshResponseFromJSON, GetSSHKey200ResponseFromJSON,} from "../models/index";
+import type {
+  DtoCreateSSHRequest,
+  DtoSshResponse,
+  GetSSHKey200Response,
+} from "../models/index";
+import {
+  DtoCreateSSHRequestFromJSON,
+  DtoCreateSSHRequestToJSON,
+  DtoSshResponseFromJSON,
+  DtoSshResponseToJSON,
+  GetSSHKey200ResponseFromJSON,
+  GetSSHKey200ResponseToJSON,
+} from "../models/index";
 
 export interface CreateSSHKeyRequest {
   dtoCreateSSHRequest: DtoCreateSSHRequest;
@@ -13,8 +13,19 @@
  */
 
 import * as runtime from "../runtime";
-import type {DtoCreateTaintRequest, DtoTaintResponse, DtoUpdateTaintRequest,} from "../models/index";
-import {DtoCreateTaintRequestToJSON, DtoTaintResponseFromJSON, DtoUpdateTaintRequestToJSON,} from "../models/index";
+import type {
+  DtoCreateTaintRequest,
+  DtoTaintResponse,
+  DtoUpdateTaintRequest,
+} from "../models/index";
+import {
+  DtoCreateTaintRequestFromJSON,
+  DtoCreateTaintRequestToJSON,
+  DtoTaintResponseFromJSON,
+  DtoTaintResponseToJSON,
+  DtoUpdateTaintRequestFromJSON,
+  DtoUpdateTaintRequestToJSON,
+} from "../models/index";
 
 export interface CreateTaintRequest {
   dtoCreateTaintRequest: DtoCreateTaintRequest;
@@ -12,6 +12,7 @@
  * Do not edit the class manually.
  */
 
+import { mapValues } from "../runtime";
 /**
  *
  * @export

(The same one-line mapValues import hunk appears in 11 regenerated model files at this point in the diff.)
@@ -12,16 +12,42 @@
  * Do not edit the class manually.
  */
 
-import type {DtoDomainResponse} from "./DtoDomainResponse";
-import {DtoDomainResponseFromJSON, DtoDomainResponseToJSON,} from "./DtoDomainResponse";
-import type {DtoLoadBalancerResponse} from "./DtoLoadBalancerResponse";
-import {DtoLoadBalancerResponseFromJSON, DtoLoadBalancerResponseToJSON,} from "./DtoLoadBalancerResponse";
-import type {DtoNodePoolResponse} from "./DtoNodePoolResponse";
-import {DtoNodePoolResponseFromJSON, DtoNodePoolResponseToJSON,} from "./DtoNodePoolResponse";
-import type {DtoServerResponse} from "./DtoServerResponse";
-import {DtoServerResponseFromJSON, DtoServerResponseToJSON,} from "./DtoServerResponse";
-import type {DtoRecordSetResponse} from "./DtoRecordSetResponse";
-import {DtoRecordSetResponseFromJSON, DtoRecordSetResponseToJSON,} from "./DtoRecordSetResponse";
+import { mapValues } from "../runtime";
+import type { DtoDomainResponse } from "./DtoDomainResponse";
+import {
+  DtoDomainResponseFromJSON,
+  DtoDomainResponseFromJSONTyped,
+  DtoDomainResponseToJSON,
+  DtoDomainResponseToJSONTyped,
+} from "./DtoDomainResponse";
+import type { DtoLoadBalancerResponse } from "./DtoLoadBalancerResponse";
+import {
+  DtoLoadBalancerResponseFromJSON,
+  DtoLoadBalancerResponseFromJSONTyped,
+  DtoLoadBalancerResponseToJSON,
+  DtoLoadBalancerResponseToJSONTyped,
+} from "./DtoLoadBalancerResponse";
+import type { DtoNodePoolResponse } from "./DtoNodePoolResponse";
+import {
+  DtoNodePoolResponseFromJSON,
+  DtoNodePoolResponseFromJSONTyped,
+  DtoNodePoolResponseToJSON,
+  DtoNodePoolResponseToJSONTyped,
+} from "./DtoNodePoolResponse";
+import type { DtoServerResponse } from "./DtoServerResponse";
+import {
+  DtoServerResponseFromJSON,
+  DtoServerResponseFromJSONTyped,
+  DtoServerResponseToJSON,
+  DtoServerResponseToJSONTyped,
+} from "./DtoServerResponse";
+import type { DtoRecordSetResponse } from "./DtoRecordSetResponse";
+import {
+  DtoRecordSetResponseFromJSON,
+  DtoRecordSetResponseFromJSONTyped,
+  DtoRecordSetResponseToJSON,
+  DtoRecordSetResponseToJSONTyped,
+} from "./DtoRecordSetResponse";
 
 /**
  *
@@ -77,6 +103,18 @@ export interface DtoClusterResponse {
    * @memberof DtoClusterResponse
    */
   created_at?: string;
+  /**
+   *
+   * @type {string}
+   * @memberof DtoClusterResponse
+   */
+  docker_image?: string;
+  /**
+   *
+   * @type {string}
+   * @memberof DtoClusterResponse
+   */
+  docker_tag?: string;
   /**
    *
    * @type {DtoLoadBalancerResponse}
@@ -179,6 +217,9 @@ export function DtoClusterResponseFromJSONTyped(
         ? undefined
         : DtoRecordSetResponseFromJSON(json["control_plane_record_set"]),
     created_at: json["created_at"] == null ? undefined : json["created_at"],
+    docker_image:
+      json["docker_image"] == null ? undefined : json["docker_image"],
+    docker_tag: json["docker_tag"] == null ? undefined : json["docker_tag"],
     glueops_load_balancer:
       json["glueops_load_balancer"] == null
         ? undefined
@@ -223,6 +264,8 @@ export function DtoClusterResponseToJSONTyped(
       value["control_plane_record_set"],
     ),
     created_at: value["created_at"],
+    docker_image: value["docker_image"],
+    docker_tag: value["docker_tag"],
     glueops_load_balancer: DtoLoadBalancerResponseToJSON(
       value["glueops_load_balancer"],
     ),
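
The three hunks above give DtoClusterResponse optional docker_image and docker_tag strings and wire them through the generated JSON mappers. A minimal consumption sketch follows; it assumes the generator also emits the usual DtoClusterResponseFromJSON wrapper around the FromJSONTyped function shown above, the import path is the one the generated apis/ files use, and the sample values are invented.

// Sketch only: parse a cluster payload that carries the new optional fields.
// DtoClusterResponseFromJSON is assumed to be the generator's thin wrapper
// around the DtoClusterResponseFromJSONTyped function shown in the hunk above.
import { DtoClusterResponseFromJSON } from "../models/index";

const payload = {
  name: "demo-cluster",            // hypothetical values throughout
  cluster_provider: "hetzner",
  docker_image: "glueops/captain", // new field
  docker_tag: "v1.2.3",            // new field
};

const cluster = DtoClusterResponseFromJSON(payload);
// Both fields are optional strings, so guard before using them.
console.log(cluster.docker_image ?? "(no image)", cluster.docker_tag ?? "(no tag)");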
@@ -12,6 +12,7 @@
  * Do not edit the class manually.
  */
 
+import { mapValues } from "../runtime";
 /**
  *
  * @export

(The same one-line mapValues import hunk appears in 2 regenerated model files at this point in the diff.)
@@ -24,6 +25,18 @@ export interface DtoCreateClusterRequest {
    * @memberof DtoCreateClusterRequest
    */
   cluster_provider?: string;
+  /**
+   *
+   * @type {string}
+   * @memberof DtoCreateClusterRequest
+   */
+  docker_image?: string;
+  /**
+   *
+   * @type {string}
+   * @memberof DtoCreateClusterRequest
+   */
+  docker_tag?: string;
   /**
    *
    * @type {string}
@@ -63,6 +76,9 @@ export function DtoCreateClusterRequestFromJSONTyped(
   return {
     cluster_provider:
       json["cluster_provider"] == null ? undefined : json["cluster_provider"],
+    docker_image:
+      json["docker_image"] == null ? undefined : json["docker_image"],
+    docker_tag: json["docker_tag"] == null ? undefined : json["docker_tag"],
     name: json["name"] == null ? undefined : json["name"],
     region: json["region"] == null ? undefined : json["region"],
   };
@@ -84,6 +100,8 @@ export function DtoCreateClusterRequestToJSONTyped(
 
   return {
     cluster_provider: value["cluster_provider"],
+    docker_image: value["docker_image"],
+    docker_tag: value["docker_tag"],
     name: value["name"],
     region: value["region"],
   };
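
DtoCreateClusterRequest gets the same pair of optional fields plus matching FromJSON/ToJSON plumbing. A short sketch of building a create-cluster body follows; DtoCreateClusterRequestToJSON is assumed to be the generator's usual wrapper around the ToJSONTyped function above, and all concrete values are invented.

// Sketch only: assemble a create-cluster body that sets the new fields.
import type { DtoCreateClusterRequest } from "../models/index";
import { DtoCreateClusterRequestToJSON } from "../models/index"; // assumed wrapper around ToJSONTyped

const req: DtoCreateClusterRequest = {
  name: "demo",                    // hypothetical values throughout
  region: "eu-central",
  cluster_provider: "hetzner",
  docker_image: "glueops/captain",
  docker_tag: "v1.2.3",
};

// ToJSONTyped copies the optional fields as-is; ones left undefined are
// dropped when the object is stringified.
const body = JSON.stringify(DtoCreateClusterRequestToJSON(req));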
@@ -12,6 +12,7 @@
  * Do not edit the class manually.
  */
 
+import { mapValues } from "../runtime";
 /**
  *
  * @export

(The same one-line mapValues import hunk appears in 13 regenerated model files at this point in the diff.)
@@ -12,8 +12,14 @@
  * Do not edit the class manually.
  */
 
-import type {DtoJWK} from "./DtoJWK";
-import {DtoJWKFromJSON, DtoJWKToJSON,} from "./DtoJWK";
+import { mapValues } from "../runtime";
+import type { DtoJWK } from "./DtoJWK";
+import {
+  DtoJWKFromJSON,
+  DtoJWKFromJSONTyped,
+  DtoJWKToJSON,
+  DtoJWKToJSONTyped,
+} from "./DtoJWK";
 
 /**
  *
@@ -12,8 +12,14 @@
  * Do not edit the class manually.
  */
 
-import type {DtoJobStatus} from "./DtoJobStatus";
-import {DtoJobStatusFromJSON, DtoJobStatusToJSON,} from "./DtoJobStatus";
+import { mapValues } from "../runtime";
+import type { DtoJobStatus } from "./DtoJobStatus";
+import {
+  DtoJobStatusFromJSON,
+  DtoJobStatusFromJSONTyped,
+  DtoJobStatusToJSON,
+  DtoJobStatusToJSONTyped,
+} from "./DtoJobStatus";
 
 /**
  *
@@ -12,6 +12,7 @@
  * Do not edit the class manually.
  */
 
+import { mapValues } from "../runtime";
 /**
  *
  * @export

(The same one-line mapValues import hunk appears in 3 regenerated model files at this point in the diff.)
@@ -12,14 +12,35 @@
  * Do not edit the class manually.
  */
 
-import type {DtoTaintResponse} from "./DtoTaintResponse";
-import {DtoTaintResponseFromJSON, DtoTaintResponseToJSON,} from "./DtoTaintResponse";
-import type {DtoLabelResponse} from "./DtoLabelResponse";
-import {DtoLabelResponseFromJSON, DtoLabelResponseToJSON,} from "./DtoLabelResponse";
-import type {DtoServerResponse} from "./DtoServerResponse";
-import {DtoServerResponseFromJSON, DtoServerResponseToJSON,} from "./DtoServerResponse";
-import type {DtoAnnotationResponse} from "./DtoAnnotationResponse";
-import {DtoAnnotationResponseFromJSON, DtoAnnotationResponseToJSON,} from "./DtoAnnotationResponse";
+import { mapValues } from "../runtime";
+import type { DtoTaintResponse } from "./DtoTaintResponse";
+import {
+  DtoTaintResponseFromJSON,
+  DtoTaintResponseFromJSONTyped,
+  DtoTaintResponseToJSON,
+  DtoTaintResponseToJSONTyped,
+} from "./DtoTaintResponse";
+import type { DtoLabelResponse } from "./DtoLabelResponse";
+import {
+  DtoLabelResponseFromJSON,
+  DtoLabelResponseFromJSONTyped,
+  DtoLabelResponseToJSON,
+  DtoLabelResponseToJSONTyped,
+} from "./DtoLabelResponse";
+import type { DtoServerResponse } from "./DtoServerResponse";
+import {
+  DtoServerResponseFromJSON,
+  DtoServerResponseFromJSONTyped,
+  DtoServerResponseToJSON,
+  DtoServerResponseToJSONTyped,
+} from "./DtoServerResponse";
+import type { DtoAnnotationResponse } from "./DtoAnnotationResponse";
+import {
+  DtoAnnotationResponseFromJSON,
+  DtoAnnotationResponseFromJSONTyped,
+  DtoAnnotationResponseToJSON,
+  DtoAnnotationResponseToJSONTyped,
+} from "./DtoAnnotationResponse";
 
 /**
  *
@@ -12,8 +12,14 @@
  * Do not edit the class manually.
  */
 
-import type {DtoJob} from "./DtoJob";
-import {DtoJobFromJSON, DtoJobToJSON,} from "./DtoJob";
+import { mapValues } from "../runtime";
+import type { DtoJob } from "./DtoJob";
+import {
+  DtoJobFromJSON,
+  DtoJobFromJSONTyped,
+  DtoJobToJSON,
+  DtoJobToJSONTyped,
+} from "./DtoJob";
 
 /**
  *
@@ -12,6 +12,7 @@
  * Do not edit the class manually.
  */
 
+import { mapValues } from "../runtime";
 /**
  *
  * @export

(The same one-line mapValues import hunk appears in 11 regenerated model files at this point in the diff.)
@@ -24,6 +25,18 @@ export interface DtoUpdateClusterRequest {
    * @memberof DtoUpdateClusterRequest
    */
   cluster_provider?: string;
+  /**
+   *
+   * @type {string}
+   * @memberof DtoUpdateClusterRequest
+   */
+  docker_image?: string;
+  /**
+   *
+   * @type {string}
+   * @memberof DtoUpdateClusterRequest
+   */
+  docker_tag?: string;
   /**
    *
    * @type {string}
@@ -63,6 +76,9 @@ export function DtoUpdateClusterRequestFromJSONTyped(
   return {
     cluster_provider:
       json["cluster_provider"] == null ? undefined : json["cluster_provider"],
+    docker_image:
+      json["docker_image"] == null ? undefined : json["docker_image"],
+    docker_tag: json["docker_tag"] == null ? undefined : json["docker_tag"],
     name: json["name"] == null ? undefined : json["name"],
     region: json["region"] == null ? undefined : json["region"],
   };
@@ -84,6 +100,8 @@ export function DtoUpdateClusterRequestToJSONTyped(
 
   return {
     cluster_provider: value["cluster_provider"],
+    docker_image: value["docker_image"],
+    docker_tag: value["docker_tag"],
     name: value["name"],
     region: value["region"],
   };
@@ -12,6 +12,7 @@
  * Do not edit the class manually.
  */
 
+import { mapValues } from "../runtime";
 /**
  *
  * @export

(The same one-line mapValues import hunk appears in 8 regenerated model files at this point in the diff.)
@@ -12,13 +12,19 @@
  * Do not edit the class manually.
  */
 
-import type {DtoSshResponse} from "./DtoSshResponse";
-import {DtoSshResponseFromJSONTyped, DtoSshResponseToJSON, instanceOfDtoSshResponse,} from "./DtoSshResponse";
-import type {DtoSshRevealResponse} from "./DtoSshRevealResponse";
-import {
-  DtoSshRevealResponseFromJSONTyped,
-  DtoSshRevealResponseToJSON,
-  instanceOfDtoSshRevealResponse,
-} from "./DtoSshRevealResponse";
+import type { DtoSshResponse } from "./DtoSshResponse";
+import {
+  instanceOfDtoSshResponse,
+  DtoSshResponseFromJSON,
+  DtoSshResponseFromJSONTyped,
+  DtoSshResponseToJSON,
+} from "./DtoSshResponse";
+import type { DtoSshRevealResponse } from "./DtoSshRevealResponse";
+import {
+  instanceOfDtoSshRevealResponse,
+  DtoSshRevealResponseFromJSON,
+  DtoSshRevealResponseFromJSONTyped,
+  DtoSshRevealResponseToJSON,
+} from "./DtoSshRevealResponse";
 
 /**
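
The import list above pairs each SSH model with an instanceOf* helper alongside its FromJSON/ToJSON functions, which suggests this file wraps a oneOf-style union of DtoSshResponse and DtoSshRevealResponse. A hedged sketch of telling the variants apart follows; only the imported names come from the diff, the function and its use of the union are illustrative.

// Sketch only: use the generated instanceOf* helpers to distinguish the two variants.
import type { DtoSshResponse } from "./DtoSshResponse";
import { instanceOfDtoSshResponse } from "./DtoSshResponse";
import type { DtoSshRevealResponse } from "./DtoSshRevealResponse";
import { instanceOfDtoSshRevealResponse } from "./DtoSshRevealResponse";

function variantName(value: DtoSshResponse | DtoSshRevealResponse): string {
  // Check the more specific reveal variant first, then fall back to the plain response.
  if (instanceOfDtoSshRevealResponse(value)) return "DtoSshRevealResponse";
  if (instanceOfDtoSshResponse(value)) return "DtoSshResponse";
  return "unknown";
}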
@@ -12,6 +12,7 @@
  * Do not edit the class manually.
  */
 
+import { mapValues } from "../runtime";
 /**
  *
  * @export

(The same one-line mapValues import hunk appears in 2 regenerated model files at this point in the diff.)
@@ -12,10 +12,21 @@
  * Do not edit the class manually.
  */
 
-import type {ModelsUserEmail} from "./ModelsUserEmail";
-import {ModelsUserEmailFromJSON, ModelsUserEmailToJSON,} from "./ModelsUserEmail";
-import type {ModelsOrganization} from "./ModelsOrganization";
-import {ModelsOrganizationFromJSON, ModelsOrganizationToJSON,} from "./ModelsOrganization";
+import { mapValues } from "../runtime";
+import type { ModelsUserEmail } from "./ModelsUserEmail";
+import {
+  ModelsUserEmailFromJSON,
+  ModelsUserEmailFromJSONTyped,
+  ModelsUserEmailToJSON,
+  ModelsUserEmailToJSONTyped,
+} from "./ModelsUserEmail";
+import type { ModelsOrganization } from "./ModelsOrganization";
+import {
+  ModelsOrganizationFromJSON,
+  ModelsOrganizationFromJSONTyped,
+  ModelsOrganizationToJSON,
+  ModelsOrganizationToJSONTyped,
+} from "./ModelsOrganization";
 
 /**
  *
@@ -12,6 +12,7 @@
  * Do not edit the class manually.
  */
 
+import { mapValues } from "../runtime";
 /**
  *
  * @export

(The same one-line mapValues import hunk appears in 9 regenerated model files at this point in the diff.)
Some files were not shown because too many files have changed in this diff.