feat: add worker distributor and model registry
All checks were successful
continuous-integration/drone/push Build is passing
18
internal/modelschedule/queries.sql
Normal file
@@ -0,0 +1,18 @@
-- name: Ping :one
SELECT 1;

-- name: GetLast :one
SELECT last_run
FROM
    model_schedules
WHERE
    model_name = $1
LIMIT 1;

-- name: UpsertModel :exec
INSERT INTO model_schedules (model_name, last_run)
VALUES ($1, $2)
ON CONFLICT (model_name)
DO UPDATE SET
    last_run = excluded.last_run;
32
internal/modelschedule/repositories/db.go
Normal file
@@ -0,0 +1,32 @@
// Code generated by sqlc. DO NOT EDIT.
// versions:
//   sqlc v1.23.0

package repositories

import (
	"context"

	"github.com/jackc/pgx/v5"
	"github.com/jackc/pgx/v5/pgconn"
)

type DBTX interface {
	Exec(context.Context, string, ...interface{}) (pgconn.CommandTag, error)
	Query(context.Context, string, ...interface{}) (pgx.Rows, error)
	QueryRow(context.Context, string, ...interface{}) pgx.Row
}

func New(db DBTX) *Queries {
	return &Queries{db: db}
}

type Queries struct {
	db DBTX
}

func (q *Queries) WithTx(tx pgx.Tx) *Queries {
	return &Queries{
		db: tx,
	}
}
30
internal/modelschedule/repositories/models.go
Normal file
@@ -0,0 +1,30 @@
// Code generated by sqlc. DO NOT EDIT.
// versions:
//   sqlc v1.23.0

package repositories

import (
	"github.com/google/uuid"
	"github.com/jackc/pgx/v5/pgtype"
)

type ModelSchedule struct {
	ModelName string             `json:"model_name"`
	LastRun   pgtype.Timestamptz `json:"last_run"`
}

type WorkSchedule struct {
	ScheduleID uuid.UUID          `json:"schedule_id"`
	WorkerID   uuid.UUID          `json:"worker_id"`
	StartRun   pgtype.Timestamptz `json:"start_run"`
	EndRun     pgtype.Timestamptz `json:"end_run"`
	UpdatedAt  pgtype.Timestamptz `json:"updated_at"`
	State      string             `json:"state"`
}

type WorkerRegister struct {
	WorkerID  uuid.UUID          `json:"worker_id"`
	Capacity  int32              `json:"capacity"`
	HeartBeat pgtype.Timestamptz `json:"heart_beat"`
}
19
internal/modelschedule/repositories/querier.go
Normal file
@@ -0,0 +1,19 @@
// Code generated by sqlc. DO NOT EDIT.
// versions:
//   sqlc v1.23.0

package repositories

import (
	"context"

	"github.com/jackc/pgx/v5/pgtype"
)

type Querier interface {
	GetLast(ctx context.Context, modelName string) (pgtype.Timestamptz, error)
	Ping(ctx context.Context) (int32, error)
	UpsertModel(ctx context.Context, arg *UpsertModelParams) error
}

var _ Querier = (*Queries)(nil)
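Because emit_interface is enabled, sqlc emits this Querier interface alongside the concrete Queries type, which leaves room for swapping in a test double that never touches Postgres. A minimal sketch of such a fake, kept in the same package; the fakeQuerier name, the constructor, and the in-memory map are illustrative assumptions, not part of this commit:

package repositories

import (
	"context"

	"github.com/jackc/pgx/v5"
	"github.com/jackc/pgx/v5/pgtype"
)

// fakeQuerier is a hypothetical in-memory stand-in for the generated
// Queries type (illustrative only, not part of this commit).
type fakeQuerier struct {
	lastRuns map[string]pgtype.Timestamptz
}

func newFakeQuerier() *fakeQuerier {
	return &fakeQuerier{lastRuns: map[string]pgtype.Timestamptz{}}
}

func (f *fakeQuerier) Ping(ctx context.Context) (int32, error) { return 1, nil }

func (f *fakeQuerier) GetLast(ctx context.Context, modelName string) (pgtype.Timestamptz, error) {
	last, ok := f.lastRuns[modelName]
	if !ok {
		// Mirror the "no previous run" signal the scheduler checks for.
		return pgtype.Timestamptz{}, pgx.ErrNoRows
	}
	return last, nil
}

func (f *fakeQuerier) UpsertModel(ctx context.Context, arg *UpsertModelParams) error {
	f.lastRuns[arg.ModelName] = arg.LastRun
	return nil
}

// Compile-time check that the fake keeps satisfying Querier.
var _ Querier = (*fakeQuerier)(nil)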
57
internal/modelschedule/repositories/queries.sql.go
Normal file
@@ -0,0 +1,57 @@
// Code generated by sqlc. DO NOT EDIT.
// versions:
//   sqlc v1.23.0
// source: queries.sql

package repositories

import (
	"context"

	"github.com/jackc/pgx/v5/pgtype"
)

const getLast = `-- name: GetLast :one
SELECT last_run
FROM
    model_schedules
WHERE
    model_name = $1
LIMIT 1
`

func (q *Queries) GetLast(ctx context.Context, modelName string) (pgtype.Timestamptz, error) {
	row := q.db.QueryRow(ctx, getLast, modelName)
	var last_run pgtype.Timestamptz
	err := row.Scan(&last_run)
	return last_run, err
}

const ping = `-- name: Ping :one
SELECT 1
`

func (q *Queries) Ping(ctx context.Context) (int32, error) {
	row := q.db.QueryRow(ctx, ping)
	var column_1 int32
	err := row.Scan(&column_1)
	return column_1, err
}

const upsertModel = `-- name: UpsertModel :exec
INSERT INTO model_schedules (model_name, last_run)
VALUES ($1, $2)
ON CONFLICT (model_name)
DO UPDATE SET
    last_run = excluded.last_run
`

type UpsertModelParams struct {
	ModelName string             `json:"model_name"`
	LastRun   pgtype.Timestamptz `json:"last_run"`
}

func (q *Queries) UpsertModel(ctx context.Context, arg *UpsertModelParams) error {
	_, err := q.db.Exec(ctx, upsertModel, arg.ModelName, arg.LastRun)
	return err
}
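Taken together, the generated code is a thin typed wrapper over a pgx pool. A minimal usage sketch, assuming it lives inside the orbis module (so the internal import resolves) and a local Postgres with the model_schedules migration applied; the connection string, model name, and error handling are illustrative, not part of this commit:

package main

import (
	"context"
	"log"
	"time"

	"github.com/jackc/pgx/v5/pgtype"
	"github.com/jackc/pgx/v5/pgxpool"

	"git.front.kjuulh.io/kjuulh/orbis/internal/modelschedule/repositories"
)

func main() {
	ctx := context.Background()

	// Connection string is made up; in orbis the pool presumably comes
	// from the shared persistence setup.
	pool, err := pgxpool.New(ctx, "postgres://localhost:5432/orbis")
	if err != nil {
		log.Fatal(err)
	}
	defer pool.Close()

	repo := repositories.New(pool)

	// Record a run for a made-up model, then read it back.
	if err := repo.UpsertModel(ctx, &repositories.UpsertModelParams{
		ModelName: "example-model",
		LastRun:   pgtype.Timestamptz{Time: time.Now(), Valid: true},
	}); err != nil {
		log.Fatal(err)
	}

	last, err := repo.GetLast(ctx, "example-model")
	if err != nil {
		log.Fatal(err)
	}
	log.Printf("last run: %s", last.Time)
}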
105
internal/modelschedule/schedule.go
Normal file
@@ -0,0 +1,105 @@
package modelschedule

import (
	"context"
	"errors"
	"fmt"
	"log/slog"
	"time"

	"git.front.kjuulh.io/kjuulh/orbis/internal/modelregistry"
	"git.front.kjuulh.io/kjuulh/orbis/internal/modelschedule/repositories"
	"github.com/adhocore/gronx"
	"github.com/jackc/pgx/v5"
	"github.com/jackc/pgx/v5/pgtype"
	"github.com/jackc/pgx/v5/pgxpool"
)

//go:generate sqlc generate

type ModelRunSchedule struct {
	Model *modelregistry.Model
	Start time.Time
	End   time.Time
}

type ModelSchedule struct {
	logger *slog.Logger

	db *pgxpool.Pool
}

func NewModelSchedule(logger *slog.Logger, db *pgxpool.Pool) *ModelSchedule {
	return &ModelSchedule{
		logger: logger,

		db: db,
	}
}

func (m *ModelSchedule) GetNext(
	ctx context.Context,
	model modelregistry.Model,
	start time.Time,
	end time.Time,
	amount uint,
) (models []ModelRunSchedule, lastExecuted *time.Time, err error) {
	repo := repositories.New(m.db)

	var startRun time.Time
	lastRun, err := repo.GetLast(ctx, model.Name)
	if err != nil {
		if !errors.Is(err, pgx.ErrNoRows) {
			return nil, nil, fmt.Errorf("failed to get last run for model: %s: %w", model.Name, err)
		}

		startRun = start
	} else {
		startRun = lastRun.Time
	}

	times := make([]ModelRunSchedule, 0, amount)
	for {
		next, err := gronx.NextTickAfter(model.Schedule, startRun, false)
		if err != nil {
			return nil, nil, fmt.Errorf("failed to find next model schedule: %w", err)
		}

		if next.Equal(time.Time{}) {
			break
		}

		if next.After(end) {
			break
		}

		times = append(times, ModelRunSchedule{
			Model: &model,
			Start: startRun,
			End:   next,
		})
		startRun = next

		if len(times) >= int(amount) {
			break
		}
	}

	if len(times) == 0 {
		return nil, nil, nil
	}

	return times, &startRun, nil
}

func (m *ModelSchedule) UpdateModelRun(ctx context.Context, model modelregistry.Model, lastRun *time.Time) error {
	repo := repositories.New(m.db)

	return repo.UpsertModel(ctx, &repositories.UpsertModelParams{
		ModelName: model.Name,
		LastRun: pgtype.Timestamptz{
			Time:  *lastRun,
			Valid: true,
		},
	})
}
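GetNext walks the model's cron expression forward from the stored last_run (or from the supplied start on the first run) and returns at most amount run windows plus the cursor to persist; when nothing falls inside the window it returns nil for both. A sketch of how a caller, such as the worker distributor this commit names, might drive it; the distributor package, the one-hour horizon, and the batch size of 100 are assumptions, not part of this commit:

package distributor

import (
	"context"
	"time"

	"git.front.kjuulh.io/kjuulh/orbis/internal/modelregistry"
	"git.front.kjuulh.io/kjuulh/orbis/internal/modelschedule"
)

// dispatch is a hypothetical caller loop around ModelSchedule.
func dispatch(ctx context.Context, schedule *modelschedule.ModelSchedule, model modelregistry.Model) error {
	now := time.Now()

	runs, last, err := schedule.GetNext(ctx, model, now, now.Add(time.Hour), 100)
	if err != nil {
		return err
	}
	if last == nil {
		// No run fell inside the window; nothing to record.
		return nil
	}

	for _, run := range runs {
		// Hand each window to a worker here; the distribution itself
		// lives outside this sketch.
		_ = run
	}

	// Persist the cursor so the next call resumes after the last window.
	return schedule.UpdateModelRun(ctx, model, last)
}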
21
internal/modelschedule/sqlc.yaml
Normal file
@@ -0,0 +1,21 @@
version: "2"
sql:
  - queries: queries.sql
    schema: ../persistence/migrations/
    engine: "postgresql"
    gen:
      go:
        out: "repositories"
        package: "repositories"
        sql_package: "pgx/v5"
        emit_json_tags: true
        emit_prepared_queries: true
        emit_interface: true
        emit_empty_slices: true
        emit_result_struct_pointers: true
        emit_params_struct_pointers: true
        overrides:
          - db_type: "uuid"
            go_type:
              import: "github.com/google/uuid"
              type: "UUID"