Compare commits
15 Commits
fab83b3e68
...
main
| Author | SHA1 | Date | |
|---|---|---|---|
| 4acf4971bf | |||
| d4b7554633 | |||
| 3b1fceb889 | |||
| ac35bcb776 | |||
| e86d87da36 | |||
| be4ed5c497 | |||
| 33552ae88f | |||
| a5cbe9eb31 | |||
| 32730fc2a7 | |||
| c4e48b6dc3 | |||
| bf80ad9d61 | |||
| ad12690d33 | |||
| 007987869d | |||
| 484e96f407 | |||
| 7a84045f33 |
21
backend/Makefile
Normal file
21
backend/Makefile
Normal file
@@ -0,0 +1,21 @@
|
||||
|
||||
# Build configuration for the codit backend binaries.
CODIT_SERVER_NAME=codit-server
CODIT_SERVER_VERSION=0.5.0

CODIT_SERVER_SRCS = cmd/codit-server/main.go
CODIT_DATA_BROWSER_SRCS = cmd/codit-data-browser/main.go

# all/clean produce no files of their own; mark them phony so a stray
# file named "all" or "clean" cannot mask them.
.PHONY: all clean

all: codit-server codit-data-browser

# Static (CGO-free) release build; name/version are injected via -ldflags.
# FIX: the ldflags previously referenced $(NAME) and $(VERSION), which are
# never defined anywhere in this Makefile, so empty strings were embedded
# into the binary. Use the variables actually declared above.
codit-server:
	CGO_ENABLED=0 go build -x -ldflags "-X 'main.CODIT_SERVER_NAME=$(CODIT_SERVER_NAME)' -X 'main.CODIT_SERVER_VERSION=$(CODIT_SERVER_VERSION)'" -o $@ $(CODIT_SERVER_SRCS)

# Debug build with the race detector enabled (the race detector requires cgo).
codit-server.debug:
	CGO_ENABLED=1 go build -race -x -ldflags "-X 'main.CODIT_SERVER_NAME=$(CODIT_SERVER_NAME)' -X 'main.CODIT_SERVER_VERSION=$(CODIT_SERVER_VERSION)'" -o $@ $(CODIT_SERVER_SRCS)

codit-data-browser:
	CGO_ENABLED=0 go build -x -ldflags "-X 'main.CODIT_SERVER_NAME=$(CODIT_SERVER_NAME)' -X 'main.CODIT_SERVER_VERSION=$(CODIT_SERVER_VERSION)'" -o $@ $(CODIT_DATA_BROWSER_SRCS)

clean:
	go clean -x
	rm -rf codit-server codit-server.debug codit-data-browser
|
||||
572
backend/cmd/codit-data-browser/main.go
Normal file
572
backend/cmd/codit-data-browser/main.go
Normal file
@@ -0,0 +1,572 @@
|
||||
package main
|
||||
|
||||
import "database/sql"
|
||||
import "flag"
|
||||
import "fmt"
|
||||
import "os"
|
||||
import "path/filepath"
|
||||
import "sort"
|
||||
import "strconv"
|
||||
import "strings"
|
||||
|
||||
import _ "modernc.org/sqlite"
|
||||
import "github.com/gdamore/tcell/v2"
|
||||
import "github.com/rivo/tview"
|
||||
|
||||
// projectInfo mirrors one row of the projects table
// (see the SELECT in loadMaps).
type projectInfo struct {
	ID       int64  // database primary key; 0 means "not found" in map lookups
	PublicID string // externally visible identifier
	Slug     string // URL-style short name, shown in mismatch reports
	Name     string
}
|
||||
|
||||
// repoInfo mirrors one row of the repos table
// (see the SELECT in loadMaps).
type repoInfo struct {
	ID         int64  // database primary key; 0 means "not found" in map lookups
	PublicID   string // externally visible identifier
	ProjectID  int64  // owning project (key into browser.ProjectsByID)
	Name       string
	RepoType   string // "git", "rpm", or "docker" (see expectedRepoPath)
	LegacyPath string // scanned from the "path" column; shown as "current" in mismatch reports
}
|
||||
|
||||
// entryInfo is one row of the directory listing shown in the table.
type entryInfo struct {
	Name  string
	IsDir bool   // directories are listed before files and can be opened
	Hint  string // database annotation from resolvePathHint; "" when unresolved
}
|
||||
|
||||
// browser holds the full application state: the database handle, the
// project/repo lookup maps, the current directory listing, and the tview
// widgets that display it.
type browser struct {
	DB           *sql.DB
	DataDir      string // cleaned root of the codit data tree; navigation never escapes it
	Cwd          string // current directory, always inside DataDir (see pathInside)
	ProjectsByID map[int64]projectInfo
	ReposByID    map[int64]repoInfo
	Entries      []entryInfo // listing of Cwd: dirs first, then files, each sorted by name
	App          *tview.Application
	Root         tview.Primitive // layout restored when a modal closes
	Header       *tview.TextView // shows the current path
	Table        *tview.Table    // the listing; owns keyboard focus
	Status       *tview.TextView // one-line status / error messages
}
|
||||
|
||||
func main() {
|
||||
var dbPath string
|
||||
var dataDir string
|
||||
var err error
|
||||
var br *browser
|
||||
flag.StringVar(&dbPath, "db", "./codit-data/codit.db", "sqlite database path")
|
||||
flag.StringVar(&dataDir, "data", "./codit-data", "codit data directory")
|
||||
flag.Parse()
|
||||
|
||||
br, err = newBrowser(dbPath, dataDir)
|
||||
if err != nil {
|
||||
fmt.Fprintf(os.Stderr, "init error: %v\n", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
err = br.run()
|
||||
if err != nil {
|
||||
fmt.Fprintf(os.Stderr, "run error: %v\n", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
}
|
||||
|
||||
func newBrowser(dbPath string, dataDir string) (*browser, error) {
|
||||
var err error
|
||||
var db *sql.DB
|
||||
var br *browser
|
||||
db, err = sql.Open("sqlite", "file:"+dbPath+"?_pragma=foreign_keys(1)")
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
br = &browser{
|
||||
DB: db,
|
||||
DataDir: filepath.Clean(dataDir),
|
||||
Cwd: filepath.Clean(dataDir),
|
||||
ProjectsByID: map[int64]projectInfo{},
|
||||
ReposByID: map[int64]repoInfo{},
|
||||
App: tview.NewApplication(),
|
||||
Header: tview.NewTextView(),
|
||||
Table: tview.NewTable(),
|
||||
Status: tview.NewTextView(),
|
||||
}
|
||||
err = br.loadMaps()
|
||||
if err != nil {
|
||||
_ = db.Close()
|
||||
return nil, err
|
||||
}
|
||||
err = br.refreshEntries()
|
||||
if err != nil {
|
||||
_ = db.Close()
|
||||
return nil, err
|
||||
}
|
||||
br.setupUI()
|
||||
br.renderAll("Ready. q quit, Enter/Right open, Left/Backspace up, PgUp/PgDn/Home/End, r reload, i info, c check")
|
||||
return br, nil
|
||||
}
|
||||
|
||||
func (b *browser) run() error {
|
||||
var err error
|
||||
b.Root = b.layout()
|
||||
err = b.App.SetRoot(b.Root, true).EnableMouse(false).Run()
|
||||
_ = b.DB.Close()
|
||||
return err
|
||||
}
|
||||
|
||||
func (b *browser) layout() tview.Primitive {
|
||||
var root *tview.Flex
|
||||
root = tview.NewFlex().SetDirection(tview.FlexRow)
|
||||
root.AddItem(b.Header, 1, 0, false)
|
||||
root.AddItem(b.Table, 0, 1, true)
|
||||
root.AddItem(b.Status, 1, 0, false)
|
||||
return root
|
||||
}
|
||||
|
||||
func (b *browser) setupUI() {
|
||||
var header string
|
||||
header = "Type Name Hint"
|
||||
b.Header.SetDynamicColors(true)
|
||||
b.Header.SetText(header)
|
||||
b.Table.SetSelectable(true, false)
|
||||
b.Table.SetFixed(0, 0)
|
||||
b.Table.SetBorders(false)
|
||||
b.Table.SetSeparator(' ')
|
||||
b.Table.SetInputCapture(b.captureKey)
|
||||
b.Table.SetSelectedStyle(tcell.StyleDefault.Foreground(tcell.ColorBlack).Background(tcell.ColorLightCyan))
|
||||
}
|
||||
|
||||
func (b *browser) renderAll(status string) {
|
||||
var i int
|
||||
var e entryInfo
|
||||
var kind string
|
||||
var name string
|
||||
var rel string
|
||||
var err error
|
||||
var row int
|
||||
b.Table.Clear()
|
||||
for i = 0; i < len(b.Entries); i++ {
|
||||
e = b.Entries[i]
|
||||
if e.IsDir {
|
||||
kind = "DIR "
|
||||
} else {
|
||||
kind = "FILE"
|
||||
}
|
||||
name = e.Name
|
||||
row = i
|
||||
b.Table.SetCell(row, 0, tview.NewTableCell(kind).SetSelectable(false))
|
||||
b.Table.SetCell(row, 1, tview.NewTableCell(name))
|
||||
b.Table.SetCell(row, 2, tview.NewTableCell(e.Hint).SetSelectable(false))
|
||||
}
|
||||
if len(b.Entries) > 0 {
|
||||
b.Table.Select(0, 1)
|
||||
}
|
||||
rel, err = filepath.Rel(b.DataDir, b.Cwd)
|
||||
if err != nil {
|
||||
rel = b.Cwd
|
||||
}
|
||||
if rel == "." {
|
||||
rel = "/"
|
||||
} else {
|
||||
rel = "/" + filepath.ToSlash(rel)
|
||||
}
|
||||
b.Header.SetText("Path: " + rel + " (entries: " + strconv.Itoa(len(b.Entries)) + ")")
|
||||
b.Status.SetText(status)
|
||||
}
|
||||
|
||||
// captureKey is the table's input handler and implements every keyboard
// command of the browser. Returning nil consumes the event; returning the
// event lets tview's default table navigation (plain arrow up/down)
// handle it.
func (b *browser) captureKey(event *tcell.EventKey) *tcell.EventKey {
	var key tcell.Key
	var r rune
	var row int
	// Inner height of the table, captured up front; used below as the
	// page size for PgUp/PgDn.
	var _, _, _, h = b.Table.GetInnerRect()
	var page int
	var err error
	var info string
	var mismatches int
	key = event.Key()
	r = event.Rune()
	// Enter / Right: descend into the selected directory.
	if key == tcell.KeyEnter || key == tcell.KeyRight {
		err = b.openSelected()
		if err != nil {
			b.Status.SetText("open failed: " + err.Error())
		}
		return nil
	}
	// Left / Backspace: go up to the parent directory.
	if key == tcell.KeyLeft || key == tcell.KeyBackspace || key == tcell.KeyBackspace2 {
		err = b.up()
		if err != nil {
			b.Status.SetText("up failed: " + err.Error())
		}
		return nil
	}
	// Home: jump to the first entry.
	if key == tcell.KeyHome {
		if len(b.Entries) > 0 {
			b.Table.Select(0, 1)
		}
		return nil
	}
	// End: jump to the last entry.
	if key == tcell.KeyEnd {
		if len(b.Entries) > 0 {
			b.Table.Select(len(b.Entries)-1, 1)
		}
		return nil
	}
	// PgUp / PgDn: move by one screenful, clamped to the list bounds.
	if key == tcell.KeyPgUp || key == tcell.KeyPgDn {
		row, _ = b.Table.GetSelection()
		page = h
		if page < 1 {
			page = 10 // fallback when the table has no measured height yet
		}
		if key == tcell.KeyPgUp {
			row = row - page
			if row < 0 {
				row = 0
			}
		} else {
			row = row + page
			if row >= len(b.Entries) {
				row = len(b.Entries) - 1
			}
			if row < 0 {
				row = 0 // guards the empty-list case (len-1 == -1)
			}
		}
		if len(b.Entries) > 0 {
			b.Table.Select(row, 1)
		}
		return nil
	}
	// q: quit the application.
	if r == 'q' || r == 'Q' {
		b.App.Stop()
		return nil
	}
	// r: reload the database maps and the current directory listing.
	// The maps are replaced wholesale so deleted rows disappear.
	if r == 'r' || r == 'R' {
		b.ProjectsByID = map[int64]projectInfo{}
		b.ReposByID = map[int64]repoInfo{}
		err = b.loadMaps()
		if err != nil {
			b.Status.SetText("reload failed: " + err.Error())
			return nil
		}
		err = b.refreshEntries()
		if err != nil {
			b.Status.SetText("reload list failed: " + err.Error())
			return nil
		}
		b.renderAll("Reloaded")
		return nil
	}
	// i: show details for the selected entry in the status line.
	if r == 'i' || r == 'I' {
		info = b.selectedInfo()
		b.Status.SetText(info)
		return nil
	}
	// c: check repos against their expected on-disk paths; a modal is
	// shown when anything is missing (see showMismatches).
	if r == 'c' || r == 'C' {
		mismatches = b.showMismatches()
		if mismatches == 0 {
			b.Status.SetText("check complete: no missing expected repo directories")
		}
		return nil
	}
	// Anything else falls through to tview's default handling.
	return event
}
|
||||
|
||||
func (b *browser) openSelected() error {
|
||||
var row int
|
||||
var name string
|
||||
var err error
|
||||
row, _ = b.Table.GetSelection()
|
||||
if row < 0 || row >= len(b.Entries) {
|
||||
return nil
|
||||
}
|
||||
if !b.Entries[row].IsDir {
|
||||
return nil
|
||||
}
|
||||
name = b.Entries[row].Name
|
||||
b.Cwd = filepath.Clean(filepath.Join(b.Cwd, name))
|
||||
if !pathInside(b.Cwd, b.DataDir) {
|
||||
return fmt.Errorf("outside data dir")
|
||||
}
|
||||
err = b.refreshEntries()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
b.renderAll("Opened " + b.Cwd)
|
||||
return nil
|
||||
}
|
||||
|
||||
func (b *browser) up() error {
|
||||
var parent string
|
||||
var err error
|
||||
parent = filepath.Dir(b.Cwd)
|
||||
if !pathInside(parent, b.DataDir) {
|
||||
return nil
|
||||
}
|
||||
b.Cwd = parent
|
||||
err = b.refreshEntries()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
b.renderAll("Opened " + b.Cwd)
|
||||
return nil
|
||||
}
|
||||
|
||||
func (b *browser) selectedInfo() string {
|
||||
var row int
|
||||
var e entryInfo
|
||||
row, _ = b.Table.GetSelection()
|
||||
if row < 0 || row >= len(b.Entries) {
|
||||
return "no selection"
|
||||
}
|
||||
e = b.Entries[row]
|
||||
if e.Hint == "" {
|
||||
return e.Name
|
||||
}
|
||||
return e.Name + " | " + e.Hint
|
||||
}
|
||||
|
||||
func (b *browser) refreshEntries() error {
|
||||
var raw []os.DirEntry
|
||||
var i int
|
||||
var item os.DirEntry
|
||||
var entries []entryInfo
|
||||
var info entryInfo
|
||||
var dirs []entryInfo
|
||||
var files []entryInfo
|
||||
var err error
|
||||
raw, err = os.ReadDir(b.Cwd)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
entries = make([]entryInfo, 0, len(raw))
|
||||
for i = 0; i < len(raw); i++ {
|
||||
item = raw[i]
|
||||
info = entryInfo{
|
||||
Name: item.Name(),
|
||||
IsDir: item.IsDir(),
|
||||
Hint: b.resolvePathHint(item.Name()),
|
||||
}
|
||||
entries = append(entries, info)
|
||||
}
|
||||
dirs = make([]entryInfo, 0, len(entries))
|
||||
files = make([]entryInfo, 0, len(entries))
|
||||
for i = 0; i < len(entries); i++ {
|
||||
if entries[i].IsDir {
|
||||
dirs = append(dirs, entries[i])
|
||||
} else {
|
||||
files = append(files, entries[i])
|
||||
}
|
||||
}
|
||||
sort.Slice(dirs, func(i int, j int) bool {
|
||||
return strings.ToLower(dirs[i].Name) < strings.ToLower(dirs[j].Name)
|
||||
})
|
||||
sort.Slice(files, func(i int, j int) bool {
|
||||
return strings.ToLower(files[i].Name) < strings.ToLower(files[j].Name)
|
||||
})
|
||||
b.Entries = make([]entryInfo, 0, len(entries))
|
||||
b.Entries = append(b.Entries, dirs...)
|
||||
b.Entries = append(b.Entries, files...)
|
||||
return nil
|
||||
}
|
||||
|
||||
func (b *browser) loadMaps() error {
|
||||
var err error
|
||||
var rows *sql.Rows
|
||||
var p projectInfo
|
||||
var r repoInfo
|
||||
rows, err = b.DB.Query(`SELECT id, public_id, slug, name FROM projects`)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
for rows.Next() {
|
||||
err = rows.Scan(&p.ID, &p.PublicID, &p.Slug, &p.Name)
|
||||
if err != nil {
|
||||
rows.Close()
|
||||
return err
|
||||
}
|
||||
b.ProjectsByID[p.ID] = p
|
||||
}
|
||||
err = rows.Close()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
rows, err = b.DB.Query(`SELECT id, public_id, project_id, name, type, path FROM repos`)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
for rows.Next() {
|
||||
err = rows.Scan(&r.ID, &r.PublicID, &r.ProjectID, &r.Name, &r.RepoType, &r.LegacyPath)
|
||||
if err != nil {
|
||||
rows.Close()
|
||||
return err
|
||||
}
|
||||
b.ReposByID[r.ID] = r
|
||||
}
|
||||
err = rows.Close()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (b *browser) resolvePathHint(name string) string {
|
||||
var rel string
|
||||
var parts []string
|
||||
var service string
|
||||
var projectID int64
|
||||
var repoID int64
|
||||
var project projectInfo
|
||||
var repo repoInfo
|
||||
var err error
|
||||
rel, err = filepath.Rel(b.DataDir, filepath.Join(b.Cwd, name))
|
||||
if err != nil {
|
||||
return ""
|
||||
}
|
||||
parts = strings.Split(filepath.ToSlash(rel), "/")
|
||||
if len(parts) < 2 {
|
||||
return ""
|
||||
}
|
||||
service = parts[0]
|
||||
projectID, err = parseStorageIDSegment(parts[1])
|
||||
if err == nil {
|
||||
project = b.ProjectsByID[projectID]
|
||||
if project.ID > 0 && len(parts) == 2 {
|
||||
return fmt.Sprintf("project: %s (%s)", project.Slug, project.PublicID)
|
||||
}
|
||||
}
|
||||
if len(parts) < 3 {
|
||||
return ""
|
||||
}
|
||||
if service == "git" {
|
||||
parts[2] = strings.TrimSuffix(parts[2], ".git")
|
||||
}
|
||||
repoID, err = parseStorageIDSegment(parts[2])
|
||||
if err != nil {
|
||||
return ""
|
||||
}
|
||||
repo = b.ReposByID[repoID]
|
||||
if repo.ID == 0 {
|
||||
return ""
|
||||
}
|
||||
return fmt.Sprintf("repo: %s (%s) type=%s", repo.Name, repo.PublicID, repo.RepoType)
|
||||
}
|
||||
|
||||
func (b *browser) countMismatches() int {
|
||||
var ids []int64
|
||||
var repoID int64
|
||||
var repo repoInfo
|
||||
var expected string
|
||||
var err error
|
||||
var count int
|
||||
ids = make([]int64, 0, len(b.ReposByID))
|
||||
for repoID = range b.ReposByID {
|
||||
ids = append(ids, repoID)
|
||||
}
|
||||
sort.Slice(ids, func(i int, j int) bool { return ids[i] < ids[j] })
|
||||
count = 0
|
||||
for _, repoID = range ids {
|
||||
repo = b.ReposByID[repoID]
|
||||
expected = expectedRepoPath(b.DataDir, repo.RepoType, repo.ProjectID, repo.ID)
|
||||
_, err = os.Stat(expected)
|
||||
if err != nil {
|
||||
count = count + 1
|
||||
}
|
||||
}
|
||||
return count
|
||||
}
|
||||
|
||||
func (b *browser) showMismatches() int {
|
||||
var lines []string
|
||||
var text string
|
||||
var modal *tview.Modal
|
||||
lines = b.collectMismatchLines()
|
||||
if len(lines) == 0 {
|
||||
return 0
|
||||
}
|
||||
text = strings.Join(lines, "\n")
|
||||
modal = tview.NewModal().
|
||||
SetText(text).
|
||||
AddButtons([]string{"Close"}).
|
||||
SetDoneFunc(func(buttonIndex int, buttonLabel string) {
|
||||
b.App.SetRoot(b.Root, true).SetFocus(b.Table)
|
||||
})
|
||||
b.App.SetRoot(modal, true).SetFocus(modal)
|
||||
return len(lines)
|
||||
}
|
||||
|
||||
func (b *browser) collectMismatchLines() []string {
|
||||
var ids []int64
|
||||
var repoID int64
|
||||
var repo repoInfo
|
||||
var project projectInfo
|
||||
var expected string
|
||||
var lines []string
|
||||
var err error
|
||||
ids = make([]int64, 0, len(b.ReposByID))
|
||||
for repoID = range b.ReposByID {
|
||||
ids = append(ids, repoID)
|
||||
}
|
||||
sort.Slice(ids, func(i int, j int) bool { return ids[i] < ids[j] })
|
||||
lines = []string{}
|
||||
for _, repoID = range ids {
|
||||
repo = b.ReposByID[repoID]
|
||||
expected = expectedRepoPath(b.DataDir, repo.RepoType, repo.ProjectID, repo.ID)
|
||||
_, err = os.Stat(expected)
|
||||
if err == nil {
|
||||
continue
|
||||
}
|
||||
project = b.ProjectsByID[repo.ProjectID]
|
||||
lines = append(lines, fmt.Sprintf("%s/%s [%s]", project.Slug, repo.Name, repo.RepoType))
|
||||
lines = append(lines, "expected: "+expected)
|
||||
lines = append(lines, "current : "+repo.LegacyPath)
|
||||
lines = append(lines, "")
|
||||
}
|
||||
if len(lines) > 0 {
|
||||
lines = lines[:len(lines)-1]
|
||||
}
|
||||
return lines
|
||||
}
|
||||
|
||||
func expectedRepoPath(dataDir string, repoType string, projectID int64, repoID int64) string {
|
||||
var p string
|
||||
var r string
|
||||
p = formatStorageIDSegment(projectID)
|
||||
r = formatStorageIDSegment(repoID)
|
||||
if repoType == "git" {
|
||||
return filepath.Join(dataDir, "git", p, r+".git")
|
||||
}
|
||||
if repoType == "rpm" {
|
||||
return filepath.Join(dataDir, "rpm", p, r)
|
||||
}
|
||||
if repoType == "docker" {
|
||||
return filepath.Join(dataDir, "docker", p, r)
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
func formatStorageIDSegment(id int64) string {
|
||||
return fmt.Sprintf("%016x", id)
|
||||
}
|
||||
|
||||
// parseStorageIDSegment parses a storage path segment as an ID. Hex is
// tried first (matching formatStorageIDSegment's output), with base-10 as
// a fallback for legacy decimal layouts. Note that digit-only strings
// therefore always parse as hex, e.g. "10" -> 16.
func parseStorageIDSegment(s string) (int64, error) {
	t := strings.TrimSpace(s)
	if v, err := strconv.ParseInt(t, 16, 64); err == nil {
		return v, nil
	}
	return strconv.ParseInt(t, 10, 64)
}
|
||||
|
||||
func pathInside(path string, root string) bool {
|
||||
var p string
|
||||
var r string
|
||||
var prefix string
|
||||
p = filepath.Clean(path)
|
||||
r = filepath.Clean(root)
|
||||
if p == r {
|
||||
return true
|
||||
}
|
||||
prefix = r + string(os.PathSeparator)
|
||||
return strings.HasPrefix(p, prefix)
|
||||
}
|
||||
File diff suppressed because it is too large
Load Diff
@@ -3,9 +3,11 @@ module codit
|
||||
go 1.25.3
|
||||
|
||||
require (
|
||||
github.com/gdamore/tcell/v2 v2.13.8
|
||||
github.com/go-git/go-git/v5 v5.12.0
|
||||
github.com/go-ldap/ldap/v3 v3.4.6
|
||||
github.com/graphql-go/graphql v0.8.0
|
||||
github.com/rivo/tview v0.42.0
|
||||
github.com/sosedoff/gitkit v0.4.0
|
||||
golang.org/x/crypto v0.47.0
|
||||
modernc.org/sqlite v1.30.0
|
||||
@@ -22,6 +24,7 @@ require (
|
||||
github.com/cyphar/filepath-securejoin v0.2.4 // indirect
|
||||
github.com/dustin/go-humanize v1.0.1 // indirect
|
||||
github.com/emirpasic/gods v1.18.1 // indirect
|
||||
github.com/gdamore/encoding v1.0.1 // indirect
|
||||
github.com/go-asn1-ber/asn1-ber v1.5.5 // indirect
|
||||
github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376 // indirect
|
||||
github.com/go-git/go-billy/v5 v5.5.0 // indirect
|
||||
@@ -32,19 +35,24 @@ require (
|
||||
github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 // indirect
|
||||
github.com/kevinburke/ssh_config v1.2.0 // indirect
|
||||
github.com/klauspost/compress v1.18.3 // indirect
|
||||
github.com/lucasb-eyer/go-colorful v1.3.0 // indirect
|
||||
github.com/mattn/go-isatty v0.0.20 // indirect
|
||||
github.com/ncruces/go-strftime v0.1.9 // indirect
|
||||
github.com/pjbgf/sha1cd v0.3.0 // indirect
|
||||
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec // indirect
|
||||
github.com/rivo/uniseg v0.4.7 // indirect
|
||||
github.com/sassoftware/go-rpmutils v0.4.1-0.20250318174028-2660c86d578c // indirect
|
||||
github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3 // indirect
|
||||
github.com/skeema/knownhosts v1.2.2 // indirect
|
||||
github.com/ulikunitz/xz v0.5.15 // indirect
|
||||
github.com/xanzy/ssh-agent v0.3.3 // indirect
|
||||
golang.org/x/mod v0.16.0 // indirect
|
||||
golang.org/x/mod v0.31.0 // indirect
|
||||
golang.org/x/net v0.48.0 // indirect
|
||||
golang.org/x/sync v0.19.0 // indirect
|
||||
golang.org/x/sys v0.40.0 // indirect
|
||||
golang.org/x/tools v0.19.0 // indirect
|
||||
golang.org/x/term v0.39.0 // indirect
|
||||
golang.org/x/text v0.33.0 // indirect
|
||||
golang.org/x/tools v0.40.0 // indirect
|
||||
gopkg.in/warnings.v0 v0.1.2 // indirect
|
||||
modernc.org/gc/v3 v3.0.0-20240107210532-573471604cb6 // indirect
|
||||
modernc.org/libc v1.50.9 // indirect
|
||||
|
||||
@@ -28,6 +28,10 @@ github.com/elazarl/goproxy v0.0.0-20230808193330-2592e75ae04a h1:mATvB/9r/3gvcej
|
||||
github.com/elazarl/goproxy v0.0.0-20230808193330-2592e75ae04a/go.mod h1:Ro8st/ElPeALwNFlcTpWmkr6IoMFfkjXAvTHpevnDsM=
|
||||
github.com/emirpasic/gods v1.18.1 h1:FXtiHYKDGKCW2KzwZKx0iC0PQmdlorYgdFG9jPXJ1Bc=
|
||||
github.com/emirpasic/gods v1.18.1/go.mod h1:8tpGGwCnJ5H4r6BWwaV6OrWmMoPhUl5jm/FMNAnJvWQ=
|
||||
github.com/gdamore/encoding v1.0.1 h1:YzKZckdBL6jVt2Gc+5p82qhrGiqMdG/eNs6Wy0u3Uhw=
|
||||
github.com/gdamore/encoding v1.0.1/go.mod h1:0Z0cMFinngz9kS1QfMjCP8TY7em3bZYeeklsSDPivEo=
|
||||
github.com/gdamore/tcell/v2 v2.13.8 h1:Mys/Kl5wfC/GcC5Cx4C2BIQH9dbnhnkPgS9/wF3RlfU=
|
||||
github.com/gdamore/tcell/v2 v2.13.8/go.mod h1:+Wfe208WDdB7INEtCsNrAN6O2m+wsTPk1RAovjaILlo=
|
||||
github.com/gliderlabs/ssh v0.3.7 h1:iV3Bqi942d9huXnzEF2Mt+CY9gLu8DNM4Obd+8bODRE=
|
||||
github.com/gliderlabs/ssh v0.3.7/go.mod h1:zpHEXBstFnQYtGnB8k8kQLol82umzn/2/snG7alWVD8=
|
||||
github.com/go-asn1-ber/asn1-ber v1.5.5 h1:MNHlNMBDgEKD4TcKr36vQN68BA00aDfjIt3/bD50WnA=
|
||||
@@ -70,6 +74,8 @@ github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
|
||||
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
|
||||
github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
|
||||
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
|
||||
github.com/lucasb-eyer/go-colorful v1.3.0 h1:2/yBRLdWBZKrf7gB40FoiKfAWYQ0lqNcbuQwVHXptag=
|
||||
github.com/lucasb-eyer/go-colorful v1.3.0/go.mod h1:R4dSotOR9KMtayYi1e77YzuveK+i7ruzyGqttikkLy0=
|
||||
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
|
||||
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
|
||||
github.com/ncruces/go-strftime v0.1.9 h1:bY0MQC28UADQmHmaF5dgpLmImcShSi2kHU9XLdhx/f4=
|
||||
@@ -84,6 +90,10 @@ github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZb
|
||||
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
||||
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec h1:W09IVJc94icq4NjY3clb7Lk8O1qJ8BdBEF8z0ibU0rE=
|
||||
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo=
|
||||
github.com/rivo/tview v0.42.0 h1:b/ftp+RxtDsHSaynXTbJb+/n/BxDEi+W3UfF5jILK6c=
|
||||
github.com/rivo/tview v0.42.0/go.mod h1:cSfIYfhpSGCjp3r/ECJb+GKS7cGJnqV8vfjQPwoXyfY=
|
||||
github.com/rivo/uniseg v0.4.7 h1:WUdvkW8uEhrYfLC4ZzdpI2ztxP1I582+49Oc5Mq64VQ=
|
||||
github.com/rivo/uniseg v0.4.7/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88=
|
||||
github.com/rogpeppe/go-internal v1.11.0 h1:cWPaGQEPrBb5/AsnsZesgZZ9yb1OQ+GOISoDNXVBh4M=
|
||||
github.com/rogpeppe/go-internal v1.11.0/go.mod h1:ddIwULY96R17DhadqLgMfk9H9tvdUzkipdSkR5nkCZA=
|
||||
github.com/sassoftware/go-rpmutils v0.4.1-0.20250318174028-2660c86d578c h1:Y+MtXJBE7rpqj0nk6GhSzD/48pXSKNEJPIYhtoSCbjk=
|
||||
@@ -120,8 +130,8 @@ golang.org/x/crypto v0.47.0 h1:V6e3FRj+n4dbpw86FJ8Fv7XVOql7TEwpHapKoMJ/GO8=
|
||||
golang.org/x/crypto v0.47.0/go.mod h1:ff3Y9VzzKbwSSEzWqJsJVBnWmRwRSHt/6Op5n9bQc4A=
|
||||
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
|
||||
golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
|
||||
golang.org/x/mod v0.16.0 h1:QX4fJ0Rr5cPQCF7O9lh9Se4pmwfwskqZfq5moyldzic=
|
||||
golang.org/x/mod v0.16.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
|
||||
golang.org/x/mod v0.31.0 h1:HaW9xtz0+kOcWKwli0ZXy79Ix+UW/vOfmWI5QVd2tgI=
|
||||
golang.org/x/mod v0.31.0/go.mod h1:43JraMp9cGx1Rx3AqioxrbrhNsLl2l/iNAvuBkrezpg=
|
||||
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
|
||||
golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
|
||||
@@ -133,8 +143,8 @@ golang.org/x/net v0.48.0/go.mod h1:+ndRgGjkh8FGtu1w1FGbEC31if4VrNVMuKTgcAAnQRY=
|
||||
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.6.0 h1:5BMeUDZ7vkXGfEr1x9B4bRcTH4lpkTkpdh0T/J+qjbQ=
|
||||
golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
|
||||
golang.org/x/sync v0.19.0 h1:vV+1eWNmZ5geRlYjzm2adRgW2/mcpevXNg50YZtPCE4=
|
||||
golang.org/x/sync v0.19.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI=
|
||||
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
@@ -164,14 +174,15 @@ golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
|
||||
golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
|
||||
golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
|
||||
golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
|
||||
golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
|
||||
golang.org/x/text v0.33.0 h1:B3njUFyqtHDUI5jMn1YIr5B0IE2U0qck04r6d4KPAxE=
|
||||
golang.org/x/text v0.33.0/go.mod h1:LuMebE6+rBincTi9+xWTY8TztLzKHc/9C1uBCG27+q8=
|
||||
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
||||
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
||||
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
|
||||
golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU=
|
||||
golang.org/x/tools v0.19.0 h1:tfGCXNR1OsFG+sVdLAitlpjAvD/I6dHDKnYrpEZUHkw=
|
||||
golang.org/x/tools v0.19.0/go.mod h1:qoJWxmGSIBmAeriMx19ogtrEPrGtDbPK634QFIcLAhc=
|
||||
golang.org/x/tools v0.40.0 h1:yLkxfA+Qnul4cs9QA3KnlFu0lVmd8JJfoq+E41uSutA=
|
||||
golang.org/x/tools v0.40.0/go.mod h1:Ik/tzLRlbscWpqqMRjyWYDisX8bG13FrdXp3o4Sr9lc=
|
||||
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
|
||||
@@ -3,14 +3,17 @@ package config
|
||||
import "encoding/json"
|
||||
import "errors"
|
||||
import "os"
|
||||
import "path/filepath"
|
||||
import "strings"
|
||||
import "time"
|
||||
import "strconv"
|
||||
|
||||
type Config struct {
|
||||
HTTPAddr string `json:"http_addr"`
|
||||
HTTPAddrs []string `json:"http_addrs"`
|
||||
HTTPSAddrs []string `json:"https_addrs"`
|
||||
PublicBaseURL string `json:"public_base_url"`
|
||||
DataDir string `json:"data_dir"`
|
||||
FrontendDir string `json:"frontend_dir"`
|
||||
DBDriver string `json:"db_driver"`
|
||||
DBDSN string `json:"db_dsn"`
|
||||
SessionTTL Duration `json:"session_ttl"`
|
||||
@@ -30,6 +33,14 @@ type Config struct {
|
||||
OIDCScopes string `json:"oidc_scopes"`
|
||||
OIDCEnabled bool `json:"oidc_enabled"`
|
||||
OIDCTLSInsecureSkipVerify bool `json:"oidc_tls_insecure_skip_verify"`
|
||||
TLSServerCertSource string `json:"tls_server_cert_source"`
|
||||
TLSCertFile string `json:"tls_cert_file"`
|
||||
TLSKeyFile string `json:"tls_key_file"`
|
||||
TLSPKIServerCertID string `json:"tls_pki_server_cert_id"`
|
||||
TLSClientAuth string `json:"tls_client_auth"`
|
||||
TLSClientCAFile string `json:"tls_client_ca_file"`
|
||||
TLSPKIClientCAID string `json:"tls_pki_client_ca_id"`
|
||||
TLSMinVersion string `json:"tls_min_version"`
|
||||
GitHTTPPrefix string `json:"git_http_prefix"`
|
||||
RPMHTTPPrefix string `json:"rpm_http_prefix"`
|
||||
}
|
||||
@@ -39,14 +50,19 @@ func Load(path string) (Config, error) {
|
||||
var data []byte
|
||||
var err error
|
||||
cfg = Config{
|
||||
HTTPAddr: ":1080",
|
||||
HTTPAddrs: []string{":1080"},
|
||||
HTTPSAddrs: []string{},
|
||||
DataDir: "./codit-data",
|
||||
FrontendDir: filepath.Join("..", "frontend", "dist"),
|
||||
DBDriver: "sqlite",
|
||||
DBDSN: "file:./codit-data/codit.db?_pragma=foreign_keys(1)",
|
||||
SessionTTL: Duration(24 * time.Hour),
|
||||
AuthMode: "db",
|
||||
LDAPUserFilter: "(uid={username})",
|
||||
OIDCScopes: "openid profile email",
|
||||
TLSServerCertSource: "files",
|
||||
TLSClientAuth: "none",
|
||||
TLSMinVersion: "1.2",
|
||||
GitHTTPPrefix: "/git",
|
||||
RPMHTTPPrefix: "/rpm",
|
||||
}
|
||||
@@ -62,6 +78,13 @@ func Load(path string) (Config, error) {
|
||||
}
|
||||
override(&cfg)
|
||||
cfg.AuthMode = strings.ToLower(strings.TrimSpace(cfg.AuthMode))
|
||||
cfg.TLSServerCertSource = strings.ToLower(strings.TrimSpace(cfg.TLSServerCertSource))
|
||||
cfg.TLSClientAuth = strings.ToLower(strings.TrimSpace(cfg.TLSClientAuth))
|
||||
cfg.HTTPAddrs = normalizeHTTPAddrs(cfg.HTTPAddrs)
|
||||
cfg.HTTPSAddrs = normalizeHTTPAddrs(cfg.HTTPSAddrs)
|
||||
if len(cfg.HTTPAddrs) == 0 && len(cfg.HTTPSAddrs) == 0 {
|
||||
return cfg, errors.New("http_addrs or https_addrs is required")
|
||||
}
|
||||
if cfg.DBDSN == "" {
|
||||
return cfg, errors.New("db dsn is required")
|
||||
}
|
||||
@@ -70,9 +93,13 @@ func Load(path string) (Config, error) {
|
||||
|
||||
func override(cfg *Config) {
|
||||
var v string
|
||||
v = os.Getenv("CODIT_HTTP_ADDR")
|
||||
v = os.Getenv("CODIT_HTTP_ADDRS")
|
||||
if v != "" {
|
||||
cfg.HTTPAddr = v
|
||||
cfg.HTTPAddrs = splitCSV(v)
|
||||
}
|
||||
v = os.Getenv("CODIT_HTTPS_ADDRS")
|
||||
if v != "" {
|
||||
cfg.HTTPSAddrs = splitCSV(v)
|
||||
}
|
||||
v = os.Getenv("CODIT_PUBLIC_BASE_URL")
|
||||
if v != "" {
|
||||
@@ -82,6 +109,10 @@ func override(cfg *Config) {
|
||||
if v != "" {
|
||||
cfg.DataDir = v
|
||||
}
|
||||
v = os.Getenv("CODIT_FRONTEND_DIR")
|
||||
if v != "" {
|
||||
cfg.FrontendDir = v
|
||||
}
|
||||
v = os.Getenv("CODIT_DB_DRIVER")
|
||||
if v != "" {
|
||||
cfg.DBDriver = v
|
||||
@@ -154,6 +185,38 @@ func override(cfg *Config) {
|
||||
if v != "" {
|
||||
cfg.OIDCTLSInsecureSkipVerify = parseEnvBool(v)
|
||||
}
|
||||
v = os.Getenv("CODIT_TLS_SERVER_CERT_SOURCE")
|
||||
if v != "" {
|
||||
cfg.TLSServerCertSource = v
|
||||
}
|
||||
v = os.Getenv("CODIT_TLS_CERT_FILE")
|
||||
if v != "" {
|
||||
cfg.TLSCertFile = v
|
||||
}
|
||||
v = os.Getenv("CODIT_TLS_KEY_FILE")
|
||||
if v != "" {
|
||||
cfg.TLSKeyFile = v
|
||||
}
|
||||
v = os.Getenv("CODIT_TLS_PKI_SERVER_CERT_ID")
|
||||
if v != "" {
|
||||
cfg.TLSPKIServerCertID = v
|
||||
}
|
||||
v = os.Getenv("CODIT_TLS_CLIENT_AUTH")
|
||||
if v != "" {
|
||||
cfg.TLSClientAuth = v
|
||||
}
|
||||
v = os.Getenv("CODIT_TLS_CLIENT_CA_FILE")
|
||||
if v != "" {
|
||||
cfg.TLSClientCAFile = v
|
||||
}
|
||||
v = os.Getenv("CODIT_TLS_PKI_CLIENT_CA_ID")
|
||||
if v != "" {
|
||||
cfg.TLSPKIClientCAID = v
|
||||
}
|
||||
v = os.Getenv("CODIT_TLS_MIN_VERSION")
|
||||
if v != "" {
|
||||
cfg.TLSMinVersion = v
|
||||
}
|
||||
v = os.Getenv("CODIT_GIT_HTTP_PREFIX")
|
||||
if v != "" {
|
||||
cfg.GitHTTPPrefix = v
|
||||
@@ -209,3 +272,33 @@ func parseEnvBool(v string) bool {
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func splitCSV(v string) []string {
|
||||
var parts []string
|
||||
var out []string
|
||||
var i int
|
||||
var p string
|
||||
parts = strings.Split(v, ",")
|
||||
for i = 0; i < len(parts); i++ {
|
||||
p = strings.TrimSpace(parts[i])
|
||||
if p == "" {
|
||||
continue
|
||||
}
|
||||
out = append(out, p)
|
||||
}
|
||||
return out
|
||||
}
|
||||
|
||||
func normalizeHTTPAddrs(values []string) []string {
|
||||
var out []string
|
||||
var i int
|
||||
var v string
|
||||
for i = 0; i < len(values); i++ {
|
||||
v = strings.TrimSpace(values[i])
|
||||
if v == "" {
|
||||
continue
|
||||
}
|
||||
out = append(out, v)
|
||||
}
|
||||
return out
|
||||
}
|
||||
|
||||
@@ -18,6 +18,9 @@ func TestLoadDefaults(t *testing.T) {
|
||||
if cfg.GitHTTPPrefix != "/git" {
|
||||
t.Fatalf("unexpected git prefix default: %s", cfg.GitHTTPPrefix)
|
||||
}
|
||||
if cfg.FrontendDir == "" {
|
||||
t.Fatalf("frontend_dir default missing")
|
||||
}
|
||||
}
|
||||
|
||||
func TestLoadFromJSONAndEnvOverride(t *testing.T) {
|
||||
@@ -34,6 +37,7 @@ func TestLoadFromJSONAndEnvOverride(t *testing.T) {
|
||||
t.Fatalf("write config file: %v", err)
|
||||
}
|
||||
t.Setenv("CODIT_DB_DSN", "file:override.db")
|
||||
t.Setenv("CODIT_FRONTEND_DIR", "/srv/codit/frontend")
|
||||
cfg, err = Load(path)
|
||||
if err != nil {
|
||||
t.Fatalf("Load() error: %v", err)
|
||||
@@ -44,6 +48,9 @@ func TestLoadFromJSONAndEnvOverride(t *testing.T) {
|
||||
if cfg.AuthMode != "hybrid" {
|
||||
t.Fatalf("auth_mode normalization failed: %s", cfg.AuthMode)
|
||||
}
|
||||
if cfg.FrontendDir != "/srv/codit/frontend" {
|
||||
t.Fatalf("frontend_dir env override failed: %s", cfg.FrontendDir)
|
||||
}
|
||||
}
|
||||
|
||||
func TestDurationUnmarshalJSON(t *testing.T) {
|
||||
|
||||
@@ -14,8 +14,10 @@ type Store struct {
|
||||
|
||||
func Open(driver, dsn string) (*Store, error) {
|
||||
var db *sql.DB
|
||||
var drv string
|
||||
var err error
|
||||
db, err = sql.Open(driverName(driver), dsn)
|
||||
drv = driverName(driver)
|
||||
db, err = sql.Open(drv, dsn)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@@ -23,6 +25,13 @@ func Open(driver, dsn string) (*Store, error) {
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if drv == "sqlite" {
|
||||
_, err = db.Exec(`PRAGMA busy_timeout = 10000`)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
_, _ = db.Exec(`PRAGMA journal_mode = WAL`)
|
||||
}
|
||||
return &Store{DB: db}, nil
|
||||
}
|
||||
|
||||
|
||||
354
backend/internal/db/pki.go
Normal file
354
backend/internal/db/pki.go
Normal file
@@ -0,0 +1,354 @@
|
||||
package db
|
||||
|
||||
import "database/sql"
|
||||
import "strings"
|
||||
import "time"
|
||||
|
||||
import "codit/internal/models"
|
||||
import "codit/internal/util"
|
||||
|
||||
func (s *Store) ListPKICAs() ([]models.PKICA, error) {
|
||||
var rows *sql.Rows
|
||||
var err error
|
||||
var items []models.PKICA
|
||||
var item models.PKICA
|
||||
rows, err = s.DB.Query(`SELECT c.public_id, c.name, COALESCE(p.public_id, ''), c.is_root, c.cert_pem, c.key_pem, c.serial_counter, c.status, c.created_at, c.updated_at
|
||||
FROM pki_cas c
|
||||
LEFT JOIN pki_cas p ON p.id = c.parent_ca_id
|
||||
ORDER BY c.name`)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
for rows.Next() {
|
||||
err = rows.Scan(&item.ID, &item.Name, &item.ParentCAID, &item.IsRoot, &item.CertPEM, &item.KeyPEM, &item.SerialCounter, &item.Status, &item.CreatedAt, &item.UpdatedAt)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
items = append(items, item)
|
||||
}
|
||||
err = rows.Err()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return items, nil
|
||||
}
|
||||
|
||||
func (s *Store) GetPKICA(id string) (models.PKICA, error) {
|
||||
var row *sql.Row
|
||||
var item models.PKICA
|
||||
var err error
|
||||
row = s.DB.QueryRow(`SELECT c.public_id, c.name, COALESCE(p.public_id, ''), c.is_root, c.cert_pem, c.key_pem, c.serial_counter, c.status, c.created_at, c.updated_at
|
||||
FROM pki_cas c
|
||||
LEFT JOIN pki_cas p ON p.id = c.parent_ca_id
|
||||
WHERE c.public_id = ?`, id)
|
||||
err = row.Scan(&item.ID, &item.Name, &item.ParentCAID, &item.IsRoot, &item.CertPEM, &item.KeyPEM, &item.SerialCounter, &item.Status, &item.CreatedAt, &item.UpdatedAt)
|
||||
if err != nil {
|
||||
return item, err
|
||||
}
|
||||
return item, nil
|
||||
}
|
||||
|
||||
func (s *Store) UpdatePKICAName(id string, name string) error {
|
||||
var err error
|
||||
_, err = s.DB.Exec(`UPDATE pki_cas SET name = ?, updated_at = ? WHERE public_id = ?`, name, time.Now().UTC().Unix(), id)
|
||||
return err
|
||||
}
|
||||
|
||||
func (s *Store) CreatePKICA(item models.PKICA) (models.PKICA, error) {
|
||||
var id string
|
||||
var now int64
|
||||
var err error
|
||||
if item.ID == "" {
|
||||
id, err = util.NewID()
|
||||
if err != nil {
|
||||
return item, err
|
||||
}
|
||||
item.ID = id
|
||||
}
|
||||
if item.SerialCounter <= 0 {
|
||||
item.SerialCounter = 1
|
||||
}
|
||||
if item.Status == "" {
|
||||
item.Status = "active"
|
||||
}
|
||||
now = time.Now().UTC().Unix()
|
||||
item.CreatedAt = now
|
||||
item.UpdatedAt = now
|
||||
_, err = s.DB.Exec(`INSERT INTO pki_cas (public_id, name, parent_ca_id, is_root, cert_pem, key_pem, serial_counter, status, created_at, updated_at)
|
||||
VALUES (?, ?, CASE WHEN ? = '' THEN NULL ELSE (SELECT id FROM pki_cas WHERE public_id = ?) END, ?, ?, ?, ?, ?, ?, ?)`,
|
||||
item.ID, item.Name, item.ParentCAID, item.ParentCAID, item.IsRoot, item.CertPEM, item.KeyPEM, item.SerialCounter, item.Status, item.CreatedAt, item.UpdatedAt)
|
||||
if err != nil {
|
||||
return item, err
|
||||
}
|
||||
return item, nil
|
||||
}
|
||||
|
||||
func (s *Store) CountPKICAChildren(id string) (int, error) {
|
||||
var row *sql.Row
|
||||
var count int
|
||||
var err error
|
||||
row = s.DB.QueryRow(`SELECT COUNT(*)
|
||||
FROM pki_cas c
|
||||
JOIN pki_cas p ON p.id = c.parent_ca_id
|
||||
WHERE p.public_id = ?`, id)
|
||||
err = row.Scan(&count)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
return count, nil
|
||||
}
|
||||
|
||||
func (s *Store) CountPKICertsByCA(id string) (int, error) {
|
||||
var row *sql.Row
|
||||
var count int
|
||||
var err error
|
||||
row = s.DB.QueryRow(`SELECT COUNT(*)
|
||||
FROM pki_certs c
|
||||
JOIN pki_cas ca ON ca.id = c.ca_id
|
||||
WHERE ca.public_id = ?`, id)
|
||||
err = row.Scan(&count)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
return count, nil
|
||||
}
|
||||
|
||||
func (s *Store) DeletePKICA(id string) error {
|
||||
var err error
|
||||
_, err = s.DB.Exec(`DELETE FROM pki_cas WHERE public_id = ?`, id)
|
||||
return err
|
||||
}
|
||||
|
||||
func (s *Store) DeletePKICASubtree(id string) error {
|
||||
var tx *sql.Tx
|
||||
var rows *sql.Rows
|
||||
var err error
|
||||
var itemID string
|
||||
var parentID string
|
||||
var parentByID map[string]string
|
||||
var pending []string
|
||||
var current string
|
||||
var i int
|
||||
var j int
|
||||
var target string
|
||||
var toDelete []string
|
||||
var contains bool
|
||||
parentByID = map[string]string{}
|
||||
tx, err = s.DB.Begin()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
rows, err = tx.Query(`SELECT c.public_id, COALESCE(p.public_id, '')
|
||||
FROM pki_cas c
|
||||
LEFT JOIN pki_cas p ON p.id = c.parent_ca_id`)
|
||||
if err != nil {
|
||||
_ = tx.Rollback()
|
||||
return err
|
||||
}
|
||||
defer rows.Close()
|
||||
for rows.Next() {
|
||||
err = rows.Scan(&itemID, &parentID)
|
||||
if err != nil {
|
||||
_ = tx.Rollback()
|
||||
return err
|
||||
}
|
||||
parentByID[itemID] = parentID
|
||||
}
|
||||
err = rows.Err()
|
||||
if err != nil {
|
||||
_ = tx.Rollback()
|
||||
return err
|
||||
}
|
||||
pending = append(pending, id)
|
||||
for len(pending) > 0 {
|
||||
current = pending[0]
|
||||
pending = pending[1:]
|
||||
contains = false
|
||||
for i = 0; i < len(toDelete); i++ {
|
||||
if toDelete[i] == current {
|
||||
contains = true
|
||||
break
|
||||
}
|
||||
}
|
||||
if !contains {
|
||||
toDelete = append(toDelete, current)
|
||||
}
|
||||
for target = range parentByID {
|
||||
if parentByID[target] == current {
|
||||
pending = append(pending, target)
|
||||
}
|
||||
}
|
||||
}
|
||||
for i = len(toDelete) - 1; i >= 0; i-- {
|
||||
j = i
|
||||
_ = j
|
||||
_, err = tx.Exec(`DELETE FROM pki_cas WHERE public_id = ?`, toDelete[i])
|
||||
if err != nil {
|
||||
_ = tx.Rollback()
|
||||
return err
|
||||
}
|
||||
}
|
||||
err = tx.Commit()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (s *Store) NextPKICASerial(caID string) (int64, error) {
|
||||
var tx *sql.Tx
|
||||
var err error
|
||||
var row *sql.Row
|
||||
var serial int64
|
||||
tx, err = s.DB.Begin()
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
row = tx.QueryRow(`SELECT serial_counter FROM pki_cas WHERE public_id = ?`, caID)
|
||||
err = row.Scan(&serial)
|
||||
if err != nil {
|
||||
_ = tx.Rollback()
|
||||
return 0, err
|
||||
}
|
||||
_, err = tx.Exec(`UPDATE pki_cas SET serial_counter = ?, updated_at = ? WHERE public_id = ?`, serial+1, time.Now().UTC().Unix(), caID)
|
||||
if err != nil {
|
||||
_ = tx.Rollback()
|
||||
return 0, err
|
||||
}
|
||||
err = tx.Commit()
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
return serial, nil
|
||||
}
|
||||
|
||||
func (s *Store) ListPKICerts(caID string) ([]models.PKICert, error) {
|
||||
var rows *sql.Rows
|
||||
var err error
|
||||
var items []models.PKICert
|
||||
var item models.PKICert
|
||||
if caID == "" {
|
||||
rows, err = s.DB.Query(`SELECT c.public_id, COALESCE(ca.public_id, ''), c.serial_hex, c.common_name, c.san_dns, c.san_ips, c.is_ca, c.cert_pem, c.key_pem, c.not_before, c.not_after, c.status, c.revoked_at, c.revocation_reason, c.created_at
|
||||
FROM pki_certs c
|
||||
LEFT JOIN pki_cas ca ON ca.id = c.ca_id
|
||||
ORDER BY c.created_at DESC`)
|
||||
} else if caID == "standalone" {
|
||||
rows, err = s.DB.Query(`SELECT c.public_id, COALESCE(ca.public_id, ''), c.serial_hex, c.common_name, c.san_dns, c.san_ips, c.is_ca, c.cert_pem, c.key_pem, c.not_before, c.not_after, c.status, c.revoked_at, c.revocation_reason, c.created_at
|
||||
FROM pki_certs c
|
||||
LEFT JOIN pki_cas ca ON ca.id = c.ca_id
|
||||
WHERE c.ca_id IS NULL
|
||||
ORDER BY c.created_at DESC`)
|
||||
} else {
|
||||
rows, err = s.DB.Query(`SELECT c.public_id, COALESCE(ca.public_id, ''), c.serial_hex, c.common_name, c.san_dns, c.san_ips, c.is_ca, c.cert_pem, c.key_pem, c.not_before, c.not_after, c.status, c.revoked_at, c.revocation_reason, c.created_at
|
||||
FROM pki_certs c
|
||||
LEFT JOIN pki_cas ca ON ca.id = c.ca_id
|
||||
WHERE c.ca_id = (SELECT id FROM pki_cas WHERE public_id = ?)
|
||||
ORDER BY c.created_at DESC`, caID)
|
||||
}
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
for rows.Next() {
|
||||
err = rows.Scan(&item.ID, &item.CAID, &item.SerialHex, &item.CommonName, &item.SANDNS, &item.SANIPs, &item.IsCA, &item.CertPEM, &item.KeyPEM, &item.NotBefore, &item.NotAfter, &item.Status, &item.RevokedAt, &item.RevocationReason, &item.CreatedAt)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
items = append(items, item)
|
||||
}
|
||||
err = rows.Err()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return items, nil
|
||||
}
|
||||
|
||||
func (s *Store) GetPKICert(id string) (models.PKICert, error) {
|
||||
var row *sql.Row
|
||||
var item models.PKICert
|
||||
var err error
|
||||
row = s.DB.QueryRow(`SELECT c.public_id, COALESCE(ca.public_id, ''), c.serial_hex, c.common_name, c.san_dns, c.san_ips, c.is_ca, c.cert_pem, c.key_pem, c.not_before, c.not_after, c.status, c.revoked_at, c.revocation_reason, c.created_at
|
||||
FROM pki_certs c
|
||||
LEFT JOIN pki_cas ca ON ca.id = c.ca_id
|
||||
WHERE c.public_id = ?`, id)
|
||||
err = row.Scan(&item.ID, &item.CAID, &item.SerialHex, &item.CommonName, &item.SANDNS, &item.SANIPs, &item.IsCA, &item.CertPEM, &item.KeyPEM, &item.NotBefore, &item.NotAfter, &item.Status, &item.RevokedAt, &item.RevocationReason, &item.CreatedAt)
|
||||
if err != nil {
|
||||
return item, err
|
||||
}
|
||||
return item, nil
|
||||
}
|
||||
|
||||
func (s *Store) CreatePKICert(item models.PKICert) (models.PKICert, error) {
|
||||
var id string
|
||||
var now int64
|
||||
var err error
|
||||
if item.ID == "" {
|
||||
id, err = util.NewID()
|
||||
if err != nil {
|
||||
return item, err
|
||||
}
|
||||
item.ID = id
|
||||
}
|
||||
if item.Status == "" {
|
||||
item.Status = "active"
|
||||
}
|
||||
now = time.Now().UTC().Unix()
|
||||
item.CreatedAt = now
|
||||
item.CAID = strings.TrimSpace(item.CAID)
|
||||
_, err = s.DB.Exec(`INSERT INTO pki_certs (public_id, ca_id, serial_hex, common_name, san_dns, san_ips, is_ca, cert_pem, key_pem, not_before, not_after, status, revoked_at, revocation_reason, created_at)
|
||||
VALUES (?, CASE WHEN ? = '' THEN NULL ELSE (SELECT id FROM pki_cas WHERE public_id = ?) END, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
|
||||
item.ID, item.CAID, item.CAID, item.SerialHex, item.CommonName, item.SANDNS, item.SANIPs, item.IsCA, item.CertPEM, item.KeyPEM, item.NotBefore, item.NotAfter, item.Status, item.RevokedAt, item.RevocationReason, item.CreatedAt)
|
||||
if err != nil {
|
||||
return item, err
|
||||
}
|
||||
return item, nil
|
||||
}
|
||||
|
||||
func (s *Store) RevokePKICert(id string, reason string) error {
|
||||
var err error
|
||||
_, err = s.DB.Exec(`UPDATE pki_certs SET status = 'revoked', revoked_at = ?, revocation_reason = ? WHERE public_id = ?`, time.Now().UTC().Unix(), reason, id)
|
||||
return err
|
||||
}
|
||||
|
||||
func (s *Store) DeletePKICert(id string) error {
|
||||
var err error
|
||||
_, err = s.DB.Exec(`DELETE FROM pki_certs WHERE public_id = ?`, id)
|
||||
return err
|
||||
}
|
||||
|
||||
func (s *Store) CountTLSServerCertReferences(certID string) (int, int, error) {
|
||||
var row *sql.Row
|
||||
var appCount int
|
||||
var listenerCount int
|
||||
var err error
|
||||
row = s.DB.QueryRow(`SELECT COUNT(*) FROM app_settings WHERE key = 'tls.pki_server_cert_id' AND value = ?`, certID)
|
||||
err = row.Scan(&appCount)
|
||||
if err != nil {
|
||||
return 0, 0, err
|
||||
}
|
||||
row = s.DB.QueryRow(`SELECT COUNT(*) FROM tls_listeners WHERE tls_pki_server_cert_id = ?`, certID)
|
||||
err = row.Scan(&listenerCount)
|
||||
if err != nil {
|
||||
return 0, 0, err
|
||||
}
|
||||
return appCount, listenerCount, nil
|
||||
}
|
||||
|
||||
func (s *Store) CountTLSClientCAReferences(caID string) (int, int, error) {
|
||||
var row *sql.Row
|
||||
var appCount int
|
||||
var listenerCount int
|
||||
var err error
|
||||
row = s.DB.QueryRow(`SELECT COUNT(*) FROM app_settings WHERE key = 'tls.pki_client_ca_id' AND value = ?`, caID)
|
||||
err = row.Scan(&appCount)
|
||||
if err != nil {
|
||||
return 0, 0, err
|
||||
}
|
||||
row = s.DB.QueryRow(`SELECT COUNT(*) FROM tls_listeners WHERE tls_pki_client_ca_id = ?`, caID)
|
||||
err = row.Scan(&listenerCount)
|
||||
if err != nil {
|
||||
return 0, 0, err
|
||||
}
|
||||
return appCount, listenerCount, nil
|
||||
}
|
||||
216
backend/internal/db/principals.go
Normal file
216
backend/internal/db/principals.go
Normal file
@@ -0,0 +1,216 @@
|
||||
package db
|
||||
|
||||
import "database/sql"
|
||||
import "strings"
|
||||
import "time"
|
||||
|
||||
import "codit/internal/models"
|
||||
import "codit/internal/util"
|
||||
|
||||
func (s *Store) ListServicePrincipals() ([]models.ServicePrincipal, error) {
|
||||
var rows *sql.Rows
|
||||
var items []models.ServicePrincipal
|
||||
var item models.ServicePrincipal
|
||||
var err error
|
||||
rows, err = s.DB.Query(`SELECT public_id, name, description, is_admin, disabled, created_at, updated_at FROM service_principals ORDER BY name`)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
for rows.Next() {
|
||||
err = rows.Scan(&item.ID, &item.Name, &item.Description, &item.IsAdmin, &item.Disabled, &item.CreatedAt, &item.UpdatedAt)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
items = append(items, item)
|
||||
}
|
||||
err = rows.Err()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return items, nil
|
||||
}
|
||||
|
||||
func (s *Store) GetServicePrincipal(id string) (models.ServicePrincipal, error) {
|
||||
var row *sql.Row
|
||||
var item models.ServicePrincipal
|
||||
var err error
|
||||
row = s.DB.QueryRow(`SELECT public_id, name, description, is_admin, disabled, created_at, updated_at FROM service_principals WHERE public_id = ?`, id)
|
||||
err = row.Scan(&item.ID, &item.Name, &item.Description, &item.IsAdmin, &item.Disabled, &item.CreatedAt, &item.UpdatedAt)
|
||||
return item, err
|
||||
}
|
||||
|
||||
func (s *Store) CreateServicePrincipal(item models.ServicePrincipal) (models.ServicePrincipal, error) {
|
||||
var id string
|
||||
var now int64
|
||||
var err error
|
||||
if item.ID == "" {
|
||||
id, err = util.NewID()
|
||||
if err != nil {
|
||||
return item, err
|
||||
}
|
||||
item.ID = id
|
||||
}
|
||||
now = time.Now().UTC().Unix()
|
||||
item.CreatedAt = now
|
||||
item.UpdatedAt = now
|
||||
_, err = s.DB.Exec(`INSERT INTO service_principals (public_id, name, description, is_admin, disabled, created_at, updated_at) VALUES (?, ?, ?, ?, ?, ?, ?)`,
|
||||
item.ID, item.Name, item.Description, item.IsAdmin, item.Disabled, item.CreatedAt, item.UpdatedAt)
|
||||
if err != nil {
|
||||
return item, err
|
||||
}
|
||||
return item, nil
|
||||
}
|
||||
|
||||
func (s *Store) UpdateServicePrincipal(item models.ServicePrincipal) error {
|
||||
var now int64
|
||||
var err error
|
||||
now = time.Now().UTC().Unix()
|
||||
item.UpdatedAt = now
|
||||
_, err = s.DB.Exec(`UPDATE service_principals SET name = ?, description = ?, is_admin = ?, disabled = ?, updated_at = ? WHERE public_id = ?`,
|
||||
item.Name, item.Description, item.IsAdmin, item.Disabled, item.UpdatedAt, item.ID)
|
||||
return err
|
||||
}
|
||||
|
||||
func (s *Store) DeleteServicePrincipal(id string) error {
|
||||
var err error
|
||||
_, err = s.DB.Exec(`DELETE FROM service_principals WHERE public_id = ?`, id)
|
||||
return err
|
||||
}
|
||||
|
||||
func (s *Store) ListCertPrincipalBindings() ([]models.CertPrincipalBinding, error) {
|
||||
var rows *sql.Rows
|
||||
var items []models.CertPrincipalBinding
|
||||
var item models.CertPrincipalBinding
|
||||
var err error
|
||||
rows, err = s.DB.Query(`SELECT b.fingerprint, p.public_id, b.enabled, b.created_at, b.updated_at
|
||||
FROM cert_principal_bindings b
|
||||
JOIN service_principals p ON p.id = b.principal_id
|
||||
ORDER BY b.fingerprint`)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
for rows.Next() {
|
||||
err = rows.Scan(&item.Fingerprint, &item.PrincipalID, &item.Enabled, &item.CreatedAt, &item.UpdatedAt)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
items = append(items, item)
|
||||
}
|
||||
err = rows.Err()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return items, nil
|
||||
}
|
||||
|
||||
func (s *Store) UpsertCertPrincipalBinding(item models.CertPrincipalBinding) (models.CertPrincipalBinding, error) {
|
||||
var now int64
|
||||
var err error
|
||||
now = time.Now().UTC().Unix()
|
||||
item.Fingerprint = strings.ToLower(strings.TrimSpace(item.Fingerprint))
|
||||
item.UpdatedAt = now
|
||||
_, err = s.DB.Exec(`INSERT INTO cert_principal_bindings (fingerprint, principal_id, enabled, created_at, updated_at)
|
||||
VALUES (?, (SELECT id FROM service_principals WHERE public_id = ?), ?, ?, ?)
|
||||
ON CONFLICT(fingerprint) DO UPDATE SET principal_id = excluded.principal_id, enabled = excluded.enabled, updated_at = excluded.updated_at`,
|
||||
item.Fingerprint, item.PrincipalID, item.Enabled, now, now)
|
||||
if err != nil {
|
||||
return item, err
|
||||
}
|
||||
item.CreatedAt = now
|
||||
return item, nil
|
||||
}
|
||||
|
||||
func (s *Store) DeleteCertPrincipalBinding(fingerprint string) error {
|
||||
var err error
|
||||
fingerprint = strings.ToLower(strings.TrimSpace(fingerprint))
|
||||
_, err = s.DB.Exec(`DELETE FROM cert_principal_bindings WHERE fingerprint = ?`, fingerprint)
|
||||
return err
|
||||
}
|
||||
|
||||
func (s *Store) GetPrincipalByCertFingerprint(fingerprint string) (models.ServicePrincipal, bool, error) {
|
||||
var row *sql.Row
|
||||
var item models.ServicePrincipal
|
||||
var enabled bool
|
||||
var err error
|
||||
fingerprint = strings.ToLower(strings.TrimSpace(fingerprint))
|
||||
row = s.DB.QueryRow(`SELECT p.public_id, p.name, p.description, p.is_admin, p.disabled, p.created_at, p.updated_at, b.enabled
|
||||
FROM cert_principal_bindings b
|
||||
INNER JOIN service_principals p ON p.id = b.principal_id
|
||||
WHERE b.fingerprint = ?`, fingerprint)
|
||||
err = row.Scan(&item.ID, &item.Name, &item.Description, &item.IsAdmin, &item.Disabled, &item.CreatedAt, &item.UpdatedAt, &enabled)
|
||||
if err != nil {
|
||||
if err == sql.ErrNoRows {
|
||||
return item, false, nil
|
||||
}
|
||||
return item, false, err
|
||||
}
|
||||
if item.Disabled || !enabled {
|
||||
return item, false, nil
|
||||
}
|
||||
return item, true, nil
|
||||
}
|
||||
|
||||
func (s *Store) ListPrincipalProjectRoles(principalID string) ([]models.PrincipalProjectRole, error) {
|
||||
var rows *sql.Rows
|
||||
var items []models.PrincipalProjectRole
|
||||
var item models.PrincipalProjectRole
|
||||
var err error
|
||||
rows, err = s.DB.Query(`SELECT sp.public_id, p.public_id, r.role, r.created_at
|
||||
FROM principal_project_roles r
|
||||
JOIN service_principals sp ON sp.id = r.principal_id
|
||||
JOIN projects p ON p.id = r.project_id
|
||||
WHERE r.principal_id = (SELECT id FROM service_principals WHERE public_id = ?)
|
||||
ORDER BY p.public_id`, principalID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
for rows.Next() {
|
||||
err = rows.Scan(&item.PrincipalID, &item.ProjectID, &item.Role, &item.CreatedAt)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
items = append(items, item)
|
||||
}
|
||||
err = rows.Err()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return items, nil
|
||||
}
|
||||
|
||||
func (s *Store) UpsertPrincipalProjectRole(item models.PrincipalProjectRole) (models.PrincipalProjectRole, error) {
|
||||
var now int64
|
||||
var err error
|
||||
now = time.Now().UTC().Unix()
|
||||
item.CreatedAt = now
|
||||
_, err = s.DB.Exec(`INSERT INTO principal_project_roles (principal_id, project_id, role, created_at)
|
||||
VALUES ((SELECT id FROM service_principals WHERE public_id = ?), (SELECT id FROM projects WHERE public_id = ?), ?, ?)
|
||||
ON CONFLICT(principal_id, project_id) DO UPDATE SET role = excluded.role`,
|
||||
item.PrincipalID, item.ProjectID, item.Role, item.CreatedAt)
|
||||
if err != nil {
|
||||
return item, err
|
||||
}
|
||||
return item, nil
|
||||
}
|
||||
|
||||
func (s *Store) DeletePrincipalProjectRole(principalID string, projectID string) error {
|
||||
var err error
|
||||
_, err = s.DB.Exec(`DELETE FROM principal_project_roles
|
||||
WHERE principal_id = (SELECT id FROM service_principals WHERE public_id = ?)
|
||||
AND project_id = (SELECT id FROM projects WHERE public_id = ?)`, principalID, projectID)
|
||||
return err
|
||||
}
|
||||
|
||||
func (s *Store) GetPrincipalProjectRole(principalID string, projectID string) (string, error) {
|
||||
var row *sql.Row
|
||||
var role string
|
||||
var err error
|
||||
row = s.DB.QueryRow(`SELECT role FROM principal_project_roles
|
||||
WHERE principal_id = (SELECT id FROM service_principals WHERE public_id = ?)
|
||||
AND project_id = (SELECT id FROM projects WHERE public_id = ?)`, principalID, projectID)
|
||||
err = row.Scan(&role)
|
||||
return role, err
|
||||
}
|
||||
397
backend/internal/db/rpm_dirs.go
Normal file
397
backend/internal/db/rpm_dirs.go
Normal file
@@ -0,0 +1,397 @@
|
||||
package db
|
||||
|
||||
import "database/sql"
|
||||
import "strings"
|
||||
import "time"
|
||||
|
||||
import "codit/internal/models"
|
||||
import "codit/internal/util"
|
||||
|
||||
func (s *Store) ListRPMRepoDirs(repoID string) ([]models.RPMRepoDir, error) {
|
||||
var rows *sql.Rows
|
||||
var items []models.RPMRepoDir
|
||||
var item models.RPMRepoDir
|
||||
var err error
|
||||
rows, err = s.DB.Query(`SELECT r.public_id, d.path, d.mode, d.allow_delete, d.remote_url, d.connect_host, d.host_header, d.tls_server_name, d.tls_insecure_skip_verify, d.sync_interval_sec, d.sync_enabled, d.dirty, d.next_sync_at, d.sync_running, d.sync_status, d.sync_error, d.sync_step, d.sync_total, d.sync_done, d.sync_failed, d.sync_deleted, d.last_sync_started_at, d.last_sync_finished_at, d.last_sync_success_at, d.last_synced_revision, d.created_at, d.updated_at
|
||||
FROM rpm_repo_dirs d
|
||||
JOIN repos r ON r.id = d.repo_id
|
||||
WHERE r.public_id = ?
|
||||
ORDER BY LENGTH(d.path), d.path`, repoID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
for rows.Next() {
|
||||
err = rows.Scan(&item.RepoID, &item.Path, &item.Mode, &item.AllowDelete, &item.RemoteURL, &item.ConnectHost, &item.HostHeader, &item.TLSServerName, &item.TLSInsecureSkipVerify, &item.SyncIntervalSec, &item.SyncEnabled, &item.Dirty, &item.NextSyncAt, &item.SyncRunning, &item.SyncStatus, &item.SyncError, &item.SyncStep, &item.SyncTotal, &item.SyncDone, &item.SyncFailed, &item.SyncDeleted, &item.LastSyncStartedAt, &item.LastSyncFinishedAt, &item.LastSyncSuccessAt, &item.LastSyncedRevision, &item.CreatedAt, &item.UpdatedAt)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
items = append(items, item)
|
||||
}
|
||||
err = rows.Err()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return items, nil
|
||||
}
|
||||
|
||||
func (s *Store) UpsertRPMRepoDir(item models.RPMRepoDir) error {
|
||||
var now int64
|
||||
var err error
|
||||
now = time.Now().UTC().Unix()
|
||||
if item.SyncIntervalSec <= 0 {
|
||||
item.SyncIntervalSec = 300
|
||||
}
|
||||
_, err = s.DB.Exec(`
|
||||
INSERT INTO rpm_repo_dirs (repo_id, path, mode, allow_delete, remote_url, connect_host, host_header, tls_server_name, tls_insecure_skip_verify, sync_interval_sec, sync_enabled, dirty, next_sync_at, created_at, updated_at)
|
||||
VALUES ((SELECT id FROM repos WHERE public_id = ?), ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
|
||||
ON CONFLICT(repo_id, path) DO UPDATE SET
|
||||
mode = excluded.mode,
|
||||
allow_delete = excluded.allow_delete,
|
||||
remote_url = excluded.remote_url,
|
||||
connect_host = excluded.connect_host,
|
||||
host_header = excluded.host_header,
|
||||
tls_server_name = excluded.tls_server_name,
|
||||
tls_insecure_skip_verify = excluded.tls_insecure_skip_verify,
|
||||
sync_interval_sec = excluded.sync_interval_sec,
|
||||
sync_enabled = CASE WHEN excluded.mode = 'mirror' THEN excluded.sync_enabled ELSE 1 END,
|
||||
dirty = CASE WHEN excluded.mode = 'mirror' THEN 1 ELSE rpm_repo_dirs.dirty END,
|
||||
next_sync_at = CASE WHEN excluded.mode = 'mirror' THEN 0 ELSE rpm_repo_dirs.next_sync_at END,
|
||||
updated_at = excluded.updated_at
|
||||
`,
|
||||
item.RepoID,
|
||||
item.Path,
|
||||
item.Mode,
|
||||
item.AllowDelete,
|
||||
item.RemoteURL,
|
||||
item.ConnectHost,
|
||||
item.HostHeader,
|
||||
item.TLSServerName,
|
||||
item.TLSInsecureSkipVerify,
|
||||
item.SyncIntervalSec,
|
||||
item.SyncEnabled,
|
||||
normalizeRPMRepoMode(item.Mode) == "mirror",
|
||||
int64(0),
|
||||
now,
|
||||
now)
|
||||
return err
|
||||
}
|
||||
|
||||
func (s *Store) GetRPMRepoDir(repoID string, path string) (models.RPMRepoDir, error) {
|
||||
var row *sql.Row
|
||||
var item models.RPMRepoDir
|
||||
var err error
|
||||
row = s.DB.QueryRow(`SELECT r.public_id, d.path, d.mode, d.allow_delete, d.remote_url, d.connect_host, d.host_header, d.tls_server_name, d.tls_insecure_skip_verify, d.sync_interval_sec, d.sync_enabled, d.dirty, d.next_sync_at, d.sync_running, d.sync_status, d.sync_error, d.sync_step, d.sync_total, d.sync_done, d.sync_failed, d.sync_deleted, d.last_sync_started_at, d.last_sync_finished_at, d.last_sync_success_at, d.last_synced_revision, d.created_at, d.updated_at
|
||||
FROM rpm_repo_dirs d
|
||||
JOIN repos r ON r.id = d.repo_id
|
||||
WHERE r.public_id = ? AND d.path = ?`, repoID, path)
|
||||
err = row.Scan(&item.RepoID, &item.Path, &item.Mode, &item.AllowDelete, &item.RemoteURL, &item.ConnectHost, &item.HostHeader, &item.TLSServerName, &item.TLSInsecureSkipVerify, &item.SyncIntervalSec, &item.SyncEnabled, &item.Dirty, &item.NextSyncAt, &item.SyncRunning, &item.SyncStatus, &item.SyncError, &item.SyncStep, &item.SyncTotal, &item.SyncDone, &item.SyncFailed, &item.SyncDeleted, &item.LastSyncStartedAt, &item.LastSyncFinishedAt, &item.LastSyncSuccessAt, &item.LastSyncedRevision, &item.CreatedAt, &item.UpdatedAt)
|
||||
if err != nil {
|
||||
return item, err
|
||||
}
|
||||
return item, nil
|
||||
}
|
||||
|
||||
func (s *Store) ListDueRPMMirrorTasks(now int64, limit int) ([]models.RPMMirrorTask, error) {
|
||||
var rows *sql.Rows
|
||||
var out []models.RPMMirrorTask
|
||||
var item models.RPMMirrorTask
|
||||
var err error
|
||||
if limit <= 0 {
|
||||
limit = 10
|
||||
}
|
||||
rows, err = s.DB.Query(`
|
||||
SELECT r.public_id, r.path, d.path, d.remote_url, d.connect_host, d.host_header, d.tls_server_name, d.tls_insecure_skip_verify, d.sync_interval_sec, d.dirty, d.last_synced_revision
|
||||
FROM rpm_repo_dirs d
|
||||
JOIN repos r ON r.id = d.repo_id
|
||||
WHERE d.mode = 'mirror' AND d.sync_enabled = 1 AND d.sync_running = 0 AND (d.dirty = 1 OR d.next_sync_at <= ? OR d.next_sync_at = 0)
|
||||
ORDER BY d.next_sync_at, d.updated_at
|
||||
LIMIT ?`, now, limit)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
for rows.Next() {
|
||||
err = rows.Scan(&item.RepoID, &item.RepoPath, &item.MirrorPath, &item.RemoteURL, &item.ConnectHost, &item.HostHeader, &item.TLSServerName, &item.TLSInsecureSkipVerify, &item.SyncIntervalSec, &item.Dirty, &item.LastSyncedRevision)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
out = append(out, item)
|
||||
}
|
||||
err = rows.Err()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return out, nil
|
||||
}
|
||||
|
||||
func (s *Store) TryStartRPMMirrorTask(repoID string, path string, now int64) (bool, error) {
|
||||
var res sql.Result
|
||||
var rows int64
|
||||
var err error
|
||||
res, err = s.DB.Exec(`UPDATE rpm_repo_dirs SET sync_running = 1, sync_status = 'running', sync_error = '', sync_step = 'start', sync_total = 0, sync_done = 0, sync_failed = 0, sync_deleted = 0, last_sync_started_at = ?, updated_at = ? WHERE repo_id = (SELECT id FROM repos WHERE public_id = ?) AND path = ? AND mode = 'mirror' AND sync_enabled = 1 AND sync_running = 0`, now, now, repoID, path)
|
||||
if err != nil {
|
||||
return false, err
|
||||
}
|
||||
rows, err = res.RowsAffected()
|
||||
if err != nil {
|
||||
return false, err
|
||||
}
|
||||
return rows > 0, nil
|
||||
}
|
||||
|
||||
func (s *Store) UpdateRPMMirrorTaskProgress(repoID string, path string, step string, total int64, done int64, failed int64, deleted int64) error {
|
||||
var now int64
|
||||
var err error
|
||||
now = time.Now().UTC().Unix()
|
||||
_, err = s.DB.Exec(`UPDATE rpm_repo_dirs SET sync_step = ?, sync_total = ?, sync_done = ?, sync_failed = ?, sync_deleted = ?, updated_at = ? WHERE repo_id = (SELECT id FROM repos WHERE public_id = ?) AND path = ?`, step, total, done, failed, deleted, now, repoID, path)
|
||||
return err
|
||||
}
|
||||
|
||||
func (s *Store) FinishRPMMirrorTask(repoID string, path string, success bool, revision string, errMsg string) error {
|
||||
var now int64
|
||||
var status string
|
||||
var nextSync int64
|
||||
var interval int64
|
||||
var row *sql.Row
|
||||
var err error
|
||||
row = s.DB.QueryRow(`SELECT sync_interval_sec FROM rpm_repo_dirs WHERE repo_id = (SELECT id FROM repos WHERE public_id = ?) AND path = ?`, repoID, path)
|
||||
err = row.Scan(&interval)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if interval <= 0 {
|
||||
interval = 300
|
||||
}
|
||||
now = time.Now().UTC().Unix()
|
||||
nextSync = now + interval
|
||||
if success {
|
||||
status = "success"
|
||||
_, err = s.DB.Exec(`UPDATE rpm_repo_dirs SET sync_running = 0, dirty = 0, next_sync_at = ?, sync_status = ?, sync_error = '', sync_step = 'idle', last_sync_finished_at = ?, last_sync_success_at = ?, last_synced_revision = ?, updated_at = ? WHERE repo_id = (SELECT id FROM repos WHERE public_id = ?) AND path = ?`, nextSync, status, now, now, revision, now, repoID, path)
|
||||
return err
|
||||
}
|
||||
status = "failed"
|
||||
if errMsg == "" {
|
||||
errMsg = "mirror sync failed"
|
||||
}
|
||||
_, err = s.DB.Exec(`UPDATE rpm_repo_dirs SET sync_running = 0, dirty = 1, next_sync_at = ?, sync_status = ?, sync_error = ?, sync_step = 'idle', last_sync_finished_at = ?, updated_at = ? WHERE repo_id = (SELECT id FROM repos WHERE public_id = ?) AND path = ?`, now+30, status, errMsg, now, now, repoID, path)
|
||||
return err
|
||||
}
|
||||
|
||||
func (s *Store) MarkRPMMirrorTaskDirty(repoID string, path string) error {
|
||||
var now int64
|
||||
var err error
|
||||
now = time.Now().UTC().Unix()
|
||||
_, err = s.DB.Exec(`UPDATE rpm_repo_dirs SET dirty = 1, next_sync_at = ?, updated_at = ? WHERE repo_id = (SELECT id FROM repos WHERE public_id = ?) AND path = ?`, now, now, repoID, path)
|
||||
return err
|
||||
}
|
||||
|
||||
func (s *Store) SetRPMMirrorSyncEnabled(repoID string, path string, enabled bool) error {
|
||||
var now int64
|
||||
var err error
|
||||
now = time.Now().UTC().Unix()
|
||||
_, err = s.DB.Exec(`UPDATE rpm_repo_dirs SET sync_enabled = ?, dirty = CASE WHEN ? THEN 1 ELSE dirty END, next_sync_at = CASE WHEN ? THEN ? ELSE next_sync_at END, updated_at = ? WHERE repo_id = (SELECT id FROM repos WHERE public_id = ?) AND path = ?`,
|
||||
enabled, enabled, enabled, now, now, repoID, path)
|
||||
return err
|
||||
}
|
||||
|
||||
func (s *Store) ResetRunningRPMMirrorTasks() error {
|
||||
var now int64
|
||||
var err error
|
||||
now = time.Now().UTC().Unix()
|
||||
_, err = s.DB.Exec(`UPDATE rpm_repo_dirs SET sync_running = 0, dirty = 1, next_sync_at = ?, sync_status = 'failed', sync_error = 'aborted by restart', sync_step = 'idle', last_sync_finished_at = ?, updated_at = ? WHERE mode = 'mirror' AND sync_running = 1`, now+5, now, now)
|
||||
return err
|
||||
}
|
||||
|
||||
func (s *Store) ListRPMMirrorPaths() ([]models.RPMMirrorTask, error) {
|
||||
var rows *sql.Rows
|
||||
var out []models.RPMMirrorTask
|
||||
var item models.RPMMirrorTask
|
||||
var err error
|
||||
rows, err = s.DB.Query(`
|
||||
SELECT r.public_id, r.path, d.path
|
||||
FROM rpm_repo_dirs d
|
||||
JOIN repos r ON r.id = d.repo_id
|
||||
WHERE d.mode = 'mirror'`)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
for rows.Next() {
|
||||
err = rows.Scan(&item.RepoID, &item.RepoPath, &item.MirrorPath)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
out = append(out, item)
|
||||
}
|
||||
err = rows.Err()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return out, nil
|
||||
}
|
||||
|
||||
func (s *Store) HasRunningRPMMirrorTask(repoID string) (bool, error) {
|
||||
var row *sql.Row
|
||||
var count int64
|
||||
var err error
|
||||
row = s.DB.QueryRow(`SELECT COUNT(1) FROM rpm_repo_dirs WHERE repo_id = (SELECT id FROM repos WHERE public_id = ?) AND mode = 'mirror' AND sync_running = 1`, repoID)
|
||||
err = row.Scan(&count)
|
||||
if err != nil {
|
||||
return false, err
|
||||
}
|
||||
return count > 0, nil
|
||||
}
|
||||
|
||||
func (s *Store) CreateRPMMirrorRun(repoID string, path string, startedAt int64) (string, error) {
|
||||
var id string
|
||||
var err error
|
||||
id, err = util.NewID()
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
_, err = s.DB.Exec(`INSERT INTO rpm_mirror_runs (public_id, repo_id, path, started_at, status) VALUES (?, (SELECT id FROM repos WHERE public_id = ?), ?, ?, 'running')`, id, repoID, path, startedAt)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
return id, nil
|
||||
}
|
||||
|
||||
func (s *Store) FinishRPMMirrorRun(id string, finishedAt int64, status string, step string, total int64, done int64, failed int64, deleted int64, revision string, errMsg string) error {
|
||||
var err error
|
||||
_, err = s.DB.Exec(`UPDATE rpm_mirror_runs SET finished_at = ?, status = ?, step = ?, total = ?, done = ?, failed = ?, deleted = ?, revision = ?, error = ? WHERE public_id = ?`,
|
||||
finishedAt, status, step, total, done, failed, deleted, revision, errMsg, id)
|
||||
return err
|
||||
}
|
||||
|
||||
func (s *Store) ListRPMMirrorRuns(repoID string, path string, limit int) ([]models.RPMMirrorRun, error) {
|
||||
var rows *sql.Rows
|
||||
var out []models.RPMMirrorRun
|
||||
var item models.RPMMirrorRun
|
||||
var err error
|
||||
if limit <= 0 {
|
||||
limit = 20
|
||||
}
|
||||
rows, err = s.DB.Query(`SELECT m.public_id, r.public_id, m.path, m.started_at, m.finished_at, m.status, m.step, m.total, m.done, m.failed, m.deleted, m.revision, m.error
|
||||
FROM rpm_mirror_runs m
|
||||
JOIN repos r ON r.id = m.repo_id
|
||||
WHERE r.public_id = ? AND m.path = ?
|
||||
ORDER BY m.started_at DESC
|
||||
LIMIT ?`, repoID, path, limit)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
for rows.Next() {
|
||||
err = rows.Scan(&item.ID, &item.RepoID, &item.Path, &item.StartedAt, &item.FinishedAt, &item.Status, &item.Step, &item.Total, &item.Done, &item.Failed, &item.Deleted, &item.Revision, &item.Error)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
out = append(out, item)
|
||||
}
|
||||
err = rows.Err()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return out, nil
|
||||
}
|
||||
|
||||
func (s *Store) DeleteRPMMirrorRuns(repoID string, path string) (int64, error) {
|
||||
var res sql.Result
|
||||
var count int64
|
||||
var err error
|
||||
res, err = s.DB.Exec(`DELETE FROM rpm_mirror_runs WHERE repo_id = (SELECT id FROM repos WHERE public_id = ?) AND path = ?`, repoID, path)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
count, err = res.RowsAffected()
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
return count, nil
|
||||
}
|
||||
|
||||
func (s *Store) CleanupRPMMirrorRunsRetention(repoID string, path string, keepCount int, keepDays int) error {
|
||||
var cutoff int64
|
||||
var now int64
|
||||
var err error
|
||||
if keepCount <= 0 {
|
||||
keepCount = 200
|
||||
}
|
||||
if keepDays <= 0 {
|
||||
keepDays = 30
|
||||
}
|
||||
now = time.Now().UTC().Unix()
|
||||
cutoff = now - int64(keepDays*24*60*60)
|
||||
_, err = s.DB.Exec(`
|
||||
DELETE FROM rpm_mirror_runs
|
||||
WHERE repo_id = (SELECT id FROM repos WHERE public_id = ?)
|
||||
AND path = ?
|
||||
AND started_at < ?
|
||||
AND id NOT IN (
|
||||
SELECT id FROM rpm_mirror_runs
|
||||
WHERE repo_id = (SELECT id FROM repos WHERE public_id = ?) AND path = ?
|
||||
ORDER BY started_at DESC
|
||||
LIMIT ?
|
||||
)
|
||||
`, repoID, path, cutoff, repoID, path, keepCount)
|
||||
return err
|
||||
}
|
||||
|
||||
// normalizeRPMRepoMode maps an arbitrary user-supplied mode string onto
// one of the two supported values: "mirror" (case-insensitive, ignoring
// surrounding whitespace) or, for everything else, "local".
func normalizeRPMRepoMode(mode string) string {
	switch strings.ToLower(strings.TrimSpace(mode)) {
	case "mirror":
		return "mirror"
	default:
		return "local"
	}
}
|
||||
|
||||
func (s *Store) DeleteRPMRepoDir(repoID string, path string) error {
|
||||
var err error
|
||||
_, err = s.DB.Exec(`DELETE FROM rpm_repo_dirs WHERE repo_id = (SELECT id FROM repos WHERE public_id = ?) AND path = ?`, repoID, path)
|
||||
return err
|
||||
}
|
||||
|
||||
func (s *Store) DeleteRPMRepoDirSubtree(repoID string, path string) error {
|
||||
var prefix string
|
||||
var err error
|
||||
prefix = path + "/"
|
||||
_, err = s.DB.Exec(`DELETE FROM rpm_repo_dirs WHERE repo_id = (SELECT id FROM repos WHERE public_id = ?) AND (path = ? OR path LIKE (? || '%'))`, repoID, path, prefix)
|
||||
return err
|
||||
}
|
||||
|
||||
func (s *Store) MoveRPMRepoDir(repoID string, oldPath string, newPath string) error {
|
||||
var tx *sql.Tx
|
||||
var now int64
|
||||
var oldPrefix string
|
||||
var newPrefix string
|
||||
var err error
|
||||
tx, err = s.DB.Begin()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
now = time.Now().UTC().Unix()
|
||||
oldPrefix = oldPath + "/"
|
||||
newPrefix = newPath + "/"
|
||||
_, err = tx.Exec(`DELETE FROM rpm_mirror_runs WHERE repo_id = (SELECT id FROM repos WHERE public_id = ?) AND (path = ? OR path LIKE (? || '%'))`, repoID, oldPath, oldPrefix)
|
||||
if err != nil {
|
||||
_ = tx.Rollback()
|
||||
return err
|
||||
}
|
||||
_, err = tx.Exec(`UPDATE rpm_repo_dirs SET path = ?, updated_at = ? WHERE repo_id = (SELECT id FROM repos WHERE public_id = ?) AND path = ?`, newPath, now, repoID, oldPath)
|
||||
if err != nil {
|
||||
_ = tx.Rollback()
|
||||
return err
|
||||
}
|
||||
_, err = tx.Exec(`UPDATE rpm_repo_dirs SET path = (? || SUBSTR(path, ?)), updated_at = ? WHERE repo_id = (SELECT id FROM repos WHERE public_id = ?) AND path LIKE (? || '%')`, newPrefix, len(oldPrefix)+1, now, repoID, oldPrefix)
|
||||
if err != nil {
|
||||
_ = tx.Rollback()
|
||||
return err
|
||||
}
|
||||
err = tx.Commit()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
|
||||
@@ -28,7 +28,7 @@ func (s *Store) CreateUser(user models.User, passwordHash string) (models.User,
|
||||
user.AuthSource = "db"
|
||||
}
|
||||
_, err = s.DB.Exec(`
|
||||
INSERT INTO users (id, username, display_name, email, password_hash, is_admin, disabled, auth_source, created_at, updated_at)
|
||||
INSERT INTO users (public_id, username, display_name, email, password_hash, is_admin, disabled, auth_source, created_at, updated_at)
|
||||
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
|
||||
`, user.ID, user.Username, user.DisplayName, user.Email, passwordHash, user.IsAdmin, user.Disabled, user.AuthSource, now, now)
|
||||
return user, err
|
||||
@@ -41,7 +41,7 @@ func (s *Store) UpdateUser(user models.User) error {
|
||||
now = time.Now().UTC()
|
||||
nowUnix = now.Unix()
|
||||
user.UpdatedAt = nowUnix
|
||||
_, err = s.DB.Exec(`UPDATE users SET display_name = ?, email = ?, is_admin = ?, disabled = ?, updated_at = ? WHERE id = ?`,
|
||||
_, err = s.DB.Exec(`UPDATE users SET display_name = ?, email = ?, is_admin = ?, disabled = ?, updated_at = ? WHERE public_id = ?`,
|
||||
user.DisplayName, user.Email, user.IsAdmin, user.Disabled, now, user.ID)
|
||||
return err
|
||||
}
|
||||
@@ -58,7 +58,7 @@ func (s *Store) UpdateUserWithPassword(user models.User, passwordHash string) er
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
_, err = tx.Exec(`UPDATE users SET display_name = ?, email = ?, is_admin = ?, disabled = ?, password_hash = ?, updated_at = ? WHERE id = ?`,
|
||||
_, err = tx.Exec(`UPDATE users SET display_name = ?, email = ?, is_admin = ?, disabled = ?, password_hash = ?, updated_at = ? WHERE public_id = ?`,
|
||||
user.DisplayName, user.Email, user.IsAdmin, user.Disabled, passwordHash, now, user.ID)
|
||||
if err != nil {
|
||||
_ = tx.Rollback()
|
||||
@@ -73,7 +73,7 @@ func (s *Store) UpdateUserWithPassword(user models.User, passwordHash string) er
|
||||
|
||||
func (s *Store) SetUserPassword(userID, passwordHash string) error {
|
||||
var err error
|
||||
_, err = s.DB.Exec(`UPDATE users SET password_hash = ?, updated_at = ? WHERE id = ?`, passwordHash, time.Now().UTC(), userID)
|
||||
_, err = s.DB.Exec(`UPDATE users SET password_hash = ?, updated_at = ? WHERE public_id = ?`, passwordHash, time.Now().UTC(), userID)
|
||||
return err
|
||||
}
|
||||
|
||||
@@ -83,7 +83,7 @@ func (s *Store) GetUserByID(id string) (models.User, error) {
|
||||
var created time.Time
|
||||
var updated time.Time
|
||||
var err error
|
||||
row = s.DB.QueryRow(`SELECT id, username, display_name, email, is_admin, disabled, auth_source, created_at, updated_at FROM users WHERE id = ?`, id)
|
||||
row = s.DB.QueryRow(`SELECT public_id, username, display_name, email, is_admin, disabled, auth_source, created_at, updated_at FROM users WHERE public_id = ?`, id)
|
||||
err = row.Scan(&user.ID, &user.Username, &user.DisplayName, &user.Email, &user.IsAdmin, &user.Disabled, &user.AuthSource, &created, &updated)
|
||||
if err != nil {
|
||||
return user, err
|
||||
@@ -100,7 +100,7 @@ func (s *Store) GetUserByUsername(username string) (models.User, string, error)
|
||||
var err error
|
||||
var created time.Time
|
||||
var updated time.Time
|
||||
row = s.DB.QueryRow(`SELECT id, username, display_name, email, is_admin, disabled, auth_source, password_hash, created_at, updated_at FROM users WHERE username = ?`, username)
|
||||
row = s.DB.QueryRow(`SELECT public_id, username, display_name, email, is_admin, disabled, auth_source, password_hash, created_at, updated_at FROM users WHERE username = ?`, username)
|
||||
err = row.Scan(&user.ID, &user.Username, &user.DisplayName, &user.Email, &user.IsAdmin, &user.Disabled, &user.AuthSource, &passwordHash, &created, &updated)
|
||||
if err != nil {
|
||||
return user, passwordHash.String, err
|
||||
@@ -117,7 +117,7 @@ func (s *Store) ListUsers() ([]models.User, error) {
|
||||
var u models.User
|
||||
var created time.Time
|
||||
var updated time.Time
|
||||
rows, err = s.DB.Query(`SELECT id, username, display_name, email, is_admin, disabled, auth_source, created_at, updated_at FROM users ORDER BY username`)
|
||||
rows, err = s.DB.Query(`SELECT public_id, username, display_name, email, is_admin, disabled, auth_source, created_at, updated_at FROM users ORDER BY username`)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@@ -136,7 +136,7 @@ func (s *Store) ListUsers() ([]models.User, error) {
|
||||
|
||||
func (s *Store) DeleteUser(id string) error {
|
||||
var err error
|
||||
_, err = s.DB.Exec(`DELETE FROM users WHERE id = ?`, id)
|
||||
_, err = s.DB.Exec(`DELETE FROM users WHERE public_id = ?`, id)
|
||||
return err
|
||||
}
|
||||
|
||||
@@ -358,11 +358,169 @@ func (s *Store) SetAuthSettings(settings models.AuthSettings) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (s *Store) GetTLSSettings() (models.TLSSettings, error) {
|
||||
var settings models.TLSSettings
|
||||
var rows *sql.Rows
|
||||
var err error
|
||||
var key string
|
||||
var value string
|
||||
rows, err = s.DB.Query(`SELECT key, value FROM app_settings WHERE key IN (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
|
||||
"tls.http_addrs",
|
||||
"tls.https_addrs",
|
||||
"tls.server_cert_source",
|
||||
"tls.cert_file",
|
||||
"tls.key_file",
|
||||
"tls.pki_server_cert_id",
|
||||
"tls.client_auth",
|
||||
"tls.client_ca_file",
|
||||
"tls.pki_client_ca_id",
|
||||
"tls.min_version")
|
||||
if err != nil {
|
||||
return settings, err
|
||||
}
|
||||
defer rows.Close()
|
||||
for rows.Next() {
|
||||
err = rows.Scan(&key, &value)
|
||||
if err != nil {
|
||||
return settings, err
|
||||
}
|
||||
switch key {
|
||||
case "tls.http_addrs":
|
||||
settings.HTTPAddrs = splitCSVValue(value)
|
||||
case "tls.https_addrs":
|
||||
settings.HTTPSAddrs = splitCSVValue(value)
|
||||
case "tls.server_cert_source":
|
||||
settings.TLSServerCertSource = value
|
||||
case "tls.cert_file":
|
||||
settings.TLSCertFile = value
|
||||
case "tls.key_file":
|
||||
settings.TLSKeyFile = value
|
||||
case "tls.pki_server_cert_id":
|
||||
settings.TLSPKIServerCertID = value
|
||||
case "tls.client_auth":
|
||||
settings.TLSClientAuth = value
|
||||
case "tls.client_ca_file":
|
||||
settings.TLSClientCAFile = value
|
||||
case "tls.pki_client_ca_id":
|
||||
settings.TLSPKIClientCAID = value
|
||||
case "tls.min_version":
|
||||
settings.TLSMinVersion = value
|
||||
}
|
||||
}
|
||||
err = rows.Err()
|
||||
if err != nil {
|
||||
return settings, err
|
||||
}
|
||||
return settings, nil
|
||||
}
|
||||
|
||||
func (s *Store) SetTLSSettings(settings models.TLSSettings) error {
|
||||
var tx *sql.Tx
|
||||
var err error
|
||||
var now int64
|
||||
tx, err = s.DB.Begin()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
now = time.Now().UTC().Unix()
|
||||
_, err = tx.Exec(`INSERT INTO app_settings (key, value, updated_at) VALUES (?, ?, ?)
|
||||
ON CONFLICT(key) DO UPDATE SET value=excluded.value, updated_at=excluded.updated_at`,
|
||||
"tls.http_addrs", strings.Join(settings.HTTPAddrs, ","), now)
|
||||
if err != nil {
|
||||
_ = tx.Rollback()
|
||||
return err
|
||||
}
|
||||
_, err = tx.Exec(`INSERT INTO app_settings (key, value, updated_at) VALUES (?, ?, ?)
|
||||
ON CONFLICT(key) DO UPDATE SET value=excluded.value, updated_at=excluded.updated_at`,
|
||||
"tls.https_addrs", strings.Join(settings.HTTPSAddrs, ","), now)
|
||||
if err != nil {
|
||||
_ = tx.Rollback()
|
||||
return err
|
||||
}
|
||||
_, err = tx.Exec(`INSERT INTO app_settings (key, value, updated_at) VALUES (?, ?, ?)
|
||||
ON CONFLICT(key) DO UPDATE SET value=excluded.value, updated_at=excluded.updated_at`,
|
||||
"tls.server_cert_source", settings.TLSServerCertSource, now)
|
||||
if err != nil {
|
||||
_ = tx.Rollback()
|
||||
return err
|
||||
}
|
||||
_, err = tx.Exec(`INSERT INTO app_settings (key, value, updated_at) VALUES (?, ?, ?)
|
||||
ON CONFLICT(key) DO UPDATE SET value=excluded.value, updated_at=excluded.updated_at`,
|
||||
"tls.cert_file", settings.TLSCertFile, now)
|
||||
if err != nil {
|
||||
_ = tx.Rollback()
|
||||
return err
|
||||
}
|
||||
_, err = tx.Exec(`INSERT INTO app_settings (key, value, updated_at) VALUES (?, ?, ?)
|
||||
ON CONFLICT(key) DO UPDATE SET value=excluded.value, updated_at=excluded.updated_at`,
|
||||
"tls.key_file", settings.TLSKeyFile, now)
|
||||
if err != nil {
|
||||
_ = tx.Rollback()
|
||||
return err
|
||||
}
|
||||
_, err = tx.Exec(`INSERT INTO app_settings (key, value, updated_at) VALUES (?, ?, ?)
|
||||
ON CONFLICT(key) DO UPDATE SET value=excluded.value, updated_at=excluded.updated_at`,
|
||||
"tls.pki_server_cert_id", settings.TLSPKIServerCertID, now)
|
||||
if err != nil {
|
||||
_ = tx.Rollback()
|
||||
return err
|
||||
}
|
||||
_, err = tx.Exec(`INSERT INTO app_settings (key, value, updated_at) VALUES (?, ?, ?)
|
||||
ON CONFLICT(key) DO UPDATE SET value=excluded.value, updated_at=excluded.updated_at`,
|
||||
"tls.client_auth", settings.TLSClientAuth, now)
|
||||
if err != nil {
|
||||
_ = tx.Rollback()
|
||||
return err
|
||||
}
|
||||
_, err = tx.Exec(`INSERT INTO app_settings (key, value, updated_at) VALUES (?, ?, ?)
|
||||
ON CONFLICT(key) DO UPDATE SET value=excluded.value, updated_at=excluded.updated_at`,
|
||||
"tls.client_ca_file", settings.TLSClientCAFile, now)
|
||||
if err != nil {
|
||||
_ = tx.Rollback()
|
||||
return err
|
||||
}
|
||||
_, err = tx.Exec(`INSERT INTO app_settings (key, value, updated_at) VALUES (?, ?, ?)
|
||||
ON CONFLICT(key) DO UPDATE SET value=excluded.value, updated_at=excluded.updated_at`,
|
||||
"tls.pki_client_ca_id", settings.TLSPKIClientCAID, now)
|
||||
if err != nil {
|
||||
_ = tx.Rollback()
|
||||
return err
|
||||
}
|
||||
_, err = tx.Exec(`INSERT INTO app_settings (key, value, updated_at) VALUES (?, ?, ?)
|
||||
ON CONFLICT(key) DO UPDATE SET value=excluded.value, updated_at=excluded.updated_at`,
|
||||
"tls.min_version", settings.TLSMinVersion, now)
|
||||
if err != nil {
|
||||
_ = tx.Rollback()
|
||||
return err
|
||||
}
|
||||
err = tx.Commit()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// splitCSVValue splits a comma-separated string into its trimmed,
// non-empty parts. It returns nil when nothing remains after trimming.
func splitCSVValue(value string) []string {
	var out []string
	for _, part := range strings.Split(value, ",") {
		if trimmed := strings.TrimSpace(part); trimmed != "" {
			out = append(out, trimmed)
		}
	}
	return out
}
|
||||
|
||||
func (s *Store) SetUserDisabled(id string, disabled bool) error {
|
||||
var err error
|
||||
var now time.Time
|
||||
now = time.Now().UTC()
|
||||
_, err = s.DB.Exec(`UPDATE users SET disabled = ?, updated_at = ? WHERE id = ?`, disabled, now, id)
|
||||
_, err = s.DB.Exec(`UPDATE users SET disabled = ?, updated_at = ? WHERE public_id = ?`, disabled, now, id)
|
||||
return err
|
||||
}
|
||||
|
||||
@@ -388,7 +546,8 @@ func (s *Store) CreateAPIKey(userID string, name string, tokenHash string, prefi
|
||||
ExpiresAt: expiresAt,
|
||||
Disabled: false,
|
||||
}
|
||||
_, err = s.DB.Exec(`INSERT INTO api_keys (id, user_id, name, token_hash, token_prefix, created_at, last_used_at, expires_at, disabled) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)`,
|
||||
_, err = s.DB.Exec(`INSERT INTO api_keys (public_id, user_id, name, token_hash, token_prefix, created_at, last_used_at, expires_at, disabled)
|
||||
VALUES (?, (SELECT id FROM users WHERE public_id = ?), ?, ?, ?, ?, ?, ?, ?)`,
|
||||
key.ID, key.UserID, key.Name, tokenHash, key.Prefix, key.CreatedAt, key.LastUsedAt, key.ExpiresAt, key.Disabled)
|
||||
return key, err
|
||||
}
|
||||
@@ -398,7 +557,11 @@ func (s *Store) ListAPIKeys(userID string) ([]models.APIKey, error) {
|
||||
var err error
|
||||
var keys []models.APIKey
|
||||
var key models.APIKey
|
||||
rows, err = s.DB.Query(`SELECT id, user_id, name, token_prefix, created_at, last_used_at, expires_at, disabled FROM api_keys WHERE user_id = ? ORDER BY created_at DESC`, userID)
|
||||
rows, err = s.DB.Query(`SELECT k.public_id, u.public_id, k.name, k.token_prefix, k.created_at, k.last_used_at, k.expires_at, k.disabled
|
||||
FROM api_keys k
|
||||
JOIN users u ON u.id = k.user_id
|
||||
WHERE u.public_id = ?
|
||||
ORDER BY k.created_at DESC`, userID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@@ -415,25 +578,25 @@ func (s *Store) ListAPIKeys(userID string) ([]models.APIKey, error) {
|
||||
|
||||
func (s *Store) DeleteAPIKey(userID string, id string) error {
|
||||
var err error
|
||||
_, err = s.DB.Exec(`DELETE FROM api_keys WHERE id = ? AND user_id = ?`, id, userID)
|
||||
_, err = s.DB.Exec(`DELETE FROM api_keys WHERE public_id = ? AND user_id = (SELECT id FROM users WHERE public_id = ?)`, id, userID)
|
||||
return err
|
||||
}
|
||||
|
||||
func (s *Store) SetAPIKeyDisabled(userID string, id string, disabled bool) error {
|
||||
var err error
|
||||
_, err = s.DB.Exec(`UPDATE api_keys SET disabled = ? WHERE id = ? AND user_id = ?`, disabled, id, userID)
|
||||
_, err = s.DB.Exec(`UPDATE api_keys SET disabled = ? WHERE public_id = ? AND user_id = (SELECT id FROM users WHERE public_id = ?)`, disabled, id, userID)
|
||||
return err
|
||||
}
|
||||
|
||||
func (s *Store) DeleteAPIKeyByID(id string) error {
|
||||
var err error
|
||||
_, err = s.DB.Exec(`DELETE FROM api_keys WHERE id = ?`, id)
|
||||
_, err = s.DB.Exec(`DELETE FROM api_keys WHERE public_id = ?`, id)
|
||||
return err
|
||||
}
|
||||
|
||||
func (s *Store) SetAPIKeyDisabledByID(id string, disabled bool) error {
|
||||
var err error
|
||||
_, err = s.DB.Exec(`UPDATE api_keys SET disabled = ? WHERE id = ?`, disabled, id)
|
||||
_, err = s.DB.Exec(`UPDATE api_keys SET disabled = ? WHERE public_id = ?`, disabled, id)
|
||||
return err
|
||||
}
|
||||
|
||||
@@ -449,11 +612,11 @@ func (s *Store) ListAPIKeysAdmin(userID string, query string) ([]models.AdminAPI
|
||||
if userID != "" && query != "" {
|
||||
sqlQuery = `
|
||||
SELECT
|
||||
k.id, k.user_id, u.username, u.display_name, u.email,
|
||||
k.public_id, u.public_id, u.username, u.display_name, u.email,
|
||||
k.name, k.token_prefix, k.created_at, k.last_used_at, k.expires_at, k.disabled
|
||||
FROM api_keys k
|
||||
JOIN users u ON u.id = k.user_id
|
||||
WHERE k.user_id = ?
|
||||
WHERE u.public_id = ?
|
||||
AND (k.name LIKE ? OR k.token_prefix LIKE ? OR u.username LIKE ? OR u.display_name LIKE ? OR u.email LIKE ?)
|
||||
ORDER BY k.created_at DESC
|
||||
`
|
||||
@@ -461,18 +624,18 @@ func (s *Store) ListAPIKeysAdmin(userID string, query string) ([]models.AdminAPI
|
||||
} else if userID != "" {
|
||||
sqlQuery = `
|
||||
SELECT
|
||||
k.id, k.user_id, u.username, u.display_name, u.email,
|
||||
k.public_id, u.public_id, u.username, u.display_name, u.email,
|
||||
k.name, k.token_prefix, k.created_at, k.last_used_at, k.expires_at, k.disabled
|
||||
FROM api_keys k
|
||||
JOIN users u ON u.id = k.user_id
|
||||
WHERE k.user_id = ?
|
||||
WHERE u.public_id = ?
|
||||
ORDER BY k.created_at DESC
|
||||
`
|
||||
rows, err = s.DB.Query(sqlQuery, userID)
|
||||
} else if query != "" {
|
||||
sqlQuery = `
|
||||
SELECT
|
||||
k.id, k.user_id, u.username, u.display_name, u.email,
|
||||
k.public_id, u.public_id, u.username, u.display_name, u.email,
|
||||
k.name, k.token_prefix, k.created_at, k.last_used_at, k.expires_at, k.disabled
|
||||
FROM api_keys k
|
||||
JOIN users u ON u.id = k.user_id
|
||||
@@ -483,7 +646,7 @@ func (s *Store) ListAPIKeysAdmin(userID string, query string) ([]models.AdminAPI
|
||||
} else {
|
||||
sqlQuery = `
|
||||
SELECT
|
||||
k.id, k.user_id, u.username, u.display_name, u.email,
|
||||
k.public_id, u.public_id, u.username, u.display_name, u.email,
|
||||
k.name, k.token_prefix, k.created_at, k.last_used_at, k.expires_at, k.disabled
|
||||
FROM api_keys k
|
||||
JOIN users u ON u.id = k.user_id
|
||||
@@ -522,7 +685,7 @@ func (s *Store) GetUserByAPIKeyHash(tokenHash string) (models.User, error) {
|
||||
var row *sql.Row
|
||||
var created time.Time
|
||||
var updated time.Time
|
||||
var keyID string
|
||||
var keyID int64
|
||||
var now time.Time
|
||||
var nowUnix int64
|
||||
var err error
|
||||
@@ -530,7 +693,7 @@ func (s *Store) GetUserByAPIKeyHash(tokenHash string) (models.User, error) {
|
||||
now = time.Now().UTC()
|
||||
currentUnix = now.Unix()
|
||||
row = s.DB.QueryRow(`
|
||||
SELECT u.id, u.username, u.display_name, u.email, u.is_admin, u.disabled, u.auth_source, u.created_at, u.updated_at, k.id
|
||||
SELECT u.public_id, u.username, u.display_name, u.email, u.is_admin, u.disabled, u.auth_source, u.created_at, u.updated_at, k.id
|
||||
FROM api_keys k
|
||||
JOIN users u ON u.id = k.user_id
|
||||
WHERE k.token_hash = ?
|
||||
@@ -553,7 +716,8 @@ func (s *Store) GetUserByAPIKeyHash(tokenHash string) (models.User, error) {
|
||||
|
||||
func (s *Store) CreateSession(userID, token string, expiresAt time.Time) error {
|
||||
var err error
|
||||
_, err = s.DB.Exec(`INSERT INTO sessions (id, user_id, token, expires_at, created_at) VALUES (?, ?, ?, ?, ?)`,
|
||||
_, err = s.DB.Exec(`INSERT INTO sessions (id, user_id, token, expires_at, created_at)
|
||||
VALUES (?, (SELECT id FROM users WHERE public_id = ?), ?, ?, ?)`,
|
||||
mustID(), userID, token, expiresAt, time.Now().UTC())
|
||||
return err
|
||||
}
|
||||
@@ -572,7 +736,7 @@ func (s *Store) GetSessionUser(token string) (models.User, time.Time, error) {
|
||||
var created time.Time
|
||||
var updated time.Time
|
||||
row = s.DB.QueryRow(`
|
||||
SELECT u.id, u.username, u.display_name, u.email, u.is_admin, u.disabled, u.auth_source, u.created_at, u.updated_at, s.expires_at
|
||||
SELECT u.public_id, u.username, u.display_name, u.email, u.is_admin, u.disabled, u.auth_source, u.created_at, u.updated_at, s.expires_at
|
||||
FROM sessions s JOIN users u ON u.id = s.user_id
|
||||
WHERE s.token = ? AND u.disabled = 0
|
||||
`, token)
|
||||
@@ -612,8 +776,8 @@ func (s *Store) CreateProject(project models.Project) (models.Project, error) {
|
||||
if project.HomePage == "" {
|
||||
project.HomePage = "info"
|
||||
}
|
||||
_, err = tx.Exec(`INSERT INTO projects (id, slug, name, description, home_page, created_by, updated_by, created_at, updated_at, created_at_unix, updated_at_unix)
|
||||
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
|
||||
_, err = tx.Exec(`INSERT INTO projects (public_id, slug, name, description, home_page, created_by, updated_by, created_at, updated_at, created_at_unix, updated_at_unix)
|
||||
VALUES (?, ?, ?, ?, ?, (SELECT id FROM users WHERE public_id = ?), (SELECT id FROM users WHERE public_id = ?), ?, ?, ?, ?)`,
|
||||
project.ID,
|
||||
project.Slug,
|
||||
project.Name,
|
||||
@@ -630,7 +794,8 @@ func (s *Store) CreateProject(project models.Project) (models.Project, error) {
|
||||
_ = tx.Rollback()
|
||||
return project, err
|
||||
}
|
||||
_, err = tx.Exec(`INSERT INTO project_members (project_id, user_id, role, created_at) VALUES (?, ?, ?, ?)`,
|
||||
_, err = tx.Exec(`INSERT INTO project_members (project_id, user_id, role, created_at)
|
||||
VALUES ((SELECT id FROM projects WHERE public_id = ?), (SELECT id FROM users WHERE public_id = ?), ?, ?)`,
|
||||
project.ID, project.CreatedBy, "admin", now)
|
||||
if err != nil {
|
||||
_ = tx.Rollback()
|
||||
@@ -649,7 +814,7 @@ func (s *Store) UpdateProject(project models.Project) error {
|
||||
if project.HomePage == "" {
|
||||
project.HomePage = "info"
|
||||
}
|
||||
_, err = s.DB.Exec(`UPDATE projects SET slug = ?, name = ?, description = ?, home_page = ?, updated_at = ?, updated_by = ?, updated_at_unix = ? WHERE id = ?`,
|
||||
_, err = s.DB.Exec(`UPDATE projects SET slug = ?, name = ?, description = ?, home_page = ?, updated_at = ?, updated_by = (SELECT id FROM users WHERE public_id = ?), updated_at_unix = ? WHERE public_id = ?`,
|
||||
project.Slug,
|
||||
project.Name,
|
||||
project.Description,
|
||||
@@ -666,14 +831,14 @@ func (s *Store) GetProject(id string) (models.Project, error) {
|
||||
var project models.Project
|
||||
var row *sql.Row
|
||||
row = s.DB.QueryRow(`
|
||||
SELECT p.id, p.slug, p.name, p.description, p.home_page,
|
||||
p.created_by, p.updated_by,
|
||||
SELECT p.public_id, p.slug, p.name, p.description, p.home_page,
|
||||
c.public_id, u.public_id,
|
||||
COALESCE(c.username, ''), COALESCE(u.username, ''),
|
||||
p.created_at_unix, p.updated_at_unix
|
||||
FROM projects p
|
||||
LEFT JOIN users c ON c.id = p.created_by
|
||||
LEFT JOIN users u ON u.id = p.updated_by
|
||||
WHERE p.id = ?
|
||||
WHERE p.public_id = ?
|
||||
`, id)
|
||||
return project, row.Scan(
|
||||
&project.ID,
|
||||
@@ -695,8 +860,8 @@ func (s *Store) GetProjectBySlug(slug string) (models.Project, error) {
|
||||
var row *sql.Row
|
||||
var err error
|
||||
row = s.DB.QueryRow(`
|
||||
SELECT p.id, p.slug, p.name, p.description, p.home_page,
|
||||
p.created_by, p.updated_by,
|
||||
SELECT p.public_id, p.slug, p.name, p.description, p.home_page,
|
||||
c.public_id, u.public_id,
|
||||
COALESCE(c.username, ''), COALESCE(u.username, ''),
|
||||
p.created_at_unix, p.updated_at_unix
|
||||
FROM projects p
|
||||
@@ -729,8 +894,8 @@ func (s *Store) ListProjects() ([]models.Project, error) {
|
||||
var projects []models.Project
|
||||
var p models.Project
|
||||
rows, err = s.DB.Query(`
|
||||
SELECT p.id, p.slug, p.name, p.description, p.home_page,
|
||||
p.created_by, p.updated_by,
|
||||
SELECT p.public_id, p.slug, p.name, p.description, p.home_page,
|
||||
c.public_id, u.public_id,
|
||||
COALESCE(c.username, ''), COALESCE(u.username, ''),
|
||||
p.created_at_unix, p.updated_at_unix
|
||||
FROM projects p
|
||||
@@ -770,15 +935,15 @@ func (s *Store) ListProjectsForUser(userID string) ([]models.Project, error) {
|
||||
var projects []models.Project
|
||||
var p models.Project
|
||||
rows, err = s.DB.Query(`
|
||||
SELECT p.id, p.slug, p.name, p.description, p.home_page,
|
||||
p.created_by, p.updated_by,
|
||||
SELECT p.public_id, p.slug, p.name, p.description, p.home_page,
|
||||
c.public_id, u.public_id,
|
||||
COALESCE(c.username, ''), COALESCE(u.username, ''),
|
||||
p.created_at_unix, p.updated_at_unix
|
||||
FROM projects p
|
||||
JOIN project_members m ON m.project_id = p.id
|
||||
LEFT JOIN users c ON c.id = p.created_by
|
||||
LEFT JOIN users u ON u.id = p.updated_by
|
||||
WHERE m.user_id = ?
|
||||
WHERE m.user_id = (SELECT id FROM users WHERE public_id = ?)
|
||||
ORDER BY p.name
|
||||
`, userID)
|
||||
if err != nil {
|
||||
@@ -820,8 +985,8 @@ func (s *Store) ListProjectsFiltered(limit int, offset int, query string) ([]mod
|
||||
}
|
||||
if query == "" {
|
||||
rows, err = s.DB.Query(
|
||||
`SELECT p.id, p.slug, p.name, p.description, p.home_page,
|
||||
p.created_by, p.updated_by,
|
||||
`SELECT p.public_id, p.slug, p.name, p.description, p.home_page,
|
||||
c.public_id, u.public_id,
|
||||
COALESCE(c.username, ''), COALESCE(u.username, ''),
|
||||
p.created_at_unix, p.updated_at_unix
|
||||
FROM projects p
|
||||
@@ -833,8 +998,8 @@ func (s *Store) ListProjectsFiltered(limit int, offset int, query string) ([]mod
|
||||
)
|
||||
} else {
|
||||
rows, err = s.DB.Query(
|
||||
`SELECT p.id, p.slug, p.name, p.description, p.home_page,
|
||||
p.created_by, p.updated_by,
|
||||
`SELECT p.public_id, p.slug, p.name, p.description, p.home_page,
|
||||
c.public_id, u.public_id,
|
||||
COALESCE(c.username, ''), COALESCE(u.username, ''),
|
||||
p.created_at_unix, p.updated_at_unix
|
||||
FROM projects p
|
||||
@@ -887,15 +1052,15 @@ func (s *Store) ListProjectsFilteredForUser(userID string, limit int, offset int
|
||||
}
|
||||
if query == "" {
|
||||
rows, err = s.DB.Query(
|
||||
`SELECT p.id, p.slug, p.name, p.description, p.home_page,
|
||||
p.created_by, p.updated_by,
|
||||
`SELECT p.public_id, p.slug, p.name, p.description, p.home_page,
|
||||
c.public_id, u.public_id,
|
||||
COALESCE(c.username, ''), COALESCE(u.username, ''),
|
||||
p.created_at_unix, p.updated_at_unix
|
||||
FROM projects p
|
||||
JOIN project_members m ON m.project_id = p.id
|
||||
LEFT JOIN users c ON c.id = p.created_by
|
||||
LEFT JOIN users u ON u.id = p.updated_by
|
||||
WHERE m.user_id = ?
|
||||
WHERE m.user_id = (SELECT id FROM users WHERE public_id = ?)
|
||||
ORDER BY p.name LIMIT ? OFFSET ?`,
|
||||
userID,
|
||||
limit,
|
||||
@@ -903,15 +1068,15 @@ func (s *Store) ListProjectsFilteredForUser(userID string, limit int, offset int
|
||||
)
|
||||
} else {
|
||||
rows, err = s.DB.Query(
|
||||
`SELECT p.id, p.slug, p.name, p.description, p.home_page,
|
||||
p.created_by, p.updated_by,
|
||||
`SELECT p.public_id, p.slug, p.name, p.description, p.home_page,
|
||||
c.public_id, u.public_id,
|
||||
COALESCE(c.username, ''), COALESCE(u.username, ''),
|
||||
p.created_at_unix, p.updated_at_unix
|
||||
FROM projects p
|
||||
JOIN project_members m ON m.project_id = p.id
|
||||
LEFT JOIN users c ON c.id = p.created_by
|
||||
LEFT JOIN users u ON u.id = p.updated_by
|
||||
WHERE m.user_id = ? AND (p.name LIKE ? OR p.slug LIKE ?)
|
||||
WHERE m.user_id = (SELECT id FROM users WHERE public_id = ?) AND (p.name LIKE ? OR p.slug LIKE ?)
|
||||
ORDER BY p.name LIMIT ? OFFSET ?`,
|
||||
userID,
|
||||
"%"+query+"%",
|
||||
@@ -948,7 +1113,7 @@ func (s *Store) ListProjectsFilteredForUser(userID string, limit int, offset int
|
||||
|
||||
func (s *Store) DeleteProject(id string) error {
|
||||
var err error
|
||||
_, err = s.DB.Exec(`DELETE FROM projects WHERE id = ?`, id)
|
||||
_, err = s.DB.Exec(`DELETE FROM projects WHERE public_id = ?`, id)
|
||||
return err
|
||||
}
|
||||
|
||||
@@ -958,20 +1123,25 @@ func (s *Store) AddProjectMember(projectID, userID, role string) (models.Project
|
||||
var now time.Time
|
||||
now = time.Now().UTC()
|
||||
member = models.ProjectMember{ProjectID: projectID, UserID: userID, Role: role, CreatedAt: now.Unix()}
|
||||
_, err = s.DB.Exec(`INSERT INTO project_members (project_id, user_id, role, created_at) VALUES (?, ?, ?, ?)`,
|
||||
_, err = s.DB.Exec(`INSERT INTO project_members (project_id, user_id, role, created_at)
|
||||
VALUES ((SELECT id FROM projects WHERE public_id = ?), (SELECT id FROM users WHERE public_id = ?), ?, ?)`,
|
||||
member.ProjectID, member.UserID, member.Role, now)
|
||||
return member, err
|
||||
}
|
||||
|
||||
func (s *Store) UpdateProjectMemberRole(projectID, userID, role string) error {
|
||||
var err error
|
||||
_, err = s.DB.Exec(`UPDATE project_members SET role = ? WHERE project_id = ? AND user_id = ?`, role, projectID, userID)
|
||||
_, err = s.DB.Exec(`UPDATE project_members SET role = ?
|
||||
WHERE project_id = (SELECT id FROM projects WHERE public_id = ?)
|
||||
AND user_id = (SELECT id FROM users WHERE public_id = ?)`, role, projectID, userID)
|
||||
return err
|
||||
}
|
||||
|
||||
func (s *Store) RemoveProjectMember(projectID, userID string) error {
|
||||
var err error
|
||||
_, err = s.DB.Exec(`DELETE FROM project_members WHERE project_id = ? AND user_id = ?`, projectID, userID)
|
||||
_, err = s.DB.Exec(`DELETE FROM project_members
|
||||
WHERE project_id = (SELECT id FROM projects WHERE public_id = ?)
|
||||
AND user_id = (SELECT id FROM users WHERE public_id = ?)`, projectID, userID)
|
||||
return err
|
||||
}
|
||||
|
||||
@@ -981,7 +1151,12 @@ func (s *Store) ListProjectMembers(projectID string) ([]models.ProjectMember, er
|
||||
var members []models.ProjectMember
|
||||
var m models.ProjectMember
|
||||
var created time.Time
|
||||
rows, err = s.DB.Query(`SELECT project_id, user_id, role, created_at FROM project_members WHERE project_id = ? ORDER BY role`, projectID)
|
||||
rows, err = s.DB.Query(`SELECT p.public_id, u.public_id, m.role, m.created_at
|
||||
FROM project_members m
|
||||
JOIN projects p ON p.id = m.project_id
|
||||
JOIN users u ON u.id = m.user_id
|
||||
WHERE m.project_id = (SELECT id FROM projects WHERE public_id = ?)
|
||||
ORDER BY m.role`, projectID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@@ -1001,7 +1176,9 @@ func (s *Store) GetProjectMemberRole(projectID, userID string) (string, error) {
|
||||
var role string
|
||||
var row *sql.Row
|
||||
var err error
|
||||
row = s.DB.QueryRow(`SELECT role FROM project_members WHERE project_id = ? AND user_id = ?`, projectID, userID)
|
||||
row = s.DB.QueryRow(`SELECT role FROM project_members
|
||||
WHERE project_id = (SELECT id FROM projects WHERE public_id = ?)
|
||||
AND user_id = (SELECT id FROM users WHERE public_id = ?)`, projectID, userID)
|
||||
err = row.Scan(&role)
|
||||
if err != nil {
|
||||
return "", err
|
||||
@@ -1024,7 +1201,8 @@ func (s *Store) CreateRepo(repo models.Repo) (models.Repo, error) {
|
||||
now = time.Now().UTC()
|
||||
nowUnix = now.Unix()
|
||||
repo.CreatedAt = nowUnix
|
||||
_, err = s.DB.Exec(`INSERT INTO repos (id, project_id, name, type, path, created_by, created_at) VALUES (?, ?, ?, ?, ?, ?, ?)`,
|
||||
_, err = s.DB.Exec(`INSERT INTO repos (public_id, project_id, name, type, path, created_by, created_at)
|
||||
VALUES (?, (SELECT id FROM projects WHERE public_id = ?), ?, ?, ?, (SELECT id FROM users WHERE public_id = ?), ?)`,
|
||||
repo.ID, repo.ProjectID, repo.Name, repo.Type, repo.Path, repo.CreatedBy, now)
|
||||
return repo, err
|
||||
}
|
||||
@@ -1033,7 +1211,9 @@ func (s *Store) RepoNameExists(projectID string, name string, repoType string) (
|
||||
var count int
|
||||
var row *sql.Row
|
||||
var err error
|
||||
row = s.DB.QueryRow(`SELECT COUNT(1) FROM repos WHERE project_id = ? AND name = ? AND type = ?`, projectID, name, repoType)
|
||||
row = s.DB.QueryRow(`SELECT COUNT(1) FROM repos
|
||||
WHERE project_id = (SELECT id FROM projects WHERE public_id = ?)
|
||||
AND name = ? AND type = ?`, projectID, name, repoType)
|
||||
err = row.Scan(&count)
|
||||
if err != nil {
|
||||
return false, err
|
||||
@@ -1046,7 +1226,11 @@ func (s *Store) GetRepo(id string) (models.Repo, error) {
|
||||
var row *sql.Row
|
||||
var created time.Time
|
||||
var err error
|
||||
row = s.DB.QueryRow(`SELECT id, project_id, name, type, path, created_by, created_at FROM repos WHERE id = ?`, id)
|
||||
row = s.DB.QueryRow(`SELECT r.public_id, p.public_id, r.name, r.type, r.path, u.public_id, r.created_at
|
||||
FROM repos r
|
||||
JOIN projects p ON p.id = r.project_id
|
||||
JOIN users u ON u.id = r.created_by
|
||||
WHERE r.public_id = ?`, id)
|
||||
err = row.Scan(&repo.ID, &repo.ProjectID, &repo.Name, &repo.Type, &repo.Path, &repo.CreatedBy, &created)
|
||||
if err != nil {
|
||||
return repo, err
|
||||
@@ -1055,12 +1239,33 @@ func (s *Store) GetRepo(id string) (models.Repo, error) {
|
||||
return repo, nil
|
||||
}
|
||||
|
||||
func (s *Store) GetRepoStorageIDs(id string) (int64, int64, error) {
|
||||
var row *sql.Row
|
||||
var projectID int64
|
||||
var repoID int64
|
||||
var err error
|
||||
row = s.DB.QueryRow(`SELECT p.id, r.id
|
||||
FROM repos r
|
||||
JOIN projects p ON p.id = r.project_id
|
||||
WHERE r.public_id = ?`, id)
|
||||
err = row.Scan(&projectID, &repoID)
|
||||
if err != nil {
|
||||
return 0, 0, err
|
||||
}
|
||||
return projectID, repoID, nil
|
||||
}
|
||||
|
||||
func (s *Store) GetRepoByProjectNameType(projectID string, name string, repoType string) (models.Repo, error) {
|
||||
var repo models.Repo
|
||||
var row *sql.Row
|
||||
var err error
|
||||
var created time.Time
|
||||
row = s.DB.QueryRow(`SELECT id, project_id, name, type, path, created_by, created_at FROM repos WHERE project_id = ? AND name = ? AND type = ?`,
|
||||
row = s.DB.QueryRow(`SELECT r.public_id, p.public_id, r.name, r.type, r.path, u.public_id, r.created_at
|
||||
FROM repos r
|
||||
JOIN projects p ON p.id = r.project_id
|
||||
JOIN users u ON u.id = r.created_by
|
||||
WHERE r.project_id = (SELECT id FROM projects WHERE public_id = ?)
|
||||
AND r.name = ? AND r.type = ?`,
|
||||
projectID, name, repoType)
|
||||
err = row.Scan(&repo.ID, &repo.ProjectID, &repo.Name, &repo.Type, &repo.Path, &repo.CreatedBy, &created)
|
||||
if err != nil {
|
||||
@@ -1078,15 +1283,20 @@ func (s *Store) ListRepos(projectID string) ([]models.Repo, error) {
|
||||
var isForeign int
|
||||
var created time.Time
|
||||
rows, err = s.DB.Query(`
|
||||
SELECT id, project_id, name, type, path, created_by, created_at, 0 AS is_foreign
|
||||
FROM repos
|
||||
WHERE project_id = ?
|
||||
UNION ALL
|
||||
SELECT r.id, r.project_id, r.name, r.type, r.path, r.created_by, r.created_at, 1 AS is_foreign
|
||||
SELECT r.public_id, p.public_id, r.name, r.type, r.path, u.public_id, r.created_at, 0 AS is_foreign
|
||||
FROM repos r
|
||||
JOIN projects p ON p.id = r.project_id
|
||||
JOIN users u ON u.id = r.created_by
|
||||
WHERE r.project_id = (SELECT id FROM projects WHERE public_id = ?)
|
||||
UNION ALL
|
||||
SELECT r.public_id, p.public_id, r.name, r.type, r.path, u.public_id, r.created_at, 1 AS is_foreign
|
||||
FROM repos r
|
||||
JOIN projects p ON p.id = r.project_id
|
||||
JOIN users u ON u.id = r.created_by
|
||||
JOIN project_repos pr ON pr.repo_id = r.id
|
||||
WHERE pr.project_id = ? AND r.project_id <> ?
|
||||
ORDER BY name
|
||||
WHERE pr.project_id = (SELECT id FROM projects WHERE public_id = ?)
|
||||
AND r.project_id <> (SELECT id FROM projects WHERE public_id = ?)
|
||||
ORDER BY 3
|
||||
`, projectID, projectID, projectID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
@@ -1111,10 +1321,12 @@ func (s *Store) ListReposOwned(projectID string) ([]models.Repo, error) {
|
||||
var r models.Repo
|
||||
var created time.Time
|
||||
rows, err = s.DB.Query(`
|
||||
SELECT id, project_id, name, type, path, created_by, created_at
|
||||
FROM repos
|
||||
WHERE project_id = ?
|
||||
ORDER BY name
|
||||
SELECT r.public_id, p.public_id, r.name, r.type, r.path, u.public_id, r.created_at
|
||||
FROM repos r
|
||||
JOIN projects p ON p.id = r.project_id
|
||||
JOIN users u ON u.id = r.created_by
|
||||
WHERE r.project_id = (SELECT id FROM projects WHERE public_id = ?)
|
||||
ORDER BY r.name
|
||||
`, projectID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
@@ -1138,9 +1350,11 @@ func (s *Store) ListAllRepos() ([]models.Repo, error) {
|
||||
var r models.Repo
|
||||
var created time.Time
|
||||
rows, err = s.DB.Query(`
|
||||
SELECT id, project_id, name, type, path, created_by, created_at
|
||||
FROM repos
|
||||
ORDER BY name
|
||||
SELECT r.public_id, p.public_id, r.name, r.type, r.path, u.public_id, r.created_at
|
||||
FROM repos r
|
||||
JOIN projects p ON p.id = r.project_id
|
||||
JOIN users u ON u.id = r.created_by
|
||||
ORDER BY r.name
|
||||
`)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
@@ -1166,6 +1380,7 @@ func (s *Store) ListReposByProjectIDs(projectIDs []string) ([]models.Repo, error
|
||||
var placeholders []string
|
||||
var args []interface{}
|
||||
var i int
|
||||
var selected []interface{}
|
||||
if len(projectIDs) == 0 {
|
||||
return []models.Repo{}, nil
|
||||
}
|
||||
@@ -1173,13 +1388,19 @@ func (s *Store) ListReposByProjectIDs(projectIDs []string) ([]models.Repo, error
|
||||
args = make([]interface{}, len(projectIDs))
|
||||
for i = 0; i < len(projectIDs); i++ {
|
||||
placeholders[i] = "?"
|
||||
args[i] = projectIDs[i]
|
||||
selected = append(selected, projectIDs[i])
|
||||
}
|
||||
args = make([]interface{}, len(selected))
|
||||
for i = 0; i < len(selected); i++ {
|
||||
args[i] = selected[i]
|
||||
}
|
||||
rows, err = s.DB.Query(`
|
||||
SELECT id, project_id, name, type, path, created_by, created_at
|
||||
FROM repos
|
||||
WHERE project_id IN (`+strings.Join(placeholders, ",")+`)
|
||||
ORDER BY name
|
||||
SELECT r.public_id, p.public_id, r.name, r.type, r.path, u.public_id, r.created_at
|
||||
FROM repos r
|
||||
JOIN projects p ON p.id = r.project_id
|
||||
JOIN users u ON u.id = r.created_by
|
||||
WHERE p.public_id IN (`+strings.Join(placeholders, ",")+`)
|
||||
ORDER BY r.name
|
||||
`, args...)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
@@ -1201,7 +1422,10 @@ func (s *Store) ListProjectIDsForUser(userID string) ([]string, error) {
|
||||
var err error
|
||||
var ids []string
|
||||
var id string
|
||||
rows, err = s.DB.Query(`SELECT project_id FROM project_members WHERE user_id = ?`, userID)
|
||||
rows, err = s.DB.Query(`SELECT p.public_id
|
||||
FROM project_members m
|
||||
JOIN projects p ON p.id = m.project_id
|
||||
WHERE m.user_id = (SELECT id FROM users WHERE public_id = ?)`, userID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@@ -1218,26 +1442,29 @@ func (s *Store) ListProjectIDsForUser(userID string) ([]string, error) {
|
||||
|
||||
func (s *Store) UpdateRepo(repo models.Repo) error {
|
||||
var err error
|
||||
_, err = s.DB.Exec(`UPDATE repos SET name = ?, path = ? WHERE id = ?`, repo.Name, repo.Path, repo.ID)
|
||||
_, err = s.DB.Exec(`UPDATE repos SET name = ?, path = ? WHERE public_id = ?`, repo.Name, repo.Path, repo.ID)
|
||||
return err
|
||||
}
|
||||
|
||||
func (s *Store) DeleteRepo(id string) error {
|
||||
var err error
|
||||
_, err = s.DB.Exec(`DELETE FROM repos WHERE id = ?`, id)
|
||||
_, err = s.DB.Exec(`DELETE FROM repos WHERE public_id = ?`, id)
|
||||
return err
|
||||
}
|
||||
|
||||
func (s *Store) AttachRepoToProject(projectID string, repoID string) error {
|
||||
var err error
|
||||
_, err = s.DB.Exec(`INSERT INTO project_repos (project_id, repo_id, created_at) VALUES (?, ?, ?)`,
|
||||
_, err = s.DB.Exec(`INSERT INTO project_repos (project_id, repo_id, created_at)
|
||||
VALUES ((SELECT id FROM projects WHERE public_id = ?), (SELECT id FROM repos WHERE public_id = ?), ?)`,
|
||||
projectID, repoID, time.Now().UTC())
|
||||
return err
|
||||
}
|
||||
|
||||
func (s *Store) DetachRepoFromProject(projectID string, repoID string) error {
|
||||
var err error
|
||||
_, err = s.DB.Exec(`DELETE FROM project_repos WHERE project_id = ? AND repo_id = ?`, projectID, repoID)
|
||||
_, err = s.DB.Exec(`DELETE FROM project_repos
|
||||
WHERE project_id = (SELECT id FROM projects WHERE public_id = ?)
|
||||
AND repo_id = (SELECT id FROM repos WHERE public_id = ?)`, projectID, repoID)
|
||||
return err
|
||||
}
|
||||
|
||||
@@ -1247,9 +1474,12 @@ func (s *Store) GetRepoProjectIDs(repoID string) ([]string, error) {
|
||||
var ids []string
|
||||
var id string
|
||||
rows, err = s.DB.Query(`
|
||||
SELECT project_id FROM repos WHERE id = ?
|
||||
SELECT p.public_id FROM repos r JOIN projects p ON p.id = r.project_id WHERE r.public_id = ?
|
||||
UNION
|
||||
SELECT project_id FROM project_repos WHERE repo_id = ?
|
||||
SELECT p.public_id
|
||||
FROM project_repos pr
|
||||
JOIN projects p ON p.id = pr.project_id
|
||||
WHERE pr.repo_id = (SELECT id FROM repos WHERE public_id = ?)
|
||||
`, repoID, repoID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
@@ -1279,20 +1509,35 @@ func (s *Store) ListAvailableReposForProject(projectID string, query string, lim
|
||||
}
|
||||
if query == "" {
|
||||
rows, err = s.DB.Query(`
|
||||
SELECT id, project_id, name, type, path, created_by, created_at
|
||||
FROM repos
|
||||
WHERE project_id <> ? AND id NOT IN (SELECT repo_id FROM project_repos WHERE project_id = ?)
|
||||
ORDER BY name
|
||||
SELECT r.public_id, p.public_id, r.name, r.type, r.path, u.public_id, r.created_at
|
||||
FROM repos r
|
||||
JOIN projects p ON p.id = r.project_id
|
||||
JOIN users u ON u.id = r.created_by
|
||||
WHERE p.public_id <> ?
|
||||
AND r.id NOT IN (
|
||||
SELECT pr.repo_id
|
||||
FROM project_repos pr
|
||||
JOIN projects px ON px.id = pr.project_id
|
||||
WHERE px.public_id = ?
|
||||
)
|
||||
ORDER BY r.name
|
||||
LIMIT ? OFFSET ?
|
||||
`, projectID, projectID, limit, offset)
|
||||
} else {
|
||||
rows, err = s.DB.Query(`
|
||||
SELECT id, project_id, name, type, path, created_by, created_at
|
||||
FROM repos
|
||||
WHERE project_id <> ?
|
||||
AND id NOT IN (SELECT repo_id FROM project_repos WHERE project_id = ?)
|
||||
AND name LIKE ?
|
||||
ORDER BY name
|
||||
SELECT r.public_id, p.public_id, r.name, r.type, r.path, u.public_id, r.created_at
|
||||
FROM repos r
|
||||
JOIN projects p ON p.id = r.project_id
|
||||
JOIN users u ON u.id = r.created_by
|
||||
WHERE p.public_id <> ?
|
||||
AND r.id NOT IN (
|
||||
SELECT pr.repo_id
|
||||
FROM project_repos pr
|
||||
JOIN projects px ON px.id = pr.project_id
|
||||
WHERE px.public_id = ?
|
||||
)
|
||||
AND r.name LIKE ?
|
||||
ORDER BY r.name
|
||||
LIMIT ? OFFSET ?
|
||||
`, projectID, projectID, "%"+query+"%", limit, offset)
|
||||
}
|
||||
@@ -1332,9 +1577,9 @@ func (s *Store) CreateIssue(issue models.Issue) (models.Issue, error) {
|
||||
issue.Status = "open"
|
||||
}
|
||||
_, err = s.DB.Exec(`
|
||||
INSERT INTO issues (id, project_id, title, body, status, created_by, assignee_id, created_at, updated_at)
|
||||
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
|
||||
`, issue.ID, issue.ProjectID, issue.Title, issue.Body, issue.Status, issue.CreatedBy, nullIfEmpty(issue.AssigneeID), now, now)
|
||||
INSERT INTO issues (public_id, project_id, title, body, status, created_by, assignee_id, created_at, updated_at)
|
||||
VALUES (?, (SELECT id FROM projects WHERE public_id = ?), ?, ?, ?, (SELECT id FROM users WHERE public_id = ?), (SELECT id FROM users WHERE public_id = ?), ?, ?)
|
||||
`, issue.ID, issue.ProjectID, issue.Title, issue.Body, issue.Status, issue.CreatedBy, issue.AssigneeID, now, now)
|
||||
return issue, err
|
||||
}
|
||||
|
||||
@@ -1345,8 +1590,8 @@ func (s *Store) UpdateIssue(issue models.Issue) error {
|
||||
now = time.Now().UTC()
|
||||
nowUnix = now.Unix()
|
||||
issue.UpdatedAt = nowUnix
|
||||
_, err = s.DB.Exec(`UPDATE issues SET title = ?, body = ?, status = ?, assignee_id = ?, updated_at = ? WHERE id = ?`,
|
||||
issue.Title, issue.Body, issue.Status, nullIfEmpty(issue.AssigneeID), now, issue.ID)
|
||||
_, err = s.DB.Exec(`UPDATE issues SET title = ?, body = ?, status = ?, assignee_id = (SELECT id FROM users WHERE public_id = ?), updated_at = ? WHERE public_id = ?`,
|
||||
issue.Title, issue.Body, issue.Status, issue.AssigneeID, now, issue.ID)
|
||||
return err
|
||||
}
|
||||
|
||||
@@ -1356,7 +1601,12 @@ func (s *Store) GetIssue(id string) (models.Issue, error) {
|
||||
var created time.Time
|
||||
var updated time.Time
|
||||
var err error
|
||||
row = s.DB.QueryRow(`SELECT id, project_id, title, body, status, created_by, COALESCE(assignee_id, ''), created_at, updated_at FROM issues WHERE id = ?`, id)
|
||||
row = s.DB.QueryRow(`SELECT i.public_id, p.public_id, i.title, i.body, i.status, cu.public_id, COALESCE(au.public_id, ''), i.created_at, i.updated_at
|
||||
FROM issues i
|
||||
JOIN projects p ON p.id = i.project_id
|
||||
JOIN users cu ON cu.id = i.created_by
|
||||
LEFT JOIN users au ON au.id = i.assignee_id
|
||||
WHERE i.public_id = ?`, id)
|
||||
err = row.Scan(&issue.ID, &issue.ProjectID, &issue.Title, &issue.Body, &issue.Status, &issue.CreatedBy, &issue.AssigneeID, &created, &updated)
|
||||
if err != nil {
|
||||
return issue, err
|
||||
@@ -1373,7 +1623,13 @@ func (s *Store) ListIssues(projectID string) ([]models.Issue, error) {
|
||||
var issue models.Issue
|
||||
var created time.Time
|
||||
var updated time.Time
|
||||
rows, err = s.DB.Query(`SELECT id, project_id, title, body, status, created_by, COALESCE(assignee_id, ''), created_at, updated_at FROM issues WHERE project_id = ? ORDER BY created_at DESC`, projectID)
|
||||
rows, err = s.DB.Query(`SELECT i.public_id, p.public_id, i.title, i.body, i.status, cu.public_id, COALESCE(au.public_id, ''), i.created_at, i.updated_at
|
||||
FROM issues i
|
||||
JOIN projects p ON p.id = i.project_id
|
||||
JOIN users cu ON cu.id = i.created_by
|
||||
LEFT JOIN users au ON au.id = i.assignee_id
|
||||
WHERE i.project_id = (SELECT id FROM projects WHERE public_id = ?)
|
||||
ORDER BY i.created_at DESC`, projectID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@@ -1405,7 +1661,8 @@ func (s *Store) AddIssueComment(comment models.IssueComment) (models.IssueCommen
|
||||
now = time.Now().UTC()
|
||||
nowUnix = now.Unix()
|
||||
comment.CreatedAt = nowUnix
|
||||
_, err = s.DB.Exec(`INSERT INTO issue_comments (id, issue_id, body, created_by, created_at) VALUES (?, ?, ?, ?, ?)`,
|
||||
_, err = s.DB.Exec(`INSERT INTO issue_comments (public_id, issue_id, body, created_by, created_at)
|
||||
VALUES (?, (SELECT id FROM issues WHERE public_id = ?), ?, (SELECT id FROM users WHERE public_id = ?), ?)`,
|
||||
comment.ID, comment.IssueID, comment.Body, comment.CreatedBy, now)
|
||||
return comment, err
|
||||
}
|
||||
@@ -1425,7 +1682,8 @@ func (s *Store) CreateWikiPage(page models.WikiPage) (models.WikiPage, error) {
|
||||
now = time.Now().UTC()
|
||||
nowUnix = now.Unix()
|
||||
page.UpdatedAt = nowUnix
|
||||
_, err = s.DB.Exec(`INSERT INTO wiki_pages (id, project_id, title, slug, body, created_by, updated_at) VALUES (?, ?, ?, ?, ?, ?, ?)`,
|
||||
_, err = s.DB.Exec(`INSERT INTO wiki_pages (public_id, project_id, title, slug, body, created_by, updated_at)
|
||||
VALUES (?, (SELECT id FROM projects WHERE public_id = ?), ?, ?, ?, (SELECT id FROM users WHERE public_id = ?), ?)`,
|
||||
page.ID, page.ProjectID, page.Title, page.Slug, page.Body, page.CreatedBy, now)
|
||||
return page, err
|
||||
}
|
||||
@@ -1437,7 +1695,7 @@ func (s *Store) UpdateWikiPage(page models.WikiPage) error {
|
||||
now = time.Now().UTC()
|
||||
nowUnix = now.Unix()
|
||||
page.UpdatedAt = nowUnix
|
||||
_, err = s.DB.Exec(`UPDATE wiki_pages SET title = ?, body = ?, updated_at = ? WHERE id = ?`, page.Title, page.Body, now, page.ID)
|
||||
_, err = s.DB.Exec(`UPDATE wiki_pages SET title = ?, body = ?, updated_at = ? WHERE public_id = ?`, page.Title, page.Body, now, page.ID)
|
||||
return err
|
||||
}
|
||||
|
||||
@@ -1447,7 +1705,12 @@ func (s *Store) ListWikiPages(projectID string) ([]models.WikiPage, error) {
|
||||
var pages []models.WikiPage
|
||||
var page models.WikiPage
|
||||
var updated time.Time
|
||||
rows, err = s.DB.Query(`SELECT id, project_id, title, slug, body, created_by, updated_at FROM wiki_pages WHERE project_id = ? ORDER BY title`, projectID)
|
||||
rows, err = s.DB.Query(`SELECT w.public_id, p.public_id, w.title, w.slug, w.body, u.public_id, w.updated_at
|
||||
FROM wiki_pages w
|
||||
JOIN projects p ON p.id = w.project_id
|
||||
JOIN users u ON u.id = w.created_by
|
||||
WHERE w.project_id = (SELECT id FROM projects WHERE public_id = ?)
|
||||
ORDER BY w.title`, projectID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@@ -1468,7 +1731,11 @@ func (s *Store) GetWikiPage(id string) (models.WikiPage, error) {
|
||||
var row *sql.Row
|
||||
var updated time.Time
|
||||
var err error
|
||||
row = s.DB.QueryRow(`SELECT id, project_id, title, slug, body, created_by, updated_at FROM wiki_pages WHERE id = ?`, id)
|
||||
row = s.DB.QueryRow(`SELECT w.public_id, p.public_id, w.title, w.slug, w.body, u.public_id, w.updated_at
|
||||
FROM wiki_pages w
|
||||
JOIN projects p ON p.id = w.project_id
|
||||
JOIN users u ON u.id = w.created_by
|
||||
WHERE w.public_id = ?`, id)
|
||||
err = row.Scan(&page.ID, &page.ProjectID, &page.Title, &page.Slug, &page.Body, &page.CreatedBy, &updated)
|
||||
if err != nil {
|
||||
return page, err
|
||||
@@ -1492,7 +1759,8 @@ func (s *Store) CreateUpload(upload models.Upload) (models.Upload, error) {
|
||||
now = time.Now().UTC()
|
||||
nowUnix = now.Unix()
|
||||
upload.CreatedAt = nowUnix
|
||||
_, err = s.DB.Exec(`INSERT INTO uploads (id, project_id, filename, content_type, size, storage_path, created_by, created_at) VALUES (?, ?, ?, ?, ?, ?, ?, ?)`,
|
||||
_, err = s.DB.Exec(`INSERT INTO uploads (public_id, project_id, filename, content_type, size, storage_path, created_by, created_at)
|
||||
VALUES (?, (SELECT id FROM projects WHERE public_id = ?), ?, ?, ?, ?, (SELECT id FROM users WHERE public_id = ?), ?)`,
|
||||
upload.ID, upload.ProjectID, upload.Filename, upload.ContentType, upload.Size, upload.StoragePath, upload.CreatedBy, now)
|
||||
return upload, err
|
||||
}
|
||||
@@ -1502,7 +1770,11 @@ func (s *Store) GetUpload(id string) (models.Upload, error) {
|
||||
var row *sql.Row
|
||||
var created time.Time
|
||||
var err error
|
||||
row = s.DB.QueryRow(`SELECT id, project_id, filename, content_type, size, storage_path, created_by, created_at FROM uploads WHERE id = ?`, id)
|
||||
row = s.DB.QueryRow(`SELECT up.public_id, p.public_id, up.filename, up.content_type, up.size, up.storage_path, u.public_id, up.created_at
|
||||
FROM uploads up
|
||||
JOIN projects p ON p.id = up.project_id
|
||||
JOIN users u ON u.id = up.created_by
|
||||
WHERE up.public_id = ?`, id)
|
||||
err = row.Scan(&upload.ID, &upload.ProjectID, &upload.Filename, &upload.ContentType, &upload.Size, &upload.StoragePath, &upload.CreatedBy, &created)
|
||||
if err != nil {
|
||||
return upload, err
|
||||
@@ -1517,7 +1789,12 @@ func (s *Store) ListUploads(projectID string) ([]models.Upload, error) {
|
||||
var uploads []models.Upload
|
||||
var upload models.Upload
|
||||
var created time.Time
|
||||
rows, err = s.DB.Query(`SELECT id, project_id, filename, content_type, size, storage_path, created_by, created_at FROM uploads WHERE project_id = ? ORDER BY created_at DESC`, projectID)
|
||||
rows, err = s.DB.Query(`SELECT up.public_id, p.public_id, up.filename, up.content_type, up.size, up.storage_path, u.public_id, up.created_at
|
||||
FROM uploads up
|
||||
JOIN projects p ON p.id = up.project_id
|
||||
JOIN users u ON u.id = up.created_by
|
||||
WHERE up.project_id = (SELECT id FROM projects WHERE public_id = ?)
|
||||
ORDER BY up.created_at DESC`, projectID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
94
backend/internal/db/tls_listeners.go
Normal file
94
backend/internal/db/tls_listeners.go
Normal file
@@ -0,0 +1,94 @@
|
||||
package db
|
||||
|
||||
import "database/sql"
|
||||
import "strings"
|
||||
import "time"
|
||||
|
||||
import "codit/internal/models"
|
||||
import "codit/internal/util"
|
||||
|
||||
func (s *Store) ListTLSListeners() ([]models.TLSListener, error) {
|
||||
var rows *sql.Rows
|
||||
var err error
|
||||
var items []models.TLSListener
|
||||
var item models.TLSListener
|
||||
var httpAddrs string
|
||||
var httpsAddrs string
|
||||
var certAllowlist string
|
||||
rows, err = s.DB.Query(`SELECT l.public_id, l.name, l.enabled, l.http_addrs, l.https_addrs, l.auth_policy, l.apply_policy_api, l.apply_policy_git, l.apply_policy_rpm, l.apply_policy_v2, l.client_cert_allowlist, l.tls_server_cert_source, l.tls_cert_file, l.tls_key_file, l.tls_pki_server_cert_id, l.tls_client_auth, l.tls_client_ca_file, l.tls_pki_client_ca_id, l.tls_min_version, l.created_at, l.updated_at FROM tls_listeners l ORDER BY l.name`)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
for rows.Next() {
|
||||
err = rows.Scan(&item.ID, &item.Name, &item.Enabled, &httpAddrs, &httpsAddrs, &item.AuthPolicy, &item.ApplyPolicyAPI, &item.ApplyPolicyGit, &item.ApplyPolicyRPM, &item.ApplyPolicyV2, &certAllowlist, &item.TLSServerCertSource, &item.TLSCertFile, &item.TLSKeyFile, &item.TLSPKIServerCertID, &item.TLSClientAuth, &item.TLSClientCAFile, &item.TLSPKIClientCAID, &item.TLSMinVersion, &item.CreatedAt, &item.UpdatedAt)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
item.HTTPAddrs = splitCSVValue(httpAddrs)
|
||||
item.HTTPSAddrs = splitCSVValue(httpsAddrs)
|
||||
item.ClientCertAllowlist = splitCSVValue(certAllowlist)
|
||||
items = append(items, item)
|
||||
}
|
||||
err = rows.Err()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return items, nil
|
||||
}
|
||||
|
||||
func (s *Store) GetTLSListener(id string) (models.TLSListener, error) {
|
||||
var row *sql.Row
|
||||
var item models.TLSListener
|
||||
var httpAddrs string
|
||||
var httpsAddrs string
|
||||
var certAllowlist string
|
||||
var err error
|
||||
row = s.DB.QueryRow(`SELECT l.public_id, l.name, l.enabled, l.http_addrs, l.https_addrs, l.auth_policy, l.apply_policy_api, l.apply_policy_git, l.apply_policy_rpm, l.apply_policy_v2, l.client_cert_allowlist, l.tls_server_cert_source, l.tls_cert_file, l.tls_key_file, l.tls_pki_server_cert_id, l.tls_client_auth, l.tls_client_ca_file, l.tls_pki_client_ca_id, l.tls_min_version, l.created_at, l.updated_at FROM tls_listeners l WHERE l.public_id = ?`, id)
|
||||
err = row.Scan(&item.ID, &item.Name, &item.Enabled, &httpAddrs, &httpsAddrs, &item.AuthPolicy, &item.ApplyPolicyAPI, &item.ApplyPolicyGit, &item.ApplyPolicyRPM, &item.ApplyPolicyV2, &certAllowlist, &item.TLSServerCertSource, &item.TLSCertFile, &item.TLSKeyFile, &item.TLSPKIServerCertID, &item.TLSClientAuth, &item.TLSClientCAFile, &item.TLSPKIClientCAID, &item.TLSMinVersion, &item.CreatedAt, &item.UpdatedAt)
|
||||
if err != nil {
|
||||
return item, err
|
||||
}
|
||||
item.HTTPAddrs = splitCSVValue(httpAddrs)
|
||||
item.HTTPSAddrs = splitCSVValue(httpsAddrs)
|
||||
item.ClientCertAllowlist = splitCSVValue(certAllowlist)
|
||||
return item, nil
|
||||
}
|
||||
|
||||
func (s *Store) CreateTLSListener(item models.TLSListener) (models.TLSListener, error) {
|
||||
var id string
|
||||
var now int64
|
||||
var err error
|
||||
if item.ID == "" {
|
||||
id, err = util.NewID()
|
||||
if err != nil {
|
||||
return item, err
|
||||
}
|
||||
item.ID = id
|
||||
}
|
||||
now = time.Now().UTC().Unix()
|
||||
item.CreatedAt = now
|
||||
item.UpdatedAt = now
|
||||
_, err = s.DB.Exec(`INSERT INTO tls_listeners (public_id, name, enabled, http_addrs, https_addrs, auth_policy, apply_policy_api, apply_policy_git, apply_policy_rpm, apply_policy_v2, client_cert_allowlist, tls_server_cert_source, tls_cert_file, tls_key_file, tls_pki_server_cert_id, tls_client_auth, tls_client_ca_file, tls_pki_client_ca_id, tls_min_version, created_at, updated_at) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
|
||||
item.ID, item.Name, item.Enabled, strings.Join(item.HTTPAddrs, ","), strings.Join(item.HTTPSAddrs, ","), item.AuthPolicy, item.ApplyPolicyAPI, item.ApplyPolicyGit, item.ApplyPolicyRPM, item.ApplyPolicyV2, strings.Join(item.ClientCertAllowlist, ","), item.TLSServerCertSource, item.TLSCertFile, item.TLSKeyFile, item.TLSPKIServerCertID, item.TLSClientAuth, item.TLSClientCAFile, item.TLSPKIClientCAID, item.TLSMinVersion, item.CreatedAt, item.UpdatedAt)
|
||||
if err != nil {
|
||||
return item, err
|
||||
}
|
||||
return item, nil
|
||||
}
|
||||
|
||||
func (s *Store) UpdateTLSListener(item models.TLSListener) error {
|
||||
var err error
|
||||
var now int64
|
||||
now = time.Now().UTC().Unix()
|
||||
item.UpdatedAt = now
|
||||
_, err = s.DB.Exec(`UPDATE tls_listeners SET name = ?, enabled = ?, http_addrs = ?, https_addrs = ?, auth_policy = ?, apply_policy_api = ?, apply_policy_git = ?, apply_policy_rpm = ?, apply_policy_v2 = ?, client_cert_allowlist = ?, tls_server_cert_source = ?, tls_cert_file = ?, tls_key_file = ?, tls_pki_server_cert_id = ?, tls_client_auth = ?, tls_client_ca_file = ?, tls_pki_client_ca_id = ?, tls_min_version = ?, updated_at = ? WHERE public_id = ?`,
|
||||
item.Name, item.Enabled, strings.Join(item.HTTPAddrs, ","), strings.Join(item.HTTPSAddrs, ","), item.AuthPolicy, item.ApplyPolicyAPI, item.ApplyPolicyGit, item.ApplyPolicyRPM, item.ApplyPolicyV2, strings.Join(item.ClientCertAllowlist, ","), item.TLSServerCertSource, item.TLSCertFile, item.TLSKeyFile, item.TLSPKIServerCertID, item.TLSClientAuth, item.TLSClientCAFile, item.TLSPKIClientCAID, item.TLSMinVersion, item.UpdatedAt, item.ID)
|
||||
return err
|
||||
}
|
||||
|
||||
func (s *Store) DeleteTLSListener(id string) error {
|
||||
var err error
|
||||
_, err = s.DB.Exec(`DELETE FROM tls_listeners WHERE public_id = ?`, id)
|
||||
return err
|
||||
}
|
||||
@@ -161,6 +161,8 @@ func (s *HTTPServer) resolveRepo(repoName string) (models.Repo, models.Project,
|
||||
var parts []string
|
||||
var project models.Project
|
||||
var repo models.Repo
|
||||
var projectStorageID int64
|
||||
var repoStorageID int64
|
||||
var err error
|
||||
var slug string
|
||||
var name string
|
||||
@@ -186,12 +188,18 @@ func (s *HTTPServer) resolveRepo(repoName string) (models.Repo, models.Project,
|
||||
if err != nil {
|
||||
return repo, project, "", err
|
||||
}
|
||||
if repo.Path == "" {
|
||||
repo.Path = filepath.Join(s.baseDir, project.ID, repo.Name)
|
||||
projectStorageID, repoStorageID, err = s.store.GetRepoStorageIDs(repo.ID)
|
||||
if err != nil {
|
||||
return repo, project, "", err
|
||||
}
|
||||
repo.Path = filepath.Join(s.baseDir, storageIDSegment(projectStorageID), storageIDSegment(repoStorageID))
|
||||
return repo, project, image, nil
|
||||
}
|
||||
|
||||
// storageIDSegment renders a storage row id as a fixed-width,
// zero-padded 16-digit lowercase hex string, suitable for use as a
// stable on-disk path segment.
func storageIDSegment(id int64) string {
	var segment string
	segment = fmt.Sprintf("%016x", id)
	return segment
}
|
||||
|
||||
func IsReservedImagePath(image string) bool {
|
||||
var cleaned string
|
||||
var parts []string
|
||||
@@ -574,6 +582,7 @@ func resolveManifest(repoPath string, reference string) (ociDescriptor, error) {
|
||||
var desc ociDescriptor
|
||||
var err error
|
||||
var ok bool
|
||||
var data []byte
|
||||
if isDigestRef(reference) {
|
||||
ok, err = HasBlob(repoPath, reference)
|
||||
if err != nil {
|
||||
@@ -583,6 +592,11 @@ func resolveManifest(repoPath string, reference string) (ociDescriptor, error) {
|
||||
return desc, ErrNotFound
|
||||
}
|
||||
desc = ociDescriptor{Digest: reference}
|
||||
data, err = ReadBlob(repoPath, reference)
|
||||
if err == nil {
|
||||
desc.Size = int64(len(data))
|
||||
desc.MediaType = detectManifestMediaType(data)
|
||||
}
|
||||
return desc, nil
|
||||
}
|
||||
desc, err = resolveTag(repoPath, reference)
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
1056
backend/internal/handlers/pki.go
Normal file
1056
backend/internal/handlers/pki.go
Normal file
File diff suppressed because it is too large
Load Diff
@@ -12,6 +12,7 @@ import "codit/internal/util"
|
||||
// ctxKey is a private context-key type so that values this package
// stores in a request context cannot collide with keys defined by
// other packages.
type ctxKey string

// userKey indexes the authenticated models.User in a request context.
const userKey ctxKey = "user"

// principalKey indexes the authenticated models.ServicePrincipal in a
// request context (see WithPrincipal / PrincipalFromContext).
const principalKey ctxKey = "principal"
|
||||
|
||||
func WithUser(store *db.Store, next http.Handler) http.Handler {
|
||||
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
@@ -68,6 +69,19 @@ func UserFromContext(ctx context.Context) (models.User, bool) {
|
||||
return user, ok
|
||||
}
|
||||
|
||||
func WithPrincipal(r *http.Request, principal models.ServicePrincipal) *http.Request {
|
||||
var ctx context.Context
|
||||
ctx = context.WithValue(r.Context(), principalKey, principal)
|
||||
return r.WithContext(ctx)
|
||||
}
|
||||
|
||||
func PrincipalFromContext(ctx context.Context) (models.ServicePrincipal, bool) {
|
||||
var principal models.ServicePrincipal
|
||||
var ok bool
|
||||
principal, ok = ctx.Value(principalKey).(models.ServicePrincipal)
|
||||
return principal, ok
|
||||
}
|
||||
|
||||
func RequireAuth(next http.Handler) http.Handler {
|
||||
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
var ok bool
|
||||
|
||||
@@ -44,6 +44,66 @@ type Repo struct {
|
||||
IsForeign bool `json:"is_foreign"`
|
||||
}
|
||||
|
||||
// RPMRepoDir is the API/DB representation of one directory inside an
// RPM repository: its mode, upstream mirror settings, sync schedule,
// and the progress/outcome of the most recent synchronization.
// Timestamp fields hold UTC unix seconds.
type RPMRepoDir struct {
	RepoID string `json:"repo_id"`
	Path string `json:"path"`
	Mode string `json:"mode"`
	AllowDelete bool `json:"allow_delete"`
	// Upstream source and connection overrides (see mirrorHTTPConfig
	// in the rpm package for how these are applied).
	RemoteURL string `json:"remote_url"`
	ConnectHost string `json:"connect_host"`
	HostHeader string `json:"host_header"`
	TLSServerName string `json:"tls_server_name"`
	TLSInsecureSkipVerify bool `json:"tls_insecure_skip_verify"`
	// Scheduling state.
	SyncIntervalSec int64 `json:"sync_interval_sec"`
	SyncEnabled bool `json:"sync_enabled"`
	// Dirty forces the next sync even if the upstream revision did not
	// change.
	Dirty bool `json:"dirty"`
	NextSyncAt int64 `json:"next_sync_at"`
	// Live progress/status of the current or last sync run.
	SyncRunning bool `json:"sync_running"`
	SyncStatus string `json:"sync_status"`
	SyncError string `json:"sync_error"`
	SyncStep string `json:"sync_step"`
	SyncTotal int64 `json:"sync_total"`
	SyncDone int64 `json:"sync_done"`
	SyncFailed int64 `json:"sync_failed"`
	SyncDeleted int64 `json:"sync_deleted"`
	LastSyncStartedAt int64 `json:"last_sync_started_at"`
	LastSyncFinishedAt int64 `json:"last_sync_finished_at"`
	LastSyncSuccessAt int64 `json:"last_sync_success_at"`
	LastSyncedRevision string `json:"last_synced_revision"`
	CreatedAt int64 `json:"created_at"`
	UpdatedAt int64 `json:"updated_at"`
}
|
||||
|
||||
// RPMMirrorTask is the unit of work consumed by the mirror sync loop:
// where the repo lives on disk, which sub-path to mirror, how to reach
// the upstream, and the state needed to decide whether a sync is due.
type RPMMirrorTask struct {
	RepoID string `json:"repo_id"`
	// RepoPath is the repository's on-disk root; MirrorPath is the
	// slash-separated sub-directory mirrored beneath it.
	RepoPath string `json:"repo_path"`
	MirrorPath string `json:"mirror_path"`
	RemoteURL string `json:"remote_url"`
	ConnectHost string `json:"connect_host"`
	HostHeader string `json:"host_header"`
	TLSServerName string `json:"tls_server_name"`
	TLSInsecureSkipVerify bool `json:"tls_insecure_skip_verify"`
	SyncIntervalSec int64 `json:"sync_interval_sec"`
	// Dirty forces a full sync even when the upstream repomd revision
	// equals LastSyncedRevision.
	Dirty bool `json:"dirty"`
	LastSyncedRevision string `json:"last_synced_revision"`
}
|
||||
|
||||
// RPMMirrorRun is one historical record of a mirror sync attempt:
// when it ran, which step it ended on, its package counters, and the
// outcome ("success"/"failed" in Status, details in Error).
type RPMMirrorRun struct {
	ID string `json:"id"`
	RepoID string `json:"repo_id"`
	Path string `json:"path"`
	StartedAt int64 `json:"started_at"`
	FinishedAt int64 `json:"finished_at"`
	Status string `json:"status"`
	Step string `json:"step"`
	// Package counters for the run: expected, processed, failed,
	// locally deleted as stale.
	Total int64 `json:"total"`
	Done int64 `json:"done"`
	Failed int64 `json:"failed"`
	Deleted int64 `json:"deleted"`
	// Revision is the synced upstream repomd revision, when successful.
	Revision string `json:"revision"`
	Error string `json:"error"`
}
|
||||
|
||||
type Issue struct {
|
||||
ID string `json:"id"`
|
||||
ProjectID string `json:"project_id"`
|
||||
@@ -128,3 +188,96 @@ type AuthSettings struct {
|
||||
OIDCScopes string `json:"oidc_scopes"`
|
||||
OIDCTLSInsecureSkipVerify bool `json:"oidc_tls_insecure_skip_verify"`
|
||||
}
|
||||
|
||||
// TLSSettings is the server-wide HTTP/HTTPS listen and TLS
// configuration: bind addresses plus the server-certificate source
// (file-based or PKI-issued) and optional client-certificate auth.
type TLSSettings struct {
	HTTPAddrs []string `json:"http_addrs"`
	HTTPSAddrs []string `json:"https_addrs"`
	// TLSServerCertSource selects between file- and PKI-based certs;
	// the corresponding file paths or PKI cert id below apply.
	TLSServerCertSource string `json:"tls_server_cert_source"`
	TLSCertFile string `json:"tls_cert_file"`
	TLSKeyFile string `json:"tls_key_file"`
	TLSPKIServerCertID string `json:"tls_pki_server_cert_id"`
	TLSClientAuth string `json:"tls_client_auth"`
	TLSClientCAFile string `json:"tls_client_ca_file"`
	TLSPKIClientCAID string `json:"tls_pki_client_ca_id"`
	TLSMinVersion string `json:"tls_min_version"`
}
|
||||
|
||||
// TLSListener is one named, individually enable-able listener: its
// bind addresses, authentication policy with per-protocol application
// flags, and its TLS server/client certificate configuration.
// Timestamp fields hold UTC unix seconds.
type TLSListener struct {
	ID string `json:"id"`
	Name string `json:"name"`
	Enabled bool `json:"enabled"`
	HTTPAddrs []string `json:"http_addrs"`
	HTTPSAddrs []string `json:"https_addrs"`
	// AuthPolicy plus the ApplyPolicy* flags control which protocol
	// surfaces (API, git, RPM, OCI v2) the policy is enforced on.
	AuthPolicy string `json:"auth_policy"`
	ApplyPolicyAPI bool `json:"apply_policy_api"`
	ApplyPolicyGit bool `json:"apply_policy_git"`
	ApplyPolicyRPM bool `json:"apply_policy_rpm"`
	ApplyPolicyV2 bool `json:"apply_policy_v2"`
	ClientCertAllowlist []string `json:"client_cert_allowlist"`
	// Server certificate: from files or from a PKI-issued certificate,
	// selected by TLSServerCertSource.
	TLSServerCertSource string `json:"tls_server_cert_source"`
	TLSCertFile string `json:"tls_cert_file"`
	TLSKeyFile string `json:"tls_key_file"`
	TLSPKIServerCertID string `json:"tls_pki_server_cert_id"`
	// Client certificate verification settings.
	TLSClientAuth string `json:"tls_client_auth"`
	TLSClientCAFile string `json:"tls_client_ca_file"`
	TLSPKIClientCAID string `json:"tls_pki_client_ca_id"`
	TLSMinVersion string `json:"tls_min_version"`
	CreatedAt int64 `json:"created_at"`
	UpdatedAt int64 `json:"updated_at"`
}
|
||||
|
||||
// PKICA is a certificate authority record in the built-in PKI store.
// CertPEM/KeyPEM hold the CA material in PEM form; SerialCounter is
// presumably the next issuance serial — confirm against the issuing
// code in the pki handlers.
type PKICA struct {
	ID string `json:"id"`
	Name string `json:"name"`
	ParentCAID string `json:"parent_ca_id"`
	IsRoot bool `json:"is_root"`
	CertPEM string `json:"cert_pem"`
	KeyPEM string `json:"key_pem"`
	SerialCounter int64 `json:"serial_counter"`
	Status string `json:"status"`
	CreatedAt int64 `json:"created_at"`
	UpdatedAt int64 `json:"updated_at"`
}
|
||||
|
||||
// PKICert is a certificate issued by a PKICA (referenced via CAID),
// with its subject data, PEM material, validity window, and revocation
// state. Time fields hold UTC unix seconds.
type PKICert struct {
	ID string `json:"id"`
	CAID string `json:"ca_id"`
	SerialHex string `json:"serial_hex"`
	CommonName string `json:"common_name"`
	// Subject alternative names; stored as strings in the DB row.
	SANDNS string `json:"san_dns"`
	SANIPs string `json:"san_ips"`
	IsCA bool `json:"is_ca"`
	CertPEM string `json:"cert_pem"`
	KeyPEM string `json:"key_pem"`
	NotBefore int64 `json:"not_before"`
	NotAfter int64 `json:"not_after"`
	Status string `json:"status"`
	RevokedAt int64 `json:"revoked_at"`
	RevocationReason string `json:"revocation_reason"`
	CreatedAt int64 `json:"created_at"`
}
|
||||
|
||||
// ServicePrincipal is a machine/service identity. CertPrincipalBinding
// maps client-certificate fingerprints to a principal, and
// PrincipalProjectRole grants it per-project roles.
type ServicePrincipal struct {
	ID string `json:"id"`
	Name string `json:"name"`
	Description string `json:"description"`
	IsAdmin bool `json:"is_admin"`
	Disabled bool `json:"disabled"`
	CreatedAt int64 `json:"created_at"`
	UpdatedAt int64 `json:"updated_at"`
}
|
||||
|
||||
// CertPrincipalBinding maps a client-certificate fingerprint to a
// service principal; Enabled allows a binding to be suspended without
// deleting it.
type CertPrincipalBinding struct {
	Fingerprint string `json:"fingerprint"`
	PrincipalID string `json:"principal_id"`
	Enabled bool `json:"enabled"`
	CreatedAt int64 `json:"created_at"`
	UpdatedAt int64 `json:"updated_at"`
}
|
||||
|
||||
// PrincipalProjectRole grants a service principal a named role on a
// single project.
type PrincipalProjectRole struct {
	PrincipalID string `json:"principal_id"`
	ProjectID string `json:"project_id"`
	Role string `json:"role"`
	CreatedAt int64 `json:"created_at"`
}
|
||||
|
||||
@@ -1,8 +1,11 @@
|
||||
package rpm
|
||||
|
||||
import "log"
|
||||
import "os"
|
||||
import "path/filepath"
|
||||
import "strings"
|
||||
import "sync"
|
||||
import "time"
|
||||
|
||||
import repokit "repokit"
|
||||
|
||||
@@ -24,6 +27,18 @@ func NewMetaManager() *MetaManager {
|
||||
return mgr
|
||||
}
|
||||
|
||||
func (m *MetaManager) IsRunning(dir string) bool {
|
||||
var state *metaState
|
||||
var ok bool
|
||||
m.mutex.Lock()
|
||||
defer m.mutex.Unlock()
|
||||
state, ok = m.states[dir]
|
||||
if !ok || state == nil {
|
||||
return false
|
||||
}
|
||||
return state.inProgress
|
||||
}
|
||||
|
||||
func (m *MetaManager) Schedule(dir string) {
|
||||
var state *metaState
|
||||
var ok bool
|
||||
@@ -46,7 +61,15 @@ func (m *MetaManager) Schedule(dir string) {
|
||||
func (m *MetaManager) run(dir string) {
|
||||
var err error
|
||||
var opts repokit.RpmRepoOptions
|
||||
var state *metaState
|
||||
var ok bool
|
||||
var repodataDir string
|
||||
var repomdPath string
|
||||
var entries []os.DirEntry
|
||||
var repomdInfo os.FileInfo
|
||||
var statErr error
|
||||
for {
|
||||
log.Printf("rpm metadata: job begin dir=%s", dir)
|
||||
opts = repokit.RpmDefaultRepoOptions()
|
||||
opts.LockMode = repokit.RpmLockFail
|
||||
opts.AllowMissingRepomd = true
|
||||
@@ -57,24 +80,55 @@ func (m *MetaManager) run(dir string) {
|
||||
if err != nil {
|
||||
if isLockError(err) {
|
||||
log.Printf("rpm metadata: lock busy dir=%s err=%v", dir, err)
|
||||
m.states[dir].pending = true
|
||||
m.states[dir].inProgress = false
|
||||
log.Printf("rpm metadata: job end dir=%s result=lock_busy", dir)
|
||||
state, ok = m.states[dir]
|
||||
if ok {
|
||||
state.pending = true
|
||||
state.inProgress = false
|
||||
}
|
||||
m.mutex.Unlock()
|
||||
time.AfterFunc(2*time.Second, func() {
|
||||
m.Schedule(dir)
|
||||
})
|
||||
return
|
||||
}
|
||||
log.Printf("rpm metadata: build failed dir=%s err=%v", dir, err)
|
||||
m.states[dir].inProgress = false
|
||||
log.Printf("rpm metadata: job end dir=%s result=failed err=%v", dir, err)
|
||||
state, ok = m.states[dir]
|
||||
if ok {
|
||||
state.inProgress = false
|
||||
}
|
||||
m.mutex.Unlock()
|
||||
return
|
||||
}
|
||||
repodataDir = filepath.Join(dir, "repodata")
|
||||
repomdPath = filepath.Join(repodataDir, "repomd.xml")
|
||||
entries, err = os.ReadDir(repodataDir)
|
||||
if err != nil {
|
||||
log.Printf("rpm metadata: post-check dir=%s repodata_dir=%s read_err=%v", dir, repodataDir, err)
|
||||
} else {
|
||||
statErr = nil
|
||||
repomdInfo = nil
|
||||
repomdInfo, statErr = os.Stat(repomdPath)
|
||||
if statErr != nil {
|
||||
log.Printf("rpm metadata: post-check dir=%s repodata_entries=%d repomd_path=%s repomd_err=%v", dir, len(entries), repomdPath, statErr)
|
||||
} else {
|
||||
log.Printf("rpm metadata: post-check dir=%s repodata_entries=%d repomd_path=%s repomd_size=%d", dir, len(entries), repomdPath, repomdInfo.Size())
|
||||
}
|
||||
}
|
||||
log.Printf("rpm metadata: build done dir=%s", dir)
|
||||
if m.states[dir].pending {
|
||||
m.states[dir].pending = false
|
||||
state, ok = m.states[dir]
|
||||
if ok && state.pending {
|
||||
log.Printf("rpm metadata: job end dir=%s result=pending_rerun", dir)
|
||||
state.pending = false
|
||||
m.mutex.Unlock()
|
||||
continue
|
||||
}
|
||||
m.states[dir].inProgress = false
|
||||
if ok {
|
||||
state.inProgress = false
|
||||
}
|
||||
m.mutex.Unlock()
|
||||
log.Printf("rpm metadata: job end dir=%s result=success", dir)
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
908
backend/internal/rpm/mirror.go
Normal file
908
backend/internal/rpm/mirror.go
Normal file
@@ -0,0 +1,908 @@
|
||||
package rpm
|
||||
|
||||
import "compress/gzip"
|
||||
import "context"
|
||||
import "crypto/md5"
|
||||
import "crypto/sha1"
|
||||
import "crypto/sha256"
|
||||
import "crypto/sha512"
|
||||
import "crypto/tls"
|
||||
import "bytes"
|
||||
import "encoding/hex"
|
||||
import "encoding/xml"
|
||||
import "errors"
|
||||
import "hash"
|
||||
import "io"
|
||||
import "io/fs"
|
||||
import "net"
|
||||
import "net/http"
|
||||
import "net/url"
|
||||
import "os"
|
||||
import "path/filepath"
|
||||
import "strconv"
|
||||
import "strings"
|
||||
import "sync"
|
||||
import "time"
|
||||
|
||||
import "codit/internal/db"
|
||||
import "codit/internal/models"
|
||||
import "codit/internal/util"
|
||||
|
||||
// MirrorManager drives background RPM mirror synchronization: it polls
// the store for due mirror tasks, downloads upstream metadata and
// packages, and schedules local repodata rebuilds via MetaManager.
type MirrorManager struct {
	store *db.Store
	logger *util.Logger
	meta *MetaManager
	// stopCh is closed by Stop to terminate the polling loop.
	stopCh chan struct{}
	// cancelMu guards cancelByKey, which maps a repoID+path task key to
	// the cancel func of its in-flight sync (see CancelTask/syncOne).
	cancelMu sync.Mutex
	cancelByKey map[string]context.CancelFunc
}
|
||||
|
||||
// repomdDoc mirrors the subset of repodata/repomd.xml this package
// reads: the list of <data> entries.
type repomdDoc struct {
	Data []repomdData `xml:"data"`
}

// repomdData is one <data> entry in repomd.xml, e.g. type="primary".
type repomdData struct {
	Type string `xml:"type,attr"`
	Location repomdLocation `xml:"location"`
}

// repomdLocation carries the repo-relative href of a <data> entry.
type repomdLocation struct {
	Href string `xml:"href,attr"`
}
|
||||
|
||||
// primaryDoc mirrors the subset of the primary metadata XML this
// package reads: the list of <package> entries.
type primaryDoc struct {
	Packages []primaryPackage `xml:"package"`
}

// primaryPackage is one <package> entry: its location, checksum, and
// file/build times.
type primaryPackage struct {
	Location primaryLocation `xml:"location"`
	Checksum primaryChecksum `xml:"checksum"`
	Time primaryTime `xml:"time"`
}

// primaryLocation carries a package path relative to the repo root.
type primaryLocation struct {
	Href string `xml:"href,attr"`
}

// primaryChecksum is a package checksum: the algorithm name in Type
// and the hex digest as character data.
type primaryChecksum struct {
	Type string `xml:"type,attr"`
	Value string `xml:",chardata"`
}

// mirrorChecksum is the parsed internal form of a package's expected
// checksum (algorithm + hex value) plus its build and file times.
type mirrorChecksum struct {
	Algo string
	Value string
	BuildTime int64
	FileTime int64
}

// primaryTime holds the raw file/build time attributes of a package
// as they appear in the XML.
type primaryTime struct {
	File string `xml:"file,attr"`
	Build string `xml:"build,attr"`
}
|
||||
|
||||
// mirrorHTTPConfig is the resolved HTTP configuration for one upstream
// mirror: the normalized base URL plus optional dial/Host/SNI
// overrides.
type mirrorHTTPConfig struct {
	BaseURL string
	// ConnectHost, when set, overrides the TCP dial target
	// (see buildMirrorHTTPClient).
	ConnectHost string
	// HostHeader, when set, overrides the HTTP Host header
	// (see effectiveHostHeader).
	HostHeader string
	// TLSServerName, when set, overrides the TLS SNI/verification name
	// (see effectiveServerName).
	TLSServerName string
	TLSInsecure bool
	// DefaultHost and DefaultServer are derived from BaseURL and are
	// used when the overrides above are empty.
	DefaultHost string
	DefaultServer string
}
|
||||
|
||||
func NewMirrorManager(store *db.Store, logger *util.Logger, meta *MetaManager) *MirrorManager {
|
||||
var m *MirrorManager
|
||||
m = &MirrorManager{
|
||||
store: store,
|
||||
logger: logger,
|
||||
meta: meta,
|
||||
stopCh: make(chan struct{}),
|
||||
cancelByKey: make(map[string]context.CancelFunc),
|
||||
}
|
||||
return m
|
||||
}
|
||||
|
||||
func (m *MirrorManager) CancelTask(repoID string, path string) bool {
|
||||
var key string
|
||||
var cancel context.CancelFunc
|
||||
if m == nil {
|
||||
return false
|
||||
}
|
||||
key = mirrorTaskKey(repoID, path)
|
||||
m.cancelMu.Lock()
|
||||
cancel = m.cancelByKey[key]
|
||||
m.cancelMu.Unlock()
|
||||
if cancel == nil {
|
||||
return false
|
||||
}
|
||||
cancel()
|
||||
return true
|
||||
}
|
||||
|
||||
func (m *MirrorManager) Start() {
|
||||
var err error
|
||||
var tasks []models.RPMMirrorTask
|
||||
var i int
|
||||
if m == nil {
|
||||
return
|
||||
}
|
||||
err = m.store.ResetRunningRPMMirrorTasks()
|
||||
if err != nil && m.logger != nil {
|
||||
m.logger.Write("rpm-mirror", util.LOG_ERROR, "reset running tasks failed err=%v", err)
|
||||
}
|
||||
tasks, err = m.store.ListRPMMirrorPaths()
|
||||
if err == nil {
|
||||
for i = 0; i < len(tasks); i++ {
|
||||
_ = m.store.CleanupRPMMirrorRunsRetention(tasks[i].RepoID, tasks[i].MirrorPath, 200, 30)
|
||||
}
|
||||
}
|
||||
go m.loop()
|
||||
}
|
||||
|
||||
func (m *MirrorManager) Stop() {
|
||||
if m == nil {
|
||||
return
|
||||
}
|
||||
close(m.stopCh)
|
||||
}
|
||||
|
||||
func (m *MirrorManager) loop() {
|
||||
var ticker *time.Ticker
|
||||
ticker = time.NewTicker(10 * time.Second)
|
||||
defer ticker.Stop()
|
||||
m.runDue()
|
||||
for {
|
||||
select {
|
||||
case <-m.stopCh:
|
||||
return
|
||||
case <-ticker.C:
|
||||
m.runDue()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (m *MirrorManager) runDue() {
|
||||
var tasks []models.RPMMirrorTask
|
||||
var started bool
|
||||
var now int64
|
||||
var i int
|
||||
var err error
|
||||
now = time.Now().UTC().Unix()
|
||||
tasks, err = m.store.ListDueRPMMirrorTasks(now, 8)
|
||||
if err != nil {
|
||||
if m.logger != nil {
|
||||
m.logger.Write("rpm-mirror", util.LOG_ERROR, "list due tasks failed err=%v", err)
|
||||
}
|
||||
return
|
||||
}
|
||||
for i = 0; i < len(tasks); i++ {
|
||||
started, err = m.store.TryStartRPMMirrorTask(tasks[i].RepoID, tasks[i].MirrorPath, now)
|
||||
if err != nil {
|
||||
if m.logger != nil {
|
||||
m.logger.Write("rpm-mirror", util.LOG_ERROR, "try start failed repo=%s path=%s err=%v", tasks[i].RepoID, tasks[i].MirrorPath, err)
|
||||
}
|
||||
continue
|
||||
}
|
||||
if !started {
|
||||
continue
|
||||
}
|
||||
m.syncOne(tasks[i])
|
||||
}
|
||||
}
|
||||
|
||||
// syncOne performs one full mirror synchronization for a task:
//  1. registers a cancel func so CancelTask can abort it,
//  2. records a run row, fetches upstream repodata/repomd.xml,
//  3. short-circuits when the repomd revision is unchanged (unless
//     the task is Dirty),
//  4. parses the primary package list, reconciles local files via
//     applyMirror, and schedules a repodata rebuild when files changed,
//  5. records the outcome on both the task row and the run row.
// Every early-return path finishes both the task and (once created)
// the run row with a matching step name.
func (m *MirrorManager) syncOne(task models.RPMMirrorTask) {
	var localRoot string
	var cfg mirrorHTTPConfig
	var client *http.Client
	var repomdData []byte
	var revision string
	var primaryHref string
	var primaryData []byte
	var expected map[string]mirrorChecksum
	var duplicateCount int
	var runID string
	var startedAt int64
	var total int64
	var done int64
	var failed int64
	var deleted int64
	var changed int64
	var err error
	var syncCtx context.Context
	var syncCancel context.CancelFunc
	var canceled bool
	var key string
	// On-disk root of the mirrored sub-path inside the repository.
	localRoot = filepath.Join(task.RepoPath, filepath.FromSlash(task.MirrorPath))
	startedAt = time.Now().UTC().Unix()
	// Register the cancel func under the task key so CancelTask can
	// abort this sync; the deferred block deregisters it on any exit.
	syncCtx, syncCancel = context.WithCancel(context.Background())
	key = mirrorTaskKey(task.RepoID, task.MirrorPath)
	m.cancelMu.Lock()
	m.cancelByKey[key] = syncCancel
	m.cancelMu.Unlock()
	defer func() {
		m.cancelMu.Lock()
		delete(m.cancelByKey, key)
		m.cancelMu.Unlock()
		syncCancel()
	}()
	runID, err = m.store.CreateRPMMirrorRun(task.RepoID, task.MirrorPath, startedAt)
	if err != nil {
		// No run row exists yet, so only the task records the failure.
		_ = m.store.FinishRPMMirrorTask(task.RepoID, task.MirrorPath, false, "", err.Error())
		return
	}
	cfg, err = buildMirrorHTTPConfig(task)
	if err != nil {
		if m.logger != nil {
			m.logger.Write("rpm-mirror", util.LOG_ERROR, "sync failed repo=%s path=%s step=start err=%v", task.RepoID, task.MirrorPath, err)
		}
		_ = m.store.FinishRPMMirrorTask(task.RepoID, task.MirrorPath, false, "", err.Error())
		_ = m.store.FinishRPMMirrorRun(runID, time.Now().UTC().Unix(), "failed", "start", 0, 0, 0, 0, "", err.Error())
		return
	}
	client = buildMirrorHTTPClient(cfg)
	if m.logger != nil {
		m.logger.Write("rpm-mirror", util.LOG_INFO, "sync start repo=%s path=%s remote=%s", task.RepoID, task.MirrorPath, task.RemoteURL)
	}
	// Progress updates are best-effort throughout.
	_ = m.store.UpdateRPMMirrorTaskProgress(task.RepoID, task.MirrorPath, "fetch_repodata", 0, 0, 0, 0)
	repomdData, err = mirrorFetch(syncCtx, client, cfg, "repodata/repomd.xml")
	if err != nil {
		if m.logger != nil {
			m.logger.Write("rpm-mirror", util.LOG_ERROR, "sync failed repo=%s path=%s step=fetch_repodata err=%v", task.RepoID, task.MirrorPath, err)
		}
		_ = m.store.FinishRPMMirrorTask(task.RepoID, task.MirrorPath, false, "", err.Error())
		_ = m.store.FinishRPMMirrorRun(runID, time.Now().UTC().Unix(), "failed", "fetch_repodata", 0, 0, 0, 0, "", err.Error())
		return
	}
	// The "revision" is simply the SHA-256 of the raw repomd.xml bytes;
	// an identical digest means the upstream repo content is unchanged.
	revision = sha256HexBytes(repomdData)
	if !task.Dirty && task.LastSyncedRevision != "" && task.LastSyncedRevision == revision {
		if m.meta != nil {
			// Even on no-change, make sure local repodata exists.
			ensureRepodata(task, localRoot, m.meta, m.logger)
		}
		if m.logger != nil {
			m.logger.Write("rpm-mirror", util.LOG_INFO, "sync done repo=%s path=%s status=no_change revision=%s", task.RepoID, task.MirrorPath, revision)
		}
		_ = m.store.FinishRPMMirrorTask(task.RepoID, task.MirrorPath, true, revision, "")
		_ = m.store.FinishRPMMirrorRun(runID, time.Now().UTC().Unix(), "success", "no_change", 0, 0, 0, 0, revision, "")
		return
	}
	primaryHref, err = parseRepomdPrimaryHref(repomdData)
	if err != nil {
		if m.logger != nil {
			m.logger.Write("rpm-mirror", util.LOG_ERROR, "sync failed repo=%s path=%s step=parse_repodata err=%v", task.RepoID, task.MirrorPath, err)
		}
		_ = m.store.FinishRPMMirrorTask(task.RepoID, task.MirrorPath, false, "", err.Error())
		// NOTE(review): the run row records step "fetch_repodata" here
		// while the log says parse_repodata — confirm this is intended.
		_ = m.store.FinishRPMMirrorRun(runID, time.Now().UTC().Unix(), "failed", "fetch_repodata", 0, 0, 0, 0, "", err.Error())
		return
	}
	_ = m.store.UpdateRPMMirrorTaskProgress(task.RepoID, task.MirrorPath, "fetch_primary", 0, 0, 0, 0)
	primaryData, err = mirrorFetch(syncCtx, client, cfg, primaryHref)
	if err != nil {
		if m.logger != nil {
			m.logger.Write("rpm-mirror", util.LOG_ERROR, "sync failed repo=%s path=%s step=fetch_primary err=%v", task.RepoID, task.MirrorPath, err)
		}
		_ = m.store.FinishRPMMirrorTask(task.RepoID, task.MirrorPath, false, "", err.Error())
		_ = m.store.FinishRPMMirrorRun(runID, time.Now().UTC().Unix(), "failed", "fetch_primary", 0, 0, 0, 0, "", err.Error())
		return
	}
	// The primary file is usually gzip-compressed; decompress when the
	// href says so.
	if strings.HasSuffix(strings.ToLower(primaryHref), ".gz") {
		primaryData, err = gunzipBytes(primaryData)
		if err != nil {
			if m.logger != nil {
				m.logger.Write("rpm-mirror", util.LOG_ERROR, "sync failed repo=%s path=%s step=decode_primary err=%v", task.RepoID, task.MirrorPath, err)
			}
			_ = m.store.FinishRPMMirrorTask(task.RepoID, task.MirrorPath, false, "", err.Error())
			_ = m.store.FinishRPMMirrorRun(runID, time.Now().UTC().Unix(), "failed", "fetch_primary", 0, 0, 0, 0, "", err.Error())
			return
		}
	}
	expected, duplicateCount, err = parsePrimaryPackages(primaryData)
	if err != nil {
		if m.logger != nil {
			m.logger.Write("rpm-mirror", util.LOG_ERROR, "sync failed repo=%s path=%s step=parse_primary err=%v", task.RepoID, task.MirrorPath, err)
		}
		_ = m.store.FinishRPMMirrorTask(task.RepoID, task.MirrorPath, false, "", err.Error())
		_ = m.store.FinishRPMMirrorRun(runID, time.Now().UTC().Unix(), "failed", "fetch_primary", 0, 0, 0, 0, "", err.Error())
		return
	}
	if m.logger != nil {
		m.logger.Write("rpm-mirror", util.LOG_INFO, "primary parsed repo=%s path=%s primary_href=%s packages=%d", task.RepoID, task.MirrorPath, primaryHref, len(expected))
		if duplicateCount > 0 {
			m.logger.Write("rpm-mirror", util.LOG_WARN, "primary has duplicate package paths repo=%s path=%s primary_href=%s duplicates=%d", task.RepoID, task.MirrorPath, primaryHref, duplicateCount)
		}
	}
	// Reconcile the local directory with the expected package set.
	total, done, failed, deleted, changed, err = m.applyMirror(syncCtx, task, localRoot, client, cfg, expected)
	if err != nil {
		// A context.Canceled error means CancelTask fired; record that
		// distinctly from a genuine failure.
		canceled = errors.Is(err, context.Canceled)
		if m.logger != nil {
			if canceled {
				m.logger.Write("rpm-mirror", util.LOG_WARN, "sync canceled repo=%s path=%s step=apply total=%d done=%d failed=%d deleted=%d err=%v", task.RepoID, task.MirrorPath, total, done, failed, deleted, err)
			} else {
				m.logger.Write("rpm-mirror", util.LOG_ERROR, "sync failed repo=%s path=%s step=apply total=%d done=%d failed=%d deleted=%d err=%v", task.RepoID, task.MirrorPath, total, done, failed, deleted, err)
			}
		}
		if canceled {
			_ = m.store.FinishRPMMirrorTask(task.RepoID, task.MirrorPath, false, "", "sync canceled by user")
			_ = m.store.FinishRPMMirrorRun(runID, time.Now().UTC().Unix(), "failed", "canceled", total, done, failed, deleted, "", "sync canceled by user")
		} else {
			_ = m.store.FinishRPMMirrorTask(task.RepoID, task.MirrorPath, false, "", err.Error())
			_ = m.store.FinishRPMMirrorRun(runID, time.Now().UTC().Unix(), "failed", "apply", total, done, failed, deleted, "", err.Error())
		}
		return
	}
	// Only rebuild repodata when at least one file was added/removed.
	if m.meta != nil && changed > 0 {
		if m.logger != nil {
			m.logger.Write("rpm-mirror", util.LOG_INFO, "repodata schedule repo=%s path=%s reason=sync_changed changed=%d", task.RepoID, task.MirrorPath, changed)
		}
		m.meta.Schedule(localRoot)
	}
	_ = m.store.FinishRPMMirrorTask(task.RepoID, task.MirrorPath, true, revision, "")
	_ = m.store.FinishRPMMirrorRun(runID, time.Now().UTC().Unix(), "success", "done", total, done, failed, deleted, revision, "")
	_ = m.store.CleanupRPMMirrorRunsRetention(task.RepoID, task.MirrorPath, 200, 30)
	if m.logger != nil {
		m.logger.Write("rpm-mirror", util.LOG_INFO, "sync done repo=%s path=%s status=success total=%d done=%d failed=%d deleted=%d revision=%s", task.RepoID, task.MirrorPath, total, done, failed, deleted, revision)
	}
}
|
||||
|
||||
// applyMirror reconciles the local directory tree under localRoot with
// the expected package set: it first deletes local RPMs not present in
// `expected`, then downloads every expected package whose local copy is
// missing or fails its checksum. Returns (total, done, failed, deleted,
// changed, err) where `changed` counts files added or removed locally
// and `total` is len(expected). Cancellation of ctx is checked before
// each file and returns ctx.Err() with the counters so far. If any
// downloads failed a summary error is returned after the full pass.
func (m *MirrorManager) applyMirror(ctx context.Context, task models.RPMMirrorTask, localRoot string, client *http.Client, cfg mirrorHTTPConfig, expected map[string]mirrorChecksum) (int64, int64, int64, int64, int64, error) {
	var local map[string]bool
	var total int64
	var done int64
	var failed int64
	var deleted int64
	var changed int64
	var path string
	var checksum mirrorChecksum
	var fullPath string
	var localSum string
	var needDownload bool
	var err error
	local, err = listLocalRPMs(localRoot)
	if err != nil {
		return 0, 0, 0, 0, 0, err
	}
	total = int64(len(expected))
	_ = m.store.UpdateRPMMirrorTaskProgress(task.RepoID, task.MirrorPath, "apply", total, 0, 0, 0)
	// Pass 1: delete local files that are no longer in the upstream
	// package list.
	for path = range local {
		select {
		case <-ctx.Done():
			return total, done, failed, deleted, changed, ctx.Err()
		default:
		}
		// NOTE(review): an expected entry with an empty checksum Value
		// is treated as "not expected" and deleted here — confirm that
		// upstream primary data always carries checksums.
		if expected[path].Value != "" {
			continue
		}
		if m.logger != nil {
			m.logger.Write("rpm-mirror", util.LOG_DEBUG, "delete local stale repo=%s path=%s file=%s", task.RepoID, task.MirrorPath, path)
		}
		err = os.Remove(filepath.Join(localRoot, filepath.FromSlash(path)))
		if err == nil || os.IsNotExist(err) {
			deleted = deleted + 1
			changed = changed + 1
		} else {
			// Deletion failures are logged but do not abort the sync.
			if m.logger != nil {
				m.logger.Write("rpm-mirror", util.LOG_WARN, "delete local stale failed repo=%s path=%s file=%s err=%v", task.RepoID, task.MirrorPath, path, err)
			}
		}
	}
	// Pass 2: download every expected package that is missing locally
	// or whose checksum does not match.
	for path, checksum = range expected {
		select {
		case <-ctx.Done():
			return total, done, failed, deleted, changed, ctx.Err()
		default:
		}
		fullPath = filepath.Join(localRoot, filepath.FromSlash(path))
		needDownload = true
		_, err = os.Stat(fullPath)
		if err == nil {
			// File exists: keep it when its hash matches (or no
			// expected checksum value is available to compare).
			localSum, err = fileHexByAlgo(fullPath, checksum.Algo)
			if err == nil && (checksum.Value == "" || strings.EqualFold(localSum, checksum.Value)) {
				needDownload = false
			}
		}
		if needDownload {
			if m.logger != nil {
				m.logger.Write("rpm-mirror", util.LOG_DEBUG, "download start repo=%s path=%s file=%s checksum_type=%s checksum=%s", task.RepoID, task.MirrorPath, path, checksum.Algo, checksum.Value)
			}
			err = mirrorDownload(ctx, client, cfg, path, fullPath, checksum.Algo, checksum.Value)
			if err != nil {
				// A failed download is counted but the loop continues;
				// the summary error is raised after the full pass.
				failed = failed + 1
				if m.logger != nil {
					m.logger.Write("rpm-mirror", util.LOG_WARN, "download failed repo=%s path=%s file=%s err=%v", task.RepoID, task.MirrorPath, path, err)
				}
				_ = m.store.UpdateRPMMirrorTaskProgress(task.RepoID, task.MirrorPath, "apply", total, done, failed, deleted)
				continue
			}
			changed = changed + 1
			if m.logger != nil {
				m.logger.Write("rpm-mirror", util.LOG_DEBUG, "download done repo=%s path=%s file=%s", task.RepoID, task.MirrorPath, path)
			}
		} else {
			if m.logger != nil {
				m.logger.Write("rpm-mirror", util.LOG_DEBUG, "download skip repo=%s path=%s file=%s reason=up-to-date", task.RepoID, task.MirrorPath, path)
			}
		}
		done = done + 1
		_ = m.store.UpdateRPMMirrorTaskProgress(task.RepoID, task.MirrorPath, "apply", total, done, failed, deleted)
	}
	if failed > 0 {
		return total, done, failed, deleted, changed, errors.New("some mirror files failed to sync")
	}
	return total, done, failed, deleted, changed, nil
}
|
||||
|
||||
func buildMirrorHTTPConfig(task models.RPMMirrorTask) (mirrorHTTPConfig, error) {
|
||||
var cfg mirrorHTTPConfig
|
||||
var u *url.URL
|
||||
var err error
|
||||
cfg = mirrorHTTPConfig{
|
||||
BaseURL: strings.TrimRight(strings.TrimSpace(task.RemoteURL), "/"),
|
||||
ConnectHost: strings.TrimSpace(task.ConnectHost),
|
||||
HostHeader: strings.TrimSpace(task.HostHeader),
|
||||
TLSServerName: strings.TrimSpace(task.TLSServerName),
|
||||
TLSInsecure: task.TLSInsecureSkipVerify,
|
||||
}
|
||||
if cfg.BaseURL == "" {
|
||||
return cfg, errors.New("remote url is empty")
|
||||
}
|
||||
u, err = url.Parse(cfg.BaseURL)
|
||||
if err != nil {
|
||||
return cfg, err
|
||||
}
|
||||
cfg.DefaultHost = u.Host
|
||||
cfg.DefaultServer = u.Hostname()
|
||||
if cfg.DefaultHost == "" {
|
||||
return cfg, errors.New("remote url host is empty")
|
||||
}
|
||||
return cfg, nil
|
||||
}
|
||||
|
||||
func buildMirrorHTTPClient(cfg mirrorHTTPConfig) *http.Client {
|
||||
var transport *http.Transport
|
||||
transport = &http.Transport{
|
||||
Proxy: http.ProxyFromEnvironment,
|
||||
TLSClientConfig: &tls.Config{
|
||||
InsecureSkipVerify: cfg.TLSInsecure,
|
||||
ServerName: effectiveServerName(cfg),
|
||||
},
|
||||
}
|
||||
if cfg.ConnectHost != "" {
|
||||
transport.DialContext = func(ctx context.Context, network string, addr string) (net.Conn, error) {
|
||||
var d net.Dialer
|
||||
_ = addr
|
||||
return d.DialContext(ctx, network, cfg.ConnectHost)
|
||||
}
|
||||
}
|
||||
return &http.Client{
|
||||
Transport: transport,
|
||||
Timeout: 60 * time.Second,
|
||||
}
|
||||
}
|
||||
|
||||
func effectiveHostHeader(cfg mirrorHTTPConfig) string {
|
||||
if strings.TrimSpace(cfg.HostHeader) != "" {
|
||||
return strings.TrimSpace(cfg.HostHeader)
|
||||
}
|
||||
return cfg.DefaultHost
|
||||
}
|
||||
|
||||
func effectiveServerName(cfg mirrorHTTPConfig) string {
|
||||
var host string
|
||||
if strings.TrimSpace(cfg.TLSServerName) != "" {
|
||||
return strings.TrimSpace(cfg.TLSServerName)
|
||||
}
|
||||
host = strings.TrimSpace(cfg.HostHeader)
|
||||
if host != "" {
|
||||
if strings.Contains(host, ":") {
|
||||
return strings.Split(host, ":")[0]
|
||||
}
|
||||
return host
|
||||
}
|
||||
return cfg.DefaultServer
|
||||
}
|
||||
|
||||
func mirrorFetch(ctx context.Context, client *http.Client, cfg mirrorHTTPConfig, rel string) ([]byte, error) {
|
||||
var fullURL string
|
||||
var req *http.Request
|
||||
var res *http.Response
|
||||
var body []byte
|
||||
var err error
|
||||
fullURL = joinRemoteURL(cfg.BaseURL, rel)
|
||||
req, err = http.NewRequestWithContext(ctx, http.MethodGet, fullURL, nil)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
req.Host = effectiveHostHeader(cfg)
|
||||
res, err = client.Do(req)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer res.Body.Close()
|
||||
if res.StatusCode < 200 || res.StatusCode >= 300 {
|
||||
return nil, errors.New("upstream request failed: " + res.Status)
|
||||
}
|
||||
body, err = io.ReadAll(res.Body)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return body, nil
|
||||
}
|
||||
|
||||
// mirrorDownload fetches one upstream file into dstPath. The body is
// streamed to a sibling "<dst>.mirror.tmp" file while being hashed in the
// same pass; only after the (optional) checksum verifies is the temp file
// renamed onto dstPath, so readers never observe a partial download. On any
// failure the temp file is removed and dstPath is left untouched.
//
// checksumType names the digest algorithm (see newHashWriter); checksum is
// the expected hex digest, compared case-insensitively. An empty checksum
// skips verification.
func mirrorDownload(ctx context.Context, client *http.Client, cfg mirrorHTTPConfig, rel string, dstPath string, checksumType string, checksum string) error {
	var fullURL string
	var req *http.Request
	var res *http.Response
	var tempPath string
	var out *os.File
	var hash hashWriter
	var copied int64
	var actualSum string
	var contentType string
	var finalURL string
	var err error
	err = os.MkdirAll(filepath.Dir(dstPath), 0o755)
	if err != nil {
		return err
	}
	fullURL = joinRemoteURL(cfg.BaseURL, rel)
	req, err = http.NewRequestWithContext(ctx, http.MethodGet, fullURL, nil)
	if err != nil {
		return err
	}
	// Upstream may sit behind a proxy/CDN: use the configured Host header.
	req.Host = effectiveHostHeader(cfg)
	res, err = client.Do(req)
	if err != nil {
		return err
	}
	defer res.Body.Close()
	if res.StatusCode < 200 || res.StatusCode >= 300 {
		return errors.New("upstream request failed: " + res.Status)
	}
	// Captured only for the checksum-mismatch diagnostic below.
	contentType = strings.TrimSpace(res.Header.Get("Content-Type"))
	finalURL = ""
	if res.Request != nil && res.Request.URL != nil {
		finalURL = res.Request.URL.String()
	}
	tempPath = dstPath + ".mirror.tmp"
	out, err = os.Create(tempPath)
	if err != nil {
		return err
	}
	// Backstop close for early returns; the success path closes explicitly
	// below so write/flush errors surface before the rename.
	defer out.Close()
	hash, err = newHashWriter(checksumType)
	if err != nil {
		_ = os.Remove(tempPath)
		return err
	}
	// Single pass: bytes go to disk and into the hash simultaneously.
	copied, err = io.Copy(io.MultiWriter(out, hash), res.Body)
	_ = copied
	if err != nil {
		_ = os.Remove(tempPath)
		return err
	}
	err = out.Close()
	if err != nil {
		_ = os.Remove(tempPath)
		return err
	}
	if strings.TrimSpace(checksum) != "" {
		actualSum = hash.Sum()
		if !strings.EqualFold(actualSum, strings.TrimSpace(checksum)) {
			_ = os.Remove(tempPath)
			return errors.New(
				"download checksum mismatch for " + rel +
					" type=" + normalizeChecksumAlgo(checksumType) +
					" expected=" + strings.TrimSpace(checksum) +
					" actual=" + actualSum +
					" bytes=" + int64ToString(copied) +
					" content_type=" + contentType +
					" url=" + finalURL)
		}
	}
	// Publish via rename: temp and destination share a directory, so this
	// replaces the target in one step rather than exposing partial writes.
	err = os.Rename(tempPath, dstPath)
	if err != nil {
		_ = os.Remove(tempPath)
		return err
	}
	return nil
}
|
||||
|
||||
// joinRemoteURL resolves a repository-relative href against the mirror base
// URL. The base is treated as a directory root (a trailing slash is ensured
// before resolving) and backslashes in rel are normalized to forward
// slashes. If either part fails to parse, a plain string join is used.
func joinRemoteURL(base string, rel string) string {
	cleanRel := strings.ReplaceAll(rel, "\\", "/")
	fallback := func() string {
		return strings.TrimRight(base, "/") + "/" + strings.TrimLeft(cleanRel, "/")
	}
	baseURL, err := url.Parse(strings.TrimSpace(base))
	if err != nil || baseURL == nil {
		return fallback()
	}
	// Treat base as a directory root for repository-relative href resolution.
	if !strings.HasSuffix(baseURL.Path, "/") {
		baseURL.Path += "/"
	}
	relURL, err := url.Parse(strings.TrimSpace(cleanRel))
	if err != nil || relURL == nil {
		return fallback()
	}
	return baseURL.ResolveReference(relURL).String()
}
|
||||
|
||||
func parseRepomdPrimaryHref(data []byte) (string, error) {
|
||||
var doc repomdDoc
|
||||
var i int
|
||||
var href string
|
||||
var err error
|
||||
err = xml.Unmarshal(data, &doc)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
for i = 0; i < len(doc.Data); i++ {
|
||||
if strings.EqualFold(strings.TrimSpace(doc.Data[i].Type), "primary") {
|
||||
href = strings.TrimSpace(doc.Data[i].Location.Href)
|
||||
if href != "" {
|
||||
return href, nil
|
||||
}
|
||||
}
|
||||
}
|
||||
return "", errors.New("primary metadata not found in repomd")
|
||||
}
|
||||
|
||||
func parsePrimaryPackages(data []byte) (map[string]mirrorChecksum, int, error) {
|
||||
var doc primaryDoc
|
||||
var out map[string]mirrorChecksum
|
||||
var i int
|
||||
var path string
|
||||
var checksum string
|
||||
var checksumType string
|
||||
var fileTime int64
|
||||
var buildTime int64
|
||||
var existing mirrorChecksum
|
||||
var ok bool
|
||||
var duplicates int
|
||||
var err error
|
||||
err = xml.Unmarshal(data, &doc)
|
||||
if err != nil {
|
||||
return nil, 0, err
|
||||
}
|
||||
out = make(map[string]mirrorChecksum)
|
||||
for i = 0; i < len(doc.Packages); i++ {
|
||||
path = strings.TrimSpace(doc.Packages[i].Location.Href)
|
||||
if path == "" {
|
||||
continue
|
||||
}
|
||||
if !strings.HasSuffix(strings.ToLower(path), ".rpm") {
|
||||
continue
|
||||
}
|
||||
checksum = strings.TrimSpace(doc.Packages[i].Checksum.Value)
|
||||
checksumType = strings.TrimSpace(doc.Packages[i].Checksum.Type)
|
||||
fileTime = parseTimeAttr(doc.Packages[i].Time.File)
|
||||
buildTime = parseTimeAttr(doc.Packages[i].Time.Build)
|
||||
if existing, ok = out[path]; ok {
|
||||
duplicates = duplicates + 1
|
||||
if !shouldReplaceDuplicate(existing, buildTime, fileTime, checksum) {
|
||||
continue
|
||||
}
|
||||
}
|
||||
out[path] = mirrorChecksum{
|
||||
Algo: normalizeChecksumAlgo(checksumType),
|
||||
Value: strings.ToLower(checksum),
|
||||
BuildTime: buildTime,
|
||||
FileTime: fileTime,
|
||||
}
|
||||
}
|
||||
return out, duplicates, nil
|
||||
}
|
||||
|
||||
func listLocalRPMs(root string) (map[string]bool, error) {
|
||||
var out map[string]bool
|
||||
var err error
|
||||
out = make(map[string]bool)
|
||||
err = filepath.WalkDir(root, func(path string, d fs.DirEntry, walkErr error) error {
|
||||
var rel string
|
||||
if walkErr != nil {
|
||||
return walkErr
|
||||
}
|
||||
if d == nil {
|
||||
return nil
|
||||
}
|
||||
if d.IsDir() {
|
||||
if strings.EqualFold(d.Name(), "repodata") {
|
||||
return filepath.SkipDir
|
||||
}
|
||||
return nil
|
||||
}
|
||||
if !strings.HasSuffix(strings.ToLower(d.Name()), ".rpm") {
|
||||
return nil
|
||||
}
|
||||
rel, err = filepath.Rel(root, path)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
out[filepath.ToSlash(rel)] = true
|
||||
return nil
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return out, nil
|
||||
}
|
||||
|
||||
// sha256HexBytes returns the lowercase hex SHA-256 digest of data.
func sha256HexBytes(data []byte) string {
	digest := sha256.Sum256(data)
	return hex.EncodeToString(digest[:])
}
|
||||
|
||||
func fileHexByAlgo(path string, algo string) (string, error) {
|
||||
var file *os.File
|
||||
var hash hashWriter
|
||||
var copied int64
|
||||
var err error
|
||||
file, err = os.Open(path)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
defer file.Close()
|
||||
hash, err = newHashWriter(algo)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
copied, err = io.Copy(hash, file)
|
||||
_ = copied
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
return hash.Sum(), nil
|
||||
}
|
||||
|
||||
func gunzipBytes(data []byte) ([]byte, error) {
|
||||
var reader *gzip.Reader
|
||||
var input *bytes.Reader
|
||||
var out []byte
|
||||
var err error
|
||||
input = bytes.NewReader(data)
|
||||
reader, err = gzip.NewReader(input)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer reader.Close()
|
||||
out, err = io.ReadAll(reader)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return out, nil
|
||||
}
|
||||
|
||||
// hashWriter is an io.Writer that accumulates a checksum over everything
// written to it; Sum returns the digest of the bytes seen so far as a
// lowercase hex string.
type hashWriter interface {
	io.Writer
	Sum() string
}
|
||||
|
||||
// shaWriter adapts a stdlib hash.Hash to the hashWriter interface.
type shaWriter struct {
	// h is the underlying digest implementation (sha*/md5).
	h hash.Hash
}
|
||||
|
||||
func newHashWriter(algo string) (hashWriter, error) {
|
||||
var w *shaWriter
|
||||
var normalized string
|
||||
var h hash.Hash
|
||||
normalized = normalizeChecksumAlgo(algo)
|
||||
switch normalized {
|
||||
case "", "sha256":
|
||||
h = sha256.New()
|
||||
case "sha", "sha1":
|
||||
h = sha1.New()
|
||||
case "sha224":
|
||||
h = sha256.New224()
|
||||
case "sha384":
|
||||
h = sha512.New384()
|
||||
case "sha512":
|
||||
h = sha512.New()
|
||||
case "md5":
|
||||
h = md5.New()
|
||||
default:
|
||||
return nil, errors.New("unsupported checksum type: " + normalized)
|
||||
}
|
||||
w = &shaWriter{h: h}
|
||||
return w, nil
|
||||
}
|
||||
|
||||
// Write feeds p into the underlying hash. Per the hash.Hash contract,
// Write never returns an error.
func (w *shaWriter) Write(p []byte) (int, error) {
	return w.h.Write(p)
}
|
||||
|
||||
func (w *shaWriter) Sum() string {
|
||||
var raw []byte
|
||||
raw = w.h.Sum(nil)
|
||||
return hex.EncodeToString(raw)
|
||||
}
|
||||
|
||||
// normalizeChecksumAlgo canonicalizes a checksum-type name: lowercased,
// trimmed, with "-" and "_" removed (so "SHA-256" -> "sha256"). Unknown
// names pass through normalized; callers decide whether they are supported.
//
// Fix: the previous if-chain compared the normalized value against each
// known algorithm name and returned it unchanged in every branch — pure
// dead code, now removed.
func normalizeChecksumAlgo(algo string) string {
	out := strings.ToLower(strings.TrimSpace(algo))
	out = strings.ReplaceAll(out, "-", "")
	return strings.ReplaceAll(out, "_", "")
}
|
||||
|
||||
// int64ToString renders v in base 10; small helper for composing log and
// error strings without fmt.
func int64ToString(v int64) string {
	return strconv.FormatInt(v, 10)
}
|
||||
|
||||
// mirrorTaskKey builds a composite map key from a repo ID and path, joined
// with a NUL byte — a separator that should not appear in either component.
func mirrorTaskKey(repoID string, path string) string {
	return repoID + "\x00" + path
}
|
||||
|
||||
func ensureRepodata(task models.RPMMirrorTask, localRoot string, meta *MetaManager, logger *util.Logger) {
|
||||
var repomdPath string
|
||||
var statErr error
|
||||
repomdPath = filepath.Join(localRoot, "repodata", "repomd.xml")
|
||||
_, statErr = os.Stat(repomdPath)
|
||||
if statErr == nil {
|
||||
return
|
||||
}
|
||||
if logger != nil {
|
||||
logger.Write("rpm-mirror", util.LOG_INFO, "repodata schedule repo=%s path=%s reason=missing repomd=%s", task.RepoID, task.MirrorPath, repomdPath)
|
||||
}
|
||||
meta.Schedule(localRoot)
|
||||
}
|
||||
|
||||
// parseTimeAttr parses a repomd time attribute (epoch seconds as a decimal
// string). Empty or malformed values yield 0.
func parseTimeAttr(value string) int64 {
	parsed, err := strconv.ParseInt(strings.TrimSpace(value), 10, 64)
	if err != nil {
		// Covers both the empty string and non-numeric input.
		return 0
	}
	return parsed
}
|
||||
|
||||
func shouldReplaceDuplicate(existing mirrorChecksum, newBuildTime int64, newFileTime int64, newChecksum string) bool {
|
||||
var existingChecksum string
|
||||
if newBuildTime > existing.BuildTime {
|
||||
return true
|
||||
}
|
||||
if newBuildTime < existing.BuildTime {
|
||||
return false
|
||||
}
|
||||
if newFileTime > existing.FileTime {
|
||||
return true
|
||||
}
|
||||
if newFileTime < existing.FileTime {
|
||||
return false
|
||||
}
|
||||
existingChecksum = strings.TrimSpace(existing.Value)
|
||||
if existingChecksum == "" && strings.TrimSpace(newChecksum) != "" {
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
@@ -1,15 +1,11 @@
|
||||
package rpm
|
||||
|
||||
import "bufio"
|
||||
import "bytes"
|
||||
import "errors"
|
||||
import "io/fs"
|
||||
import "os/exec"
|
||||
import "path/filepath"
|
||||
import "sort"
|
||||
import "strconv"
|
||||
import "strings"
|
||||
import "sync"
|
||||
|
||||
import repokit "repokit"
|
||||
|
||||
type PackageSummary struct {
|
||||
Filename string `json:"filename"`
|
||||
@@ -30,35 +26,23 @@ type PackageDetail struct {
|
||||
Files []string `json:"files"`
|
||||
Requires []string `json:"requires"`
|
||||
Provides []string `json:"provides"`
|
||||
Changelogs []PackageChangeLog `json:"changelogs"`
|
||||
}
|
||||
|
||||
var rpmPath string
|
||||
var rpmOnce sync.Once
|
||||
var rpmErr error
|
||||
|
||||
func ensureRPM() error {
|
||||
rpmOnce.Do(func() {
|
||||
var path string
|
||||
path, rpmErr = exec.LookPath("rpm")
|
||||
if rpmErr != nil {
|
||||
return
|
||||
}
|
||||
rpmPath = path
|
||||
})
|
||||
return rpmErr
|
||||
type PackageChangeLog struct {
|
||||
Author string `json:"author"`
|
||||
Date int64 `json:"date"`
|
||||
Text string `json:"text"`
|
||||
}
|
||||
|
||||
func ListPackages(repoPath string) ([]PackageSummary, error) {
|
||||
var err error
|
||||
var packages []PackageSummary
|
||||
var walkErr error
|
||||
err = ensureRPM()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
var err error
|
||||
walkErr = filepath.WalkDir(repoPath, func(path string, entry fs.DirEntry, entryErr error) error {
|
||||
var lower string
|
||||
var rel string
|
||||
var pkg *repokit.RpmPackage
|
||||
var summary PackageSummary
|
||||
if entryErr != nil {
|
||||
return entryErr
|
||||
@@ -74,11 +58,11 @@ func ListPackages(repoPath string) ([]PackageSummary, error) {
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
summary, err = querySummary(path)
|
||||
pkg, err = repokit.RpmPackageFromRpmBase(path, 0)
|
||||
if err != nil {
|
||||
return nil
|
||||
}
|
||||
summary.Filename = filepath.ToSlash(rel)
|
||||
summary = packageSummaryFromRepokit(pkg, filepath.ToSlash(rel))
|
||||
packages = append(packages, summary)
|
||||
return nil
|
||||
})
|
||||
@@ -86,137 +70,135 @@ func ListPackages(repoPath string) ([]PackageSummary, error) {
|
||||
return nil, walkErr
|
||||
}
|
||||
sort.Slice(packages, func(i int, j int) bool {
|
||||
if packages[i].Name == packages[j].Name {
|
||||
return packages[i].Filename < packages[j].Filename
|
||||
}
|
||||
return packages[i].Name < packages[j].Name
|
||||
})
|
||||
return packages, nil
|
||||
}
|
||||
|
||||
func GetPackageDetail(repoPath string, filename string) (PackageDetail, error) {
|
||||
var err error
|
||||
var detail PackageDetail
|
||||
var fullPath string
|
||||
var data []string
|
||||
var fileList []string
|
||||
var requires []string
|
||||
var provides []string
|
||||
var buildTime int64
|
||||
var size int64
|
||||
err = ensureRPM()
|
||||
if err != nil {
|
||||
return detail, err
|
||||
}
|
||||
var pkg *repokit.RpmPackage
|
||||
var err error
|
||||
fullPath = filepath.Join(repoPath, filepath.FromSlash(filename))
|
||||
data, err = queryFields(fullPath, "%{NAME}\n%{VERSION}\n%{RELEASE}\n%{ARCH}\n%{SUMMARY}\n%{DESCRIPTION}\n%{LICENSE}\n%{URL}\n%{BUILDTIME}\n%{SIZE}\n")
|
||||
pkg, err = repokit.RpmPackageFromRpmBase(fullPath, 256)
|
||||
if err != nil {
|
||||
return detail, err
|
||||
}
|
||||
if len(data) < 10 {
|
||||
return detail, errors.New("rpm query returned incomplete metadata")
|
||||
}
|
||||
buildTime, _ = strconv.ParseInt(strings.TrimSpace(data[8]), 10, 64)
|
||||
size, _ = strconv.ParseInt(strings.TrimSpace(data[9]), 10, 64)
|
||||
fileList, _ = queryList(fullPath)
|
||||
requires, _ = queryLines(fullPath, "--requires")
|
||||
provides, _ = queryLines(fullPath, "--provides")
|
||||
detail = PackageDetail{
|
||||
PackageSummary: PackageSummary{
|
||||
Filename: filename,
|
||||
Name: strings.TrimSpace(data[0]),
|
||||
Version: strings.TrimSpace(data[1]),
|
||||
Release: strings.TrimSpace(data[2]),
|
||||
Arch: strings.TrimSpace(data[3]),
|
||||
Summary: strings.TrimSpace(data[4]),
|
||||
},
|
||||
Description: strings.TrimSpace(data[5]),
|
||||
License: strings.TrimSpace(data[6]),
|
||||
URL: strings.TrimSpace(data[7]),
|
||||
BuildTime: buildTime,
|
||||
Size: size,
|
||||
Files: fileList,
|
||||
Requires: requires,
|
||||
Provides: provides,
|
||||
}
|
||||
detail = packageDetailFromRepokit(pkg, filename)
|
||||
return detail, nil
|
||||
}
|
||||
|
||||
func querySummary(path string) (PackageSummary, error) {
|
||||
var fields []string
|
||||
var err error
|
||||
func packageSummaryFromRepokit(pkg *repokit.RpmPackage, filename string) PackageSummary {
|
||||
var summary PackageSummary
|
||||
fields, err = queryFields(path, "%{NAME}\n%{VERSION}\n%{RELEASE}\n%{ARCH}\n%{SUMMARY}\n")
|
||||
if err != nil {
|
||||
return summary, err
|
||||
summary = PackageSummary{
|
||||
Filename: filename,
|
||||
Name: strings.TrimSpace(pkg.Name),
|
||||
Version: strings.TrimSpace(pkg.Version),
|
||||
Release: strings.TrimSpace(pkg.Release),
|
||||
Arch: strings.TrimSpace(pkg.Arch),
|
||||
Summary: strings.TrimSpace(pkg.Summary),
|
||||
}
|
||||
if len(fields) < 5 {
|
||||
return summary, errors.New("rpm query returned incomplete metadata")
|
||||
}
|
||||
summary.Name = strings.TrimSpace(fields[0])
|
||||
summary.Version = strings.TrimSpace(fields[1])
|
||||
summary.Release = strings.TrimSpace(fields[2])
|
||||
summary.Arch = strings.TrimSpace(fields[3])
|
||||
summary.Summary = strings.TrimSpace(fields[4])
|
||||
return summary, nil
|
||||
return summary
|
||||
}
|
||||
|
||||
func queryFields(path string, format string) ([]string, error) {
|
||||
var output []byte
|
||||
var err error
|
||||
var list []string
|
||||
output, err = runRPM(path, "-qp", "--qf", format)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
list = strings.Split(strings.TrimSuffix(string(output), "\n"), "\n")
|
||||
return list, nil
|
||||
}
|
||||
|
||||
func queryList(path string) ([]string, error) {
|
||||
var output []byte
|
||||
var err error
|
||||
var scanner *bufio.Scanner
|
||||
func packageDetailFromRepokit(pkg *repokit.RpmPackage, filename string) PackageDetail {
|
||||
var files []string
|
||||
output, err = runRPM(path, "-qlp")
|
||||
if err != nil {
|
||||
return nil, err
|
||||
var file repokit.RpmPackageFile
|
||||
var changelogs []PackageChangeLog
|
||||
var changelog repokit.RpmChangelogEntry
|
||||
var detail PackageDetail
|
||||
files = make([]string, 0, len(pkg.Files))
|
||||
for _, file = range pkg.Files {
|
||||
if file.FullPath == "" {
|
||||
continue
|
||||
}
|
||||
files = append(files, file.FullPath)
|
||||
}
|
||||
scanner = bufio.NewScanner(bytes.NewReader(output))
|
||||
for scanner.Scan() {
|
||||
files = append(files, scanner.Text())
|
||||
changelogs = make([]PackageChangeLog, 0, len(pkg.Changelogs))
|
||||
for _, changelog = range pkg.Changelogs {
|
||||
changelogs = append(changelogs, PackageChangeLog{
|
||||
Author: strings.TrimSpace(changelog.Author),
|
||||
Date: changelog.Date,
|
||||
Text: strings.TrimSpace(changelog.Changelog),
|
||||
})
|
||||
}
|
||||
return files, nil
|
||||
sort.SliceStable(changelogs, func(i int, j int) bool {
|
||||
return changelogs[i].Date > changelogs[j].Date
|
||||
})
|
||||
sort.Strings(files)
|
||||
detail = PackageDetail{
|
||||
PackageSummary: packageSummaryFromRepokit(pkg, filename),
|
||||
Description: strings.TrimSpace(pkg.Description),
|
||||
License: strings.TrimSpace(pkg.RpmLicense),
|
||||
URL: strings.TrimSpace(pkg.Url),
|
||||
BuildTime: pkg.TimeBuild,
|
||||
Size: pkg.SizePackage,
|
||||
Files: files,
|
||||
Requires: dependencyListToStrings(pkg.Requires),
|
||||
Provides: dependencyListToStrings(pkg.Provides),
|
||||
Changelogs: changelogs,
|
||||
}
|
||||
return detail
|
||||
}
|
||||
|
||||
func queryLines(path string, flag string) ([]string, error) {
|
||||
var output []byte
|
||||
var err error
|
||||
var scanner *bufio.Scanner
|
||||
func dependencyListToStrings(deps []repokit.RpmDependency) []string {
|
||||
var lines []string
|
||||
output, err = runRPM(path, "-qp", flag)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
var dep repokit.RpmDependency
|
||||
lines = make([]string, 0, len(deps))
|
||||
for _, dep = range deps {
|
||||
lines = append(lines, dependencyToString(dep))
|
||||
}
|
||||
scanner = bufio.NewScanner(bytes.NewReader(output))
|
||||
for scanner.Scan() {
|
||||
lines = append(lines, scanner.Text())
|
||||
}
|
||||
return lines, nil
|
||||
return lines
|
||||
}
|
||||
|
||||
func runRPM(path string, args ...string) ([]byte, error) {
|
||||
var err error
|
||||
var cmd *exec.Cmd
|
||||
var output []byte
|
||||
var full []string
|
||||
err = ensureRPM()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
func dependencyToString(dep repokit.RpmDependency) string {
|
||||
var op string
|
||||
var evr string
|
||||
var line string
|
||||
op = normalizeDependencyOp(dep.Flags)
|
||||
evr = dependencyEVR(dep)
|
||||
line = dep.Name
|
||||
if op == "" || evr == "" {
|
||||
return line
|
||||
}
|
||||
full = append([]string{}, args...)
|
||||
full = append(full, path)
|
||||
cmd = exec.Command(rpmPath, full...)
|
||||
output, err = cmd.Output()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return output, nil
|
||||
line = line + " " + op + " " + evr
|
||||
return line
|
||||
}
|
||||
|
||||
func normalizeDependencyOp(flag string) string {
|
||||
switch strings.ToUpper(strings.TrimSpace(flag)) {
|
||||
case "LT":
|
||||
return "<"
|
||||
case "GT":
|
||||
return ">"
|
||||
case "EQ":
|
||||
return "="
|
||||
case "LE":
|
||||
return "<="
|
||||
case "GE":
|
||||
return ">="
|
||||
default:
|
||||
return strings.TrimSpace(flag)
|
||||
}
|
||||
}
|
||||
|
||||
func dependencyEVR(dep repokit.RpmDependency) string {
|
||||
var value string
|
||||
var version string
|
||||
version = strings.TrimSpace(dep.Version)
|
||||
if version == "" {
|
||||
return ""
|
||||
}
|
||||
value = version
|
||||
if strings.TrimSpace(dep.Release) != "" {
|
||||
value = value + "-" + strings.TrimSpace(dep.Release)
|
||||
}
|
||||
if strings.TrimSpace(dep.Epoch) != "" && strings.TrimSpace(dep.Epoch) != "0" {
|
||||
value = strings.TrimSpace(dep.Epoch) + ":" + value
|
||||
}
|
||||
return value
|
||||
}
|
||||
|
||||
@@ -8,10 +8,12 @@ import "sort"
|
||||
import "strings"
|
||||
|
||||
type TreeEntry struct {
|
||||
Name string `json:"name"`
|
||||
Path string `json:"path"`
|
||||
Type string `json:"type"`
|
||||
Size int64 `json:"size"`
|
||||
Name string `json:"name"`
|
||||
Path string `json:"path"`
|
||||
Type string `json:"type"`
|
||||
Size int64 `json:"size"`
|
||||
IsRepoDir bool `json:"is_repo_dir"`
|
||||
RepoMode string `json:"repo_mode"`
|
||||
}
|
||||
|
||||
var ErrPathNotFound = errors.New("path not found")
|
||||
|
||||
@@ -1,7 +1,8 @@
|
||||
PRAGMA foreign_keys = ON;
|
||||
|
||||
CREATE TABLE IF NOT EXISTS users (
|
||||
id TEXT PRIMARY KEY,
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
public_id TEXT NOT NULL UNIQUE,
|
||||
username TEXT NOT NULL UNIQUE,
|
||||
display_name TEXT NOT NULL,
|
||||
email TEXT NOT NULL,
|
||||
@@ -9,12 +10,13 @@ CREATE TABLE IF NOT EXISTS users (
|
||||
is_admin INTEGER NOT NULL DEFAULT 0,
|
||||
auth_source TEXT NOT NULL DEFAULT 'db',
|
||||
created_at TIMESTAMP NOT NULL,
|
||||
updated_at TIMESTAMP NOT NULL
|
||||
updated_at TIMESTAMP NOT NULL,
|
||||
disabled INTEGER NOT NULL DEFAULT 0
|
||||
);
|
||||
|
||||
CREATE TABLE IF NOT EXISTS sessions (
|
||||
id TEXT PRIMARY KEY,
|
||||
user_id TEXT NOT NULL,
|
||||
user_id INTEGER NOT NULL,
|
||||
token TEXT NOT NULL UNIQUE,
|
||||
expires_at TIMESTAMP NOT NULL,
|
||||
created_at TIMESTAMP NOT NULL,
|
||||
@@ -22,19 +24,25 @@ CREATE TABLE IF NOT EXISTS sessions (
|
||||
);
|
||||
|
||||
CREATE TABLE IF NOT EXISTS projects (
|
||||
id TEXT PRIMARY KEY,
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
public_id TEXT NOT NULL UNIQUE,
|
||||
slug TEXT NOT NULL UNIQUE,
|
||||
name TEXT NOT NULL,
|
||||
description TEXT NOT NULL,
|
||||
created_by TEXT NOT NULL,
|
||||
home_page TEXT NOT NULL DEFAULT 'info',
|
||||
created_by INTEGER NOT NULL,
|
||||
updated_by INTEGER NOT NULL DEFAULT 0,
|
||||
created_at TIMESTAMP NOT NULL,
|
||||
updated_at TIMESTAMP NOT NULL,
|
||||
created_at_unix INTEGER NOT NULL DEFAULT 0,
|
||||
updated_at_unix INTEGER NOT NULL DEFAULT 0,
|
||||
FOREIGN KEY (created_by) REFERENCES users(id)
|
||||
FOREIGN KEY (updated_by) REFERENCES users(id)
|
||||
);
|
||||
|
||||
CREATE TABLE IF NOT EXISTS project_members (
|
||||
project_id TEXT NOT NULL,
|
||||
user_id TEXT NOT NULL,
|
||||
project_id INTEGER NOT NULL,
|
||||
user_id INTEGER NOT NULL,
|
||||
role TEXT NOT NULL,
|
||||
created_at TIMESTAMP NOT NULL,
|
||||
PRIMARY KEY (project_id, user_id),
|
||||
@@ -43,24 +51,27 @@ CREATE TABLE IF NOT EXISTS project_members (
|
||||
);
|
||||
|
||||
CREATE TABLE IF NOT EXISTS repos (
|
||||
id TEXT PRIMARY KEY,
|
||||
project_id TEXT NOT NULL,
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
public_id TEXT NOT NULL UNIQUE,
|
||||
project_id INTEGER NOT NULL,
|
||||
name TEXT NOT NULL,
|
||||
type TEXT NOT NULL DEFAULT 'git',
|
||||
path TEXT NOT NULL,
|
||||
created_by TEXT NOT NULL,
|
||||
created_by INTEGER NOT NULL,
|
||||
created_at TIMESTAMP NOT NULL,
|
||||
FOREIGN KEY (project_id) REFERENCES projects(id) ON DELETE CASCADE,
|
||||
FOREIGN KEY (created_by) REFERENCES users(id)
|
||||
);
|
||||
|
||||
CREATE TABLE IF NOT EXISTS issues (
|
||||
id TEXT PRIMARY KEY,
|
||||
project_id TEXT NOT NULL,
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
public_id TEXT NOT NULL UNIQUE,
|
||||
project_id INTEGER NOT NULL,
|
||||
title TEXT NOT NULL,
|
||||
body TEXT NOT NULL,
|
||||
status TEXT NOT NULL,
|
||||
created_by TEXT NOT NULL,
|
||||
assignee_id TEXT,
|
||||
created_by INTEGER NOT NULL,
|
||||
assignee_id INTEGER,
|
||||
created_at TIMESTAMP NOT NULL,
|
||||
updated_at TIMESTAMP NOT NULL,
|
||||
FOREIGN KEY (project_id) REFERENCES projects(id) ON DELETE CASCADE,
|
||||
@@ -69,22 +80,24 @@ CREATE TABLE IF NOT EXISTS issues (
|
||||
);
|
||||
|
||||
CREATE TABLE IF NOT EXISTS issue_comments (
|
||||
id TEXT PRIMARY KEY,
|
||||
issue_id TEXT NOT NULL,
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
public_id TEXT NOT NULL UNIQUE,
|
||||
issue_id INTEGER NOT NULL,
|
||||
body TEXT NOT NULL,
|
||||
created_by TEXT NOT NULL,
|
||||
created_by INTEGER NOT NULL,
|
||||
created_at TIMESTAMP NOT NULL,
|
||||
FOREIGN KEY (issue_id) REFERENCES issues(id) ON DELETE CASCADE,
|
||||
FOREIGN KEY (created_by) REFERENCES users(id)
|
||||
);
|
||||
|
||||
CREATE TABLE IF NOT EXISTS wiki_pages (
|
||||
id TEXT PRIMARY KEY,
|
||||
project_id TEXT NOT NULL,
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
public_id TEXT NOT NULL UNIQUE,
|
||||
project_id INTEGER NOT NULL,
|
||||
title TEXT NOT NULL,
|
||||
slug TEXT NOT NULL,
|
||||
body TEXT NOT NULL,
|
||||
created_by TEXT NOT NULL,
|
||||
created_by INTEGER NOT NULL,
|
||||
updated_at TIMESTAMP NOT NULL,
|
||||
UNIQUE (project_id, slug),
|
||||
FOREIGN KEY (project_id) REFERENCES projects(id) ON DELETE CASCADE,
|
||||
@@ -92,20 +105,203 @@ CREATE TABLE IF NOT EXISTS wiki_pages (
|
||||
);
|
||||
|
||||
CREATE TABLE IF NOT EXISTS uploads (
|
||||
id TEXT PRIMARY KEY,
|
||||
project_id TEXT NOT NULL,
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
public_id TEXT NOT NULL UNIQUE,
|
||||
project_id INTEGER NOT NULL,
|
||||
filename TEXT NOT NULL,
|
||||
content_type TEXT NOT NULL,
|
||||
size INTEGER NOT NULL,
|
||||
storage_path TEXT NOT NULL,
|
||||
created_by TEXT NOT NULL,
|
||||
created_by INTEGER NOT NULL,
|
||||
created_at TIMESTAMP NOT NULL,
|
||||
FOREIGN KEY (project_id) REFERENCES projects(id) ON DELETE CASCADE,
|
||||
FOREIGN KEY (created_by) REFERENCES users(id)
|
||||
);
|
||||
|
||||
CREATE TABLE IF NOT EXISTS project_repos (
|
||||
project_id INTEGER NOT NULL,
|
||||
repo_id INTEGER NOT NULL,
|
||||
created_at TIMESTAMP NOT NULL,
|
||||
PRIMARY KEY (project_id, repo_id),
|
||||
FOREIGN KEY (project_id) REFERENCES projects(id) ON DELETE CASCADE,
|
||||
FOREIGN KEY (repo_id) REFERENCES repos(id) ON DELETE CASCADE
|
||||
);
|
||||
|
||||
CREATE TABLE IF NOT EXISTS app_settings (
|
||||
key TEXT PRIMARY KEY,
|
||||
value TEXT NOT NULL,
|
||||
updated_at INTEGER NOT NULL
|
||||
);
|
||||
|
||||
CREATE TABLE IF NOT EXISTS api_keys (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
public_id TEXT NOT NULL UNIQUE,
|
||||
user_id INTEGER NOT NULL,
|
||||
name TEXT NOT NULL,
|
||||
token_hash TEXT NOT NULL,
|
||||
token_prefix TEXT NOT NULL,
|
||||
created_at INTEGER NOT NULL,
|
||||
last_used_at INTEGER NOT NULL DEFAULT 0,
|
||||
expires_at INTEGER NOT NULL DEFAULT 0,
|
||||
disabled INTEGER NOT NULL DEFAULT 0,
|
||||
FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE
|
||||
);
|
||||
|
||||
CREATE TABLE IF NOT EXISTS pki_cas (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
public_id TEXT NOT NULL UNIQUE,
|
||||
name TEXT NOT NULL UNIQUE,
|
||||
parent_ca_id INTEGER,
|
||||
is_root INTEGER NOT NULL DEFAULT 0,
|
||||
cert_pem TEXT NOT NULL,
|
||||
key_pem TEXT NOT NULL,
|
||||
serial_counter INTEGER NOT NULL DEFAULT 1,
|
||||
status TEXT NOT NULL DEFAULT 'active',
|
||||
created_at INTEGER NOT NULL,
|
||||
updated_at INTEGER NOT NULL,
|
||||
FOREIGN KEY(parent_ca_id) REFERENCES pki_cas(id)
|
||||
);
|
||||
|
||||
CREATE TABLE IF NOT EXISTS pki_certs (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
public_id TEXT NOT NULL UNIQUE,
|
||||
ca_id INTEGER,
|
||||
serial_hex TEXT NOT NULL,
|
||||
common_name TEXT NOT NULL,
|
||||
san_dns TEXT NOT NULL DEFAULT '',
|
||||
san_ips TEXT NOT NULL DEFAULT '',
|
||||
is_ca INTEGER NOT NULL DEFAULT 0,
|
||||
cert_pem TEXT NOT NULL,
|
||||
key_pem TEXT NOT NULL,
|
||||
not_before INTEGER NOT NULL,
|
||||
not_after INTEGER NOT NULL,
|
||||
status TEXT NOT NULL DEFAULT 'active',
|
||||
revoked_at INTEGER NOT NULL DEFAULT 0,
|
||||
revocation_reason TEXT NOT NULL DEFAULT '',
|
||||
created_at INTEGER NOT NULL,
|
||||
FOREIGN KEY(ca_id) REFERENCES pki_cas(id) ON DELETE CASCADE,
|
||||
UNIQUE(ca_id, serial_hex)
|
||||
);
|
||||
|
||||
CREATE TABLE IF NOT EXISTS tls_listeners (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
public_id TEXT NOT NULL UNIQUE,
|
||||
name TEXT NOT NULL UNIQUE,
|
||||
enabled INTEGER NOT NULL DEFAULT 1,
|
||||
http_addrs TEXT NOT NULL DEFAULT '',
|
||||
https_addrs TEXT NOT NULL DEFAULT '',
|
||||
tls_server_cert_source TEXT NOT NULL DEFAULT 'files',
|
||||
tls_cert_file TEXT NOT NULL DEFAULT '',
|
||||
tls_key_file TEXT NOT NULL DEFAULT '',
|
||||
tls_pki_server_cert_id TEXT NOT NULL DEFAULT '',
|
||||
tls_client_auth TEXT NOT NULL DEFAULT 'none',
|
||||
tls_client_ca_file TEXT NOT NULL DEFAULT '',
|
||||
tls_pki_client_ca_id TEXT NOT NULL DEFAULT '',
|
||||
tls_min_version TEXT NOT NULL DEFAULT '1.2',
|
||||
created_at INTEGER NOT NULL,
|
||||
updated_at INTEGER NOT NULL,
|
||||
auth_policy TEXT NOT NULL DEFAULT 'default',
|
||||
apply_policy_api INTEGER NOT NULL DEFAULT 0,
|
||||
apply_policy_git INTEGER NOT NULL DEFAULT 0,
|
||||
apply_policy_rpm INTEGER NOT NULL DEFAULT 0,
|
||||
apply_policy_v2 INTEGER NOT NULL DEFAULT 0,
|
||||
client_cert_allowlist TEXT NOT NULL DEFAULT ''
|
||||
);
|
||||
|
||||
CREATE TABLE IF NOT EXISTS service_principals (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
public_id TEXT NOT NULL UNIQUE,
|
||||
name TEXT NOT NULL UNIQUE,
|
||||
description TEXT NOT NULL DEFAULT '',
|
||||
disabled INTEGER NOT NULL DEFAULT 0,
|
||||
created_at INTEGER NOT NULL,
|
||||
updated_at INTEGER NOT NULL,
|
||||
is_admin INTEGER NOT NULL DEFAULT 0
|
||||
);
|
||||
|
||||
CREATE TABLE IF NOT EXISTS cert_principal_bindings (
|
||||
fingerprint TEXT PRIMARY KEY,
|
||||
principal_id INTEGER NOT NULL,
|
||||
enabled INTEGER NOT NULL DEFAULT 1,
|
||||
created_at INTEGER NOT NULL,
|
||||
updated_at INTEGER NOT NULL,
|
||||
FOREIGN KEY (principal_id) REFERENCES service_principals(id) ON DELETE CASCADE
|
||||
);
|
||||
|
||||
CREATE TABLE IF NOT EXISTS principal_project_roles (
|
||||
principal_id INTEGER NOT NULL,
|
||||
project_id INTEGER NOT NULL,
|
||||
role TEXT NOT NULL,
|
||||
created_at INTEGER NOT NULL,
|
||||
PRIMARY KEY (principal_id, project_id),
|
||||
FOREIGN KEY (principal_id) REFERENCES service_principals(id) ON DELETE CASCADE,
|
||||
FOREIGN KEY (project_id) REFERENCES projects(id) ON DELETE CASCADE
|
||||
);
|
||||
|
||||
CREATE TABLE IF NOT EXISTS rpm_repo_dirs (
|
||||
repo_id INTEGER NOT NULL,
|
||||
path TEXT NOT NULL,
|
||||
mode TEXT NOT NULL DEFAULT 'local',
|
||||
allow_delete INTEGER NOT NULL DEFAULT 0,
|
||||
remote_url TEXT NOT NULL DEFAULT '',
|
||||
connect_host TEXT NOT NULL DEFAULT '',
|
||||
host_header TEXT NOT NULL DEFAULT '',
|
||||
tls_server_name TEXT NOT NULL DEFAULT '',
|
||||
tls_insecure_skip_verify INTEGER NOT NULL DEFAULT 0,
|
||||
sync_interval_sec INTEGER NOT NULL DEFAULT 300,
|
||||
sync_enabled INTEGER NOT NULL DEFAULT 1,
|
||||
dirty INTEGER NOT NULL DEFAULT 1,
|
||||
next_sync_at INTEGER NOT NULL DEFAULT 0,
|
||||
sync_running INTEGER NOT NULL DEFAULT 0,
|
||||
sync_status TEXT NOT NULL DEFAULT 'idle',
|
||||
sync_error TEXT NOT NULL DEFAULT '',
|
||||
sync_step TEXT NOT NULL DEFAULT '',
|
||||
sync_total INTEGER NOT NULL DEFAULT 0,
|
||||
sync_done INTEGER NOT NULL DEFAULT 0,
|
||||
sync_failed INTEGER NOT NULL DEFAULT 0,
|
||||
sync_deleted INTEGER NOT NULL DEFAULT 0,
|
||||
last_sync_started_at INTEGER NOT NULL DEFAULT 0,
|
||||
last_sync_finished_at INTEGER NOT NULL DEFAULT 0,
|
||||
last_sync_success_at INTEGER NOT NULL DEFAULT 0,
|
||||
last_synced_revision TEXT NOT NULL DEFAULT '',
|
||||
created_at INTEGER NOT NULL,
|
||||
updated_at INTEGER NOT NULL,
|
||||
PRIMARY KEY (repo_id, path),
|
||||
FOREIGN KEY (repo_id) REFERENCES repos(id) ON DELETE CASCADE
|
||||
);
|
||||
|
||||
CREATE TABLE IF NOT EXISTS rpm_mirror_runs (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
public_id TEXT NOT NULL UNIQUE,
|
||||
repo_id INTEGER NOT NULL,
|
||||
path TEXT NOT NULL,
|
||||
started_at INTEGER NOT NULL,
|
||||
finished_at INTEGER NOT NULL DEFAULT 0,
|
||||
status TEXT NOT NULL DEFAULT 'running',
|
||||
step TEXT NOT NULL DEFAULT '',
|
||||
total INTEGER NOT NULL DEFAULT 0,
|
||||
done INTEGER NOT NULL DEFAULT 0,
|
||||
failed INTEGER NOT NULL DEFAULT 0,
|
||||
deleted INTEGER NOT NULL DEFAULT 0,
|
||||
revision TEXT NOT NULL DEFAULT '',
|
||||
error TEXT NOT NULL DEFAULT '',
|
||||
FOREIGN KEY(repo_id, path) REFERENCES rpm_repo_dirs(repo_id, path) ON DELETE CASCADE
|
||||
);
|
||||
|
||||
CREATE INDEX IF NOT EXISTS idx_users_disabled ON users(disabled);
|
||||
CREATE INDEX IF NOT EXISTS idx_projects_name ON projects(name);
|
||||
CREATE INDEX IF NOT EXISTS idx_repos_project ON repos(project_id);
|
||||
CREATE UNIQUE INDEX IF NOT EXISTS idx_repos_project_name_type ON repos(project_id, name, type);
|
||||
CREATE INDEX IF NOT EXISTS idx_issues_project ON issues(project_id);
|
||||
CREATE INDEX IF NOT EXISTS idx_wiki_project ON wiki_pages(project_id);
|
||||
CREATE INDEX IF NOT EXISTS idx_uploads_project ON uploads(project_id);
|
||||
CREATE UNIQUE INDEX IF NOT EXISTS idx_api_keys_hash ON api_keys(token_hash);
|
||||
CREATE INDEX IF NOT EXISTS idx_api_keys_user_id ON api_keys(user_id);
|
||||
CREATE INDEX IF NOT EXISTS idx_api_keys_expires_at ON api_keys(expires_at);
|
||||
CREATE INDEX IF NOT EXISTS idx_api_keys_disabled ON api_keys(disabled);
|
||||
CREATE INDEX IF NOT EXISTS idx_project_repos_project ON project_repos(project_id);
|
||||
CREATE INDEX IF NOT EXISTS idx_project_repos_repo ON project_repos(repo_id);
|
||||
CREATE INDEX IF NOT EXISTS idx_pki_cas_parent ON pki_cas(parent_ca_id);
|
||||
CREATE INDEX IF NOT EXISTS idx_pki_certs_ca ON pki_certs(ca_id);
|
||||
CREATE INDEX IF NOT EXISTS idx_cert_principal_bindings_principal_id ON cert_principal_bindings(principal_id);
|
||||
CREATE INDEX IF NOT EXISTS idx_principal_project_roles_project_id ON principal_project_roles(project_id);
|
||||
|
||||
@@ -1,13 +0,0 @@
|
||||
PRAGMA foreign_keys = ON;
|
||||
|
||||
CREATE TABLE IF NOT EXISTS project_repos (
|
||||
project_id TEXT NOT NULL,
|
||||
repo_id TEXT NOT NULL,
|
||||
created_at TIMESTAMP NOT NULL,
|
||||
PRIMARY KEY (project_id, repo_id),
|
||||
FOREIGN KEY (project_id) REFERENCES projects(id) ON DELETE CASCADE,
|
||||
FOREIGN KEY (repo_id) REFERENCES repos(id) ON DELETE CASCADE
|
||||
);
|
||||
|
||||
CREATE INDEX IF NOT EXISTS idx_project_repos_project ON project_repos(project_id);
|
||||
CREATE INDEX IF NOT EXISTS idx_project_repos_repo ON project_repos(repo_id);
|
||||
@@ -1,10 +0,0 @@
|
||||
ALTER TABLE projects ADD COLUMN updated_by TEXT NOT NULL DEFAULT '';
|
||||
ALTER TABLE projects ADD COLUMN created_at_unix INTEGER NOT NULL DEFAULT 0;
|
||||
ALTER TABLE projects ADD COLUMN updated_at_unix INTEGER NOT NULL DEFAULT 0;
|
||||
|
||||
UPDATE projects
|
||||
SET
|
||||
updated_by = created_by,
|
||||
created_at_unix = COALESCE(CAST(strftime('%s', created_at) AS INTEGER), 0),
|
||||
updated_at_unix = COALESCE(CAST(strftime('%s', updated_at) AS INTEGER), 0)
|
||||
WHERE created_at_unix = 0 OR updated_at_unix = 0 OR updated_by = '';
|
||||
@@ -1,3 +0,0 @@
|
||||
PRAGMA foreign_keys = ON;
|
||||
|
||||
ALTER TABLE repos ADD COLUMN type TEXT NOT NULL DEFAULT 'git';
|
||||
@@ -1,3 +0,0 @@
|
||||
PRAGMA foreign_keys = ON;
|
||||
|
||||
CREATE UNIQUE INDEX IF NOT EXISTS idx_repos_project_name_type ON repos(project_id, name, type);
|
||||
@@ -1,13 +0,0 @@
|
||||
CREATE TABLE IF NOT EXISTS api_keys (
|
||||
id TEXT PRIMARY KEY,
|
||||
user_id TEXT NOT NULL,
|
||||
name TEXT NOT NULL,
|
||||
token_hash TEXT NOT NULL,
|
||||
token_prefix TEXT NOT NULL,
|
||||
created_at INTEGER NOT NULL,
|
||||
last_used_at INTEGER NOT NULL DEFAULT 0,
|
||||
FOREIGN KEY(user_id) REFERENCES users(id) ON DELETE CASCADE
|
||||
);
|
||||
|
||||
CREATE UNIQUE INDEX IF NOT EXISTS idx_api_keys_hash ON api_keys(token_hash);
|
||||
CREATE INDEX IF NOT EXISTS idx_api_keys_user_id ON api_keys(user_id);
|
||||
@@ -1 +0,0 @@
|
||||
ALTER TABLE projects ADD COLUMN home_page TEXT NOT NULL DEFAULT 'info';
|
||||
@@ -1,3 +0,0 @@
|
||||
ALTER TABLE api_keys ADD COLUMN expires_at INTEGER NOT NULL DEFAULT 0;
|
||||
|
||||
CREATE INDEX IF NOT EXISTS idx_api_keys_expires_at ON api_keys(expires_at);
|
||||
@@ -1,5 +0,0 @@
|
||||
ALTER TABLE users ADD COLUMN disabled INTEGER NOT NULL DEFAULT 0;
|
||||
ALTER TABLE api_keys ADD COLUMN disabled INTEGER NOT NULL DEFAULT 0;
|
||||
|
||||
CREATE INDEX IF NOT EXISTS idx_users_disabled ON users(disabled);
|
||||
CREATE INDEX IF NOT EXISTS idx_api_keys_disabled ON api_keys(disabled);
|
||||
@@ -1,5 +0,0 @@
|
||||
CREATE TABLE IF NOT EXISTS app_settings (
|
||||
key TEXT PRIMARY KEY,
|
||||
value TEXT NOT NULL,
|
||||
updated_at INTEGER NOT NULL
|
||||
);
|
||||
@@ -65,6 +65,7 @@ export interface RpmPackageDetail extends RpmPackageSummary {
|
||||
files: string[]
|
||||
requires: string[]
|
||||
provides: string[]
|
||||
changelogs: { author: string; date: number; text: string }[]
|
||||
}
|
||||
|
||||
export interface DockerTagInfo {
|
||||
@@ -96,6 +97,54 @@ export interface RpmTreeEntry {
|
||||
path: string
|
||||
type: 'file' | 'dir'
|
||||
size: number
|
||||
is_repo_dir?: boolean
|
||||
repo_mode?: 'local' | 'mirror' | ''
|
||||
}
|
||||
|
||||
export interface RpmRepoDirConfig {
|
||||
repo_id: string
|
||||
path: string
|
||||
mode: 'local' | 'mirror' | ''
|
||||
allow_delete: boolean
|
||||
remote_url: string
|
||||
connect_host: string
|
||||
host_header: string
|
||||
tls_server_name: string
|
||||
tls_insecure_skip_verify: boolean
|
||||
sync_interval_sec: number
|
||||
sync_enabled: boolean
|
||||
dirty: boolean
|
||||
next_sync_at: number
|
||||
sync_running: boolean
|
||||
sync_status: string
|
||||
sync_error: string
|
||||
sync_step: string
|
||||
sync_total: number
|
||||
sync_done: number
|
||||
sync_failed: number
|
||||
sync_deleted: number
|
||||
last_sync_started_at: number
|
||||
last_sync_finished_at: number
|
||||
last_sync_success_at: number
|
||||
last_synced_revision: string
|
||||
created_at: number
|
||||
updated_at: number
|
||||
}
|
||||
|
||||
export interface RpmMirrorRun {
|
||||
id: string
|
||||
repo_id: string
|
||||
path: string
|
||||
started_at: number
|
||||
finished_at: number
|
||||
status: string
|
||||
step: string
|
||||
total: number
|
||||
done: number
|
||||
failed: number
|
||||
deleted: number
|
||||
revision: string
|
||||
error: string
|
||||
}
|
||||
|
||||
export interface RepoTypeItem {
|
||||
@@ -193,6 +242,31 @@ export interface AdminAPIKey extends APIKey {
|
||||
email: string
|
||||
}
|
||||
|
||||
export interface ServicePrincipal {
|
||||
id: string
|
||||
name: string
|
||||
description: string
|
||||
is_admin: boolean
|
||||
disabled: boolean
|
||||
created_at: number
|
||||
updated_at: number
|
||||
}
|
||||
|
||||
export interface CertPrincipalBinding {
|
||||
fingerprint: string
|
||||
principal_id: string
|
||||
enabled: boolean
|
||||
created_at: number
|
||||
updated_at: number
|
||||
}
|
||||
|
||||
export interface PrincipalProjectRole {
|
||||
principal_id: string
|
||||
project_id: string
|
||||
role: 'viewer' | 'writer' | 'admin'
|
||||
created_at: number
|
||||
}
|
||||
|
||||
export interface AuthSettings {
|
||||
auth_mode: 'db' | 'ldap' | 'hybrid'
|
||||
oidc_enabled: boolean
|
||||
@@ -212,12 +286,87 @@ export interface AuthSettings {
|
||||
oidc_tls_insecure_skip_verify: boolean
|
||||
}
|
||||
|
||||
export interface TLSSettings {
|
||||
http_addrs: string[]
|
||||
https_addrs: string[]
|
||||
tls_server_cert_source: 'pki'
|
||||
tls_cert_file: string
|
||||
tls_key_file: string
|
||||
tls_pki_server_cert_id: string
|
||||
tls_client_auth: 'none' | 'request' | 'require' | 'verify_if_given' | 'require_and_verify'
|
||||
tls_client_ca_file: string
|
||||
tls_pki_client_ca_id: string
|
||||
tls_min_version: '1.0' | '1.1' | '1.2' | '1.3'
|
||||
}
|
||||
|
||||
export interface TLSListener {
|
||||
id: string
|
||||
name: string
|
||||
enabled: boolean
|
||||
http_addrs: string[]
|
||||
https_addrs: string[]
|
||||
auth_policy: 'default' | 'read_open_write_cert' | 'read_open_write_cert_or_auth' | 'cert_only' | 'read_only_public'
|
||||
apply_policy_api: boolean
|
||||
apply_policy_git: boolean
|
||||
apply_policy_rpm: boolean
|
||||
apply_policy_v2: boolean
|
||||
client_cert_allowlist: string[]
|
||||
tls_server_cert_source: 'pki'
|
||||
tls_cert_file: string
|
||||
tls_key_file: string
|
||||
tls_pki_server_cert_id: string
|
||||
tls_client_auth: 'none' | 'request' | 'require' | 'verify_if_given' | 'require_and_verify'
|
||||
tls_client_ca_file: string
|
||||
tls_pki_client_ca_id: string
|
||||
tls_min_version: '1.0' | '1.1' | '1.2' | '1.3'
|
||||
created_at: number
|
||||
updated_at: number
|
||||
}
|
||||
|
||||
export interface OIDCStatus {
|
||||
enabled: boolean
|
||||
configured?: boolean
|
||||
auth_mode?: string
|
||||
}
|
||||
|
||||
export interface PKICA {
|
||||
id: string
|
||||
name: string
|
||||
parent_ca_id: string
|
||||
is_root: boolean
|
||||
status: string
|
||||
created_at: number
|
||||
updated_at: number
|
||||
}
|
||||
|
||||
export interface PKICADetail extends PKICA {
|
||||
cert_pem: string
|
||||
key_pem: string
|
||||
serial_counter: number
|
||||
}
|
||||
|
||||
export interface PKICert {
|
||||
id: string
|
||||
ca_id: string
|
||||
serial_hex: string
|
||||
common_name: string
|
||||
fingerprint?: string
|
||||
san_dns: string
|
||||
san_ips: string
|
||||
is_ca: boolean
|
||||
not_before: number
|
||||
not_after: number
|
||||
status: string
|
||||
revoked_at: number
|
||||
revocation_reason: string
|
||||
created_at: number
|
||||
}
|
||||
|
||||
export interface PKICertDetail extends PKICert {
|
||||
cert_pem: string
|
||||
key_pem: string
|
||||
}
|
||||
|
||||
async function request<T>(path: string, options: RequestInit = {}): Promise<T> {
|
||||
const res = await fetch(path, {
|
||||
credentials: 'include',
|
||||
@@ -287,6 +436,22 @@ export const api = {
|
||||
deleteAdminAPIKey: (id: string) => request<void>(`/api/admin/api-keys/${id}`, { method: 'DELETE' }),
|
||||
disableAdminAPIKey: (id: string) => request<void>(`/api/admin/api-keys/${id}/disable`, { method: 'POST' }),
|
||||
enableAdminAPIKey: (id: string) => request<void>(`/api/admin/api-keys/${id}/enable`, { method: 'POST' }),
|
||||
listServicePrincipals: () => request<ServicePrincipal[]>('/api/admin/service-principals'),
|
||||
createServicePrincipal: (payload: { name: string; description?: string; is_admin?: boolean; disabled?: boolean }) =>
|
||||
request<ServicePrincipal>('/api/admin/service-principals', { method: 'POST', body: JSON.stringify(payload) }),
|
||||
updateServicePrincipal: (id: string, payload: { name: string; description?: string; is_admin: boolean; disabled: boolean }) =>
|
||||
request<ServicePrincipal>(`/api/admin/service-principals/${id}`, { method: 'PATCH', body: JSON.stringify(payload) }),
|
||||
deleteServicePrincipal: (id: string) => request<void>(`/api/admin/service-principals/${id}`, { method: 'DELETE' }),
|
||||
listPrincipalProjectRoles: (principalID: string) => request<PrincipalProjectRole[]>(`/api/admin/service-principals/${principalID}/roles`),
|
||||
upsertPrincipalProjectRole: (principalID: string, payload: { project_id: string; role: 'viewer' | 'writer' | 'admin' }) =>
|
||||
request<PrincipalProjectRole>(`/api/admin/service-principals/${principalID}/roles`, { method: 'POST', body: JSON.stringify(payload) }),
|
||||
deletePrincipalProjectRole: (principalID: string, projectID: string) =>
|
||||
request<void>(`/api/admin/service-principals/${principalID}/roles/${projectID}`, { method: 'DELETE' }),
|
||||
listCertPrincipalBindings: () => request<CertPrincipalBinding[]>('/api/admin/cert-principal-bindings'),
|
||||
upsertCertPrincipalBinding: (payload: { fingerprint: string; principal_id: string; enabled: boolean }) =>
|
||||
request<CertPrincipalBinding>('/api/admin/cert-principal-bindings', { method: 'POST', body: JSON.stringify(payload) }),
|
||||
deleteCertPrincipalBinding: (fingerprint: string) =>
|
||||
request<void>(`/api/admin/cert-principal-bindings/${encodeURIComponent(fingerprint)}`, { method: 'DELETE' }),
|
||||
getAuthSettings: () => request<AuthSettings>('/api/admin/auth'),
|
||||
updateAuthSettings: (payload: AuthSettings) =>
|
||||
request<AuthSettings>('/api/admin/auth', { method: 'PATCH', body: JSON.stringify(payload) }),
|
||||
@@ -299,6 +464,49 @@ export const api = {
|
||||
body: JSON.stringify(payload),
|
||||
signal
|
||||
}),
|
||||
getTLSSettings: () => request<TLSSettings>('/api/admin/tls'),
|
||||
updateTLSSettings: (payload: TLSSettings) =>
|
||||
request<TLSSettings>('/api/admin/tls', { method: 'PATCH', body: JSON.stringify(payload) }),
|
||||
listTLSListeners: () => request<TLSListener[]>('/api/admin/tls/listeners'),
|
||||
getTLSListenerRuntimeStatus: () => request<Record<string, number>>('/api/admin/tls/listeners/runtime'),
|
||||
createTLSListener: (payload: Omit<TLSListener, 'id' | 'created_at' | 'updated_at'>) =>
|
||||
request<TLSListener>('/api/admin/tls/listeners', { method: 'POST', body: JSON.stringify(payload) }),
|
||||
updateTLSListener: (id: string, payload: Omit<TLSListener, 'id' | 'created_at' | 'updated_at'>) =>
|
||||
request<TLSListener>(`/api/admin/tls/listeners/${id}`, { method: 'PATCH', body: JSON.stringify(payload) }),
|
||||
deleteTLSListener: (id: string) => request<void>(`/api/admin/tls/listeners/${id}`, { method: 'DELETE' }),
|
||||
listPKICAs: () => request<PKICA[]>('/api/admin/pki/cas'),
|
||||
getPKICA: (id: string) => request<PKICADetail>(`/api/admin/pki/cas/${id}`),
|
||||
updatePKICA: (id: string, payload: { name: string }) =>
|
||||
request<PKICADetail>(`/api/admin/pki/cas/${id}`, { method: 'PATCH', body: JSON.stringify(payload) }),
|
||||
downloadPKICABundle: (id: string) => requestBinary(`/api/admin/pki/cas/${id}/bundle`),
|
||||
createPKIRootCA: (payload: { name: string; common_name: string; days: number; cert_pem?: string; key_pem?: string }) =>
|
||||
request<PKICA>('/api/admin/pki/cas/root', { method: 'POST', body: JSON.stringify(payload) }),
|
||||
createPKIIntermediateCA: (payload: { name: string; parent_ca_id: string; common_name: string; days: number; cert_pem?: string; key_pem?: string }) =>
|
||||
request<PKICA>('/api/admin/pki/cas/intermediate', { method: 'POST', body: JSON.stringify(payload) }),
|
||||
deletePKICA: (id: string, force?: boolean) =>
|
||||
request<void>(`/api/admin/pki/cas/${id}${force ? '?force=1' : ''}`, { method: 'DELETE' }),
|
||||
listPKICerts: (ca_id?: string) => {
|
||||
const params = new URLSearchParams()
|
||||
if (ca_id) params.set('ca_id', ca_id)
|
||||
const qs = params.toString()
|
||||
return request<PKICert[]>(`/api/admin/pki/certs${qs ? `?${qs}` : ''}`)
|
||||
},
|
||||
getPKICert: (id: string) => request<PKICertDetail>(`/api/admin/pki/certs/${id}`),
|
||||
downloadPKICertBundle: (id: string) => requestBinary(`/api/admin/pki/certs/${id}/bundle`),
|
||||
issuePKICert: (payload: {
|
||||
ca_id: string
|
||||
common_name: string
|
||||
san_dns: string[]
|
||||
san_ips: string[]
|
||||
days: number
|
||||
is_ca: boolean
|
||||
}) => request<PKICert>('/api/admin/pki/certs', { method: 'POST', body: JSON.stringify(payload) }),
|
||||
importPKICert: (payload: { ca_id?: string; cert_pem: string; key_pem: string }) =>
|
||||
request<PKICert>('/api/admin/pki/certs/import', { method: 'POST', body: JSON.stringify(payload) }),
|
||||
revokePKICert: (id: string, reason: string) =>
|
||||
request<{ status: string }>(`/api/admin/pki/certs/${id}/revoke`, { method: 'POST', body: JSON.stringify({ reason }) }),
|
||||
deletePKICert: (id: string) => request<void>(`/api/admin/pki/certs/${id}`, { method: 'DELETE' }),
|
||||
getPKICRL: (id: string) => request<{ crl_pem: string }>(`/api/admin/pki/cas/${id}/crl`),
|
||||
|
||||
listUsers: () => request<User[]>('/api/users'),
|
||||
createUser: (payload: { username: string; display_name: string; email: string; password: string; is_admin: boolean }) =>
|
||||
@@ -453,16 +661,87 @@ export const api = {
|
||||
body: form
|
||||
})
|
||||
},
|
||||
createRpmSubdir: (repoId: string, name: string, type: string, parent?: string) =>
|
||||
createRpmSubdir: (
|
||||
repoId: string,
|
||||
name: string,
|
||||
type: string,
|
||||
parent?: string,
|
||||
mode?: 'local' | 'mirror',
|
||||
allow_delete?: boolean,
|
||||
remote_url?: string,
|
||||
connect_host?: string,
|
||||
host_header?: string,
|
||||
tls_server_name?: string,
|
||||
tls_insecure_skip_verify?: boolean,
|
||||
sync_interval_sec?: number
|
||||
) =>
|
||||
request<{ status: string }>(`/api/repos/${repoId}/rpm/subdirs`, {
|
||||
method: 'POST',
|
||||
body: JSON.stringify({ name, type, parent })
|
||||
body: JSON.stringify({ name, type, parent, mode, allow_delete, remote_url, connect_host, host_header, tls_server_name, tls_insecure_skip_verify, sync_interval_sec })
|
||||
}),
|
||||
renameRpmSubdir: (repoId: string, path: string, name: string) =>
|
||||
request<{ status: string }>(`/api/repos/${repoId}/rpm/subdir/rename`, {
|
||||
getRpmSubdir: (repoId: string, path: string) => {
|
||||
const params = new URLSearchParams()
|
||||
params.set('path', path)
|
||||
return request<RpmRepoDirConfig>(`/api/repos/${repoId}/rpm/subdir?${params.toString()}`)
|
||||
},
|
||||
updateRpmSubdir: (
|
||||
repoId: string,
|
||||
path: string,
|
||||
name?: string,
|
||||
mode?: 'local' | 'mirror',
|
||||
allow_delete?: boolean,
|
||||
remote_url?: string,
|
||||
connect_host?: string,
|
||||
host_header?: string,
|
||||
tls_server_name?: string,
|
||||
tls_insecure_skip_verify?: boolean,
|
||||
sync_interval_sec?: number
|
||||
) =>
|
||||
request<{ status: string }>(`/api/repos/${repoId}/rpm/subdir/update`, {
|
||||
method: 'POST',
|
||||
body: JSON.stringify({ path, name })
|
||||
body: JSON.stringify({ path, name, mode, allow_delete, remote_url, connect_host, host_header, tls_server_name, tls_insecure_skip_verify, sync_interval_sec })
|
||||
}),
|
||||
syncRpmSubdir: (repoId: string, path: string) => {
|
||||
const params = new URLSearchParams()
|
||||
params.set('path', path)
|
||||
return request<{ status: string }>(`/api/repos/${repoId}/rpm/subdir/sync?${params.toString()}`, {
|
||||
method: 'POST'
|
||||
})
|
||||
},
|
||||
suspendRpmSubdir: (repoId: string, path: string) => {
|
||||
const params = new URLSearchParams()
|
||||
params.set('path', path)
|
||||
return request<{ status: string; sync_enabled: boolean }>(`/api/repos/${repoId}/rpm/subdir/suspend?${params.toString()}`, {
|
||||
method: 'POST'
|
||||
})
|
||||
},
|
||||
resumeRpmSubdir: (repoId: string, path: string) => {
|
||||
const params = new URLSearchParams()
|
||||
params.set('path', path)
|
||||
return request<{ status: string; sync_enabled: boolean }>(`/api/repos/${repoId}/rpm/subdir/resume?${params.toString()}`, {
|
||||
method: 'POST'
|
||||
})
|
||||
},
|
||||
rebuildRpmSubdirMetadata: (repoId: string, path: string) => {
|
||||
const params = new URLSearchParams()
|
||||
params.set('path', path)
|
||||
return request<{ status: string }>(`/api/repos/${repoId}/rpm/subdir/rebuild-metadata?${params.toString()}`, {
|
||||
method: 'POST'
|
||||
})
|
||||
},
|
||||
listRpmMirrorRuns: (repoId: string, path: string, limit?: number) => {
|
||||
const params = new URLSearchParams()
|
||||
params.set('path', path)
|
||||
if (limit && limit > 0) params.set('limit', String(limit))
|
||||
return request<RpmMirrorRun[]>(`/api/repos/${repoId}/rpm/subdir/runs?${params.toString()}`)
|
||||
},
|
||||
clearRpmMirrorRuns: (repoId: string, path: string) => {
|
||||
const params = new URLSearchParams()
|
||||
params.set('path', path)
|
||||
return request<{ status: string; deleted_count: number }>(`/api/repos/${repoId}/rpm/subdir/runs?${params.toString()}`, {
|
||||
method: 'DELETE'
|
||||
})
|
||||
},
|
||||
deleteRpmSubdir: (repoId: string, path: string) => {
|
||||
const params = new URLSearchParams()
|
||||
params.set('path', path)
|
||||
|
||||
@@ -24,6 +24,9 @@ import KeyIcon from '@mui/icons-material/Key'
|
||||
import AdminPanelSettingsIcon from '@mui/icons-material/AdminPanelSettings'
|
||||
import PersonIcon from '@mui/icons-material/Person'
|
||||
import BadgeIcon from '@mui/icons-material/Badge'
|
||||
import SecurityIcon from '@mui/icons-material/Security'
|
||||
import HttpsIcon from '@mui/icons-material/Https'
|
||||
import VpnKeyIcon from '@mui/icons-material/VpnKey'
|
||||
import DarkModeIcon from '@mui/icons-material/DarkMode'
|
||||
import LightModeIcon from '@mui/icons-material/LightMode'
|
||||
import { ThemeModeContext } from './ThemeModeContext'
|
||||
@@ -58,7 +61,10 @@ export default function Layout() {
|
||||
if (user?.is_admin) {
|
||||
items.push({ label: 'Admin Users', path: '/admin/users', icon: <PeopleIcon fontSize="small" /> })
|
||||
items.push({ label: 'Admin API Keys', path: '/admin/api-keys', icon: <AdminPanelSettingsIcon fontSize="small" /> })
|
||||
items.push({ label: 'Admin PKI', path: '/admin/pki', icon: <SecurityIcon fontSize="small" /> })
|
||||
items.push({ label: 'Service Principals', path: '/admin/principals', icon: <VpnKeyIcon fontSize="small" /> })
|
||||
items.push({ label: 'Site Auth', path: '/admin/auth', icon: <BadgeIcon fontSize="small" /> })
|
||||
items.push({ label: 'Site TLS', path: '/admin/tls', icon: <HttpsIcon fontSize="small" /> })
|
||||
}
|
||||
return items
|
||||
}, [user])
|
||||
|
||||
@@ -17,6 +17,9 @@ import FilesPage from '../pages/FilesPage'
|
||||
import AdminUsersPage from '../pages/AdminUsersPage'
|
||||
import AdminApiKeysPage from '../pages/AdminApiKeysPage'
|
||||
import AdminAuthLdapPage from '../pages/AdminAuthLdapPage'
|
||||
import AdminPKIPage from '../pages/AdminPKIPage'
|
||||
import AdminTLSSettingsPage from '../pages/AdminTLSSettingsPage'
|
||||
import AdminServicePrincipalsPage from '../pages/AdminServicePrincipalsPage'
|
||||
import ApiKeysPage from '../pages/ApiKeysPage'
|
||||
import AccountPage from '../pages/AccountPage'
|
||||
import NotFoundPage from '../pages/NotFoundPage'
|
||||
@@ -44,7 +47,10 @@ export const routes: RouteObject[] = [
|
||||
{ path: 'projects/:projectId/files', element: <FilesPage /> },
|
||||
{ path: 'admin/users', element: <AdminUsersPage /> },
|
||||
{ path: 'admin/api-keys', element: <AdminApiKeysPage /> },
|
||||
{ path: 'admin/pki', element: <AdminPKIPage /> },
|
||||
{ path: 'admin/principals', element: <AdminServicePrincipalsPage /> },
|
||||
{ path: 'admin/auth', element: <AdminAuthLdapPage /> },
|
||||
{ path: 'admin/tls', element: <AdminTLSSettingsPage /> },
|
||||
{ path: 'admin/auth/ldap', element: <AdminAuthLdapPage /> }
|
||||
]
|
||||
},
|
||||
|
||||
@@ -36,7 +36,7 @@ export default function AdminAuthLdapPage() {
|
||||
setLoading(true)
|
||||
api
|
||||
.getAuthSettings()
|
||||
.then((data) => setSettings(data))
|
||||
.then((data) => setSettings(data))
|
||||
.catch((err) => {
|
||||
const message = err instanceof Error ? err.message : 'Failed to load authentication settings'
|
||||
setError(message)
|
||||
@@ -74,11 +74,14 @@ export default function AdminAuthLdapPage() {
|
||||
setError(null)
|
||||
setTestResult(null)
|
||||
try {
|
||||
const result = await api.testAuthSettings({
|
||||
...settings,
|
||||
username: testUsername.trim() || undefined,
|
||||
password: testPassword || undefined
|
||||
}, controller.signal)
|
||||
const result = await api.testAuthSettings(
|
||||
{
|
||||
...settings,
|
||||
username: testUsername.trim() || undefined,
|
||||
password: testPassword || undefined
|
||||
},
|
||||
controller.signal
|
||||
)
|
||||
setTestResult(result.user ? `Connection ok. User test ok: ${result.user}` : 'Connection ok.')
|
||||
} catch (err) {
|
||||
if (err instanceof Error && err.name === 'AbortError') {
|
||||
@@ -107,7 +110,11 @@ export default function AdminAuthLdapPage() {
|
||||
Admin: Site Authentication
|
||||
</Typography>
|
||||
<Paper sx={{ p: 2, maxWidth: 820 }}>
|
||||
{loading ? <Typography variant="body2" color="text.secondary" sx={{ mb: 1 }}>Loading...</Typography> : null}
|
||||
{loading ? (
|
||||
<Typography variant="body2" color="text.secondary" sx={{ mb: 1 }}>
|
||||
Loading...
|
||||
</Typography>
|
||||
) : null}
|
||||
{error ? <Alert severity="error" sx={{ mb: 1 }}>{error}</Alert> : null}
|
||||
{saved ? <Alert severity="success" sx={{ mb: 1 }}>Saved.</Alert> : null}
|
||||
{testResult ? <Alert severity="success" sx={{ mb: 1 }}>{testResult}</Alert> : null}
|
||||
@@ -126,19 +133,10 @@ export default function AdminAuthLdapPage() {
|
||||
OIDC
|
||||
</Typography>
|
||||
<FormControlLabel
|
||||
control={
|
||||
<Checkbox
|
||||
checked={settings.oidc_enabled}
|
||||
onChange={(event) => setSettings((prev) => ({ ...prev, oidc_enabled: event.target.checked }))}
|
||||
/>
|
||||
}
|
||||
control={<Checkbox checked={settings.oidc_enabled} onChange={(event) => setSettings((prev) => ({ ...prev, oidc_enabled: event.target.checked }))} />}
|
||||
label="Enable OIDC login"
|
||||
/>
|
||||
<TextField
|
||||
label="Client ID"
|
||||
value={settings.oidc_client_id}
|
||||
onChange={(event) => setSettings((prev) => ({ ...prev, oidc_client_id: event.target.value }))}
|
||||
/>
|
||||
<TextField label="Client ID" value={settings.oidc_client_id} onChange={(event) => setSettings((prev) => ({ ...prev, oidc_client_id: event.target.value }))} />
|
||||
<TextField
|
||||
label="Client Secret"
|
||||
type="password"
|
||||
@@ -172,27 +170,14 @@ export default function AdminAuthLdapPage() {
|
||||
helperText="Example: openid profile email"
|
||||
/>
|
||||
<FormControlLabel
|
||||
control={
|
||||
<Checkbox
|
||||
checked={settings.oidc_tls_insecure_skip_verify}
|
||||
onChange={(event) => setSettings((prev) => ({ ...prev, oidc_tls_insecure_skip_verify: event.target.checked }))}
|
||||
/>
|
||||
}
|
||||
control={<Checkbox checked={settings.oidc_tls_insecure_skip_verify} onChange={(event) => setSettings((prev) => ({ ...prev, oidc_tls_insecure_skip_verify: event.target.checked }))} />}
|
||||
label="OIDC TLS insecure skip verify (testing/self-signed only)"
|
||||
/>
|
||||
<Typography variant="subtitle2" sx={{ mt: 1 }}>
|
||||
LDAP
|
||||
</Typography>
|
||||
<TextField
|
||||
label="LDAP URL"
|
||||
value={settings.ldap_url}
|
||||
onChange={(event) => setSettings((prev) => ({ ...prev, ldap_url: event.target.value }))}
|
||||
/>
|
||||
<TextField
|
||||
label="Bind DN"
|
||||
value={settings.ldap_bind_dn}
|
||||
onChange={(event) => setSettings((prev) => ({ ...prev, ldap_bind_dn: event.target.value }))}
|
||||
/>
|
||||
<TextField label="LDAP URL" value={settings.ldap_url} onChange={(event) => setSettings((prev) => ({ ...prev, ldap_url: event.target.value }))} />
|
||||
<TextField label="Bind DN" value={settings.ldap_bind_dn} onChange={(event) => setSettings((prev) => ({ ...prev, ldap_bind_dn: event.target.value }))} />
|
||||
<TextField
|
||||
label="Bind Password"
|
||||
type="password"
|
||||
@@ -211,28 +196,14 @@ export default function AdminAuthLdapPage() {
|
||||
helperText="Use {username} placeholder."
|
||||
/>
|
||||
<FormControlLabel
|
||||
control={
|
||||
<Checkbox
|
||||
checked={settings.ldap_tls_insecure_skip_verify}
|
||||
onChange={(event) => setSettings((prev) => ({ ...prev, ldap_tls_insecure_skip_verify: event.target.checked }))}
|
||||
/>
|
||||
}
|
||||
control={<Checkbox checked={settings.ldap_tls_insecure_skip_verify} onChange={(event) => setSettings((prev) => ({ ...prev, ldap_tls_insecure_skip_verify: event.target.checked }))} />}
|
||||
label="TLS insecure skip verify (testing/self-signed only)"
|
||||
/>
|
||||
<Typography variant="subtitle2" sx={{ mt: 1 }}>
|
||||
Test (optional user bind)
|
||||
</Typography>
|
||||
<TextField
|
||||
label="Test Username"
|
||||
value={testUsername}
|
||||
onChange={(event) => setTestUsername(event.target.value)}
|
||||
/>
|
||||
<TextField
|
||||
label="Test Password"
|
||||
type="password"
|
||||
value={testPassword}
|
||||
onChange={(event) => setTestPassword(event.target.value)}
|
||||
/>
|
||||
<TextField label="Test Username" value={testUsername} onChange={(event) => setTestUsername(event.target.value)} />
|
||||
<TextField label="Test Password" type="password" value={testPassword} onChange={(event) => setTestPassword(event.target.value)} />
|
||||
<Box sx={{ display: 'flex', justifyContent: 'space-between', mt: 1 }}>
|
||||
<Button variant="outlined" onClick={handleTest} color={testing ? 'warning' : 'primary'}>
|
||||
{testing ? 'Cancel Test' : 'Test Connection'}
|
||||
|
||||
888
frontend/src/pages/AdminPKIPage.tsx
Normal file
888
frontend/src/pages/AdminPKIPage.tsx
Normal file
@@ -0,0 +1,888 @@
|
||||
import AddIcon from '@mui/icons-material/Add'
|
||||
import BlockIcon from '@mui/icons-material/Block'
|
||||
import DeleteIcon from '@mui/icons-material/Delete'
|
||||
import DownloadIcon from '@mui/icons-material/Download'
|
||||
import EditIcon from '@mui/icons-material/Edit'
|
||||
import VisibilityIcon from '@mui/icons-material/Visibility'
|
||||
import Alert from '@mui/material/Alert'
|
||||
import {
|
||||
Box,
|
||||
Button,
|
||||
Checkbox,
|
||||
Dialog,
|
||||
DialogActions,
|
||||
DialogContent,
|
||||
DialogTitle,
|
||||
FormControlLabel,
|
||||
IconButton,
|
||||
List,
|
||||
ListItem,
|
||||
ListItemText,
|
||||
MenuItem,
|
||||
Paper,
|
||||
TextField,
|
||||
Typography
|
||||
} from '@mui/material'
|
||||
import { useEffect, useState } from 'react'
|
||||
import { api, PKICA, PKICADetail, PKICert, PKICertDetail } from '../api'
|
||||
|
||||
function fmt(ts: number): string {
|
||||
if (!ts || ts <= 0) {
|
||||
return '-'
|
||||
}
|
||||
return new Date(ts * 1000).toLocaleString()
|
||||
}
|
||||
|
||||
function downloadText(filename: string, text: string) {
|
||||
const blob = new Blob([text], { type: 'text/plain;charset=utf-8' })
|
||||
const url = URL.createObjectURL(blob)
|
||||
const link = document.createElement('a')
|
||||
link.href = url
|
||||
link.download = filename
|
||||
document.body.appendChild(link)
|
||||
link.click()
|
||||
document.body.removeChild(link)
|
||||
URL.revokeObjectURL(url)
|
||||
}
|
||||
|
||||
function downloadBinary(filename: string, data: ArrayBuffer, contentType: string) {
|
||||
const blob = new Blob([data], { type: contentType })
|
||||
const url = URL.createObjectURL(blob)
|
||||
const link = document.createElement('a')
|
||||
link.href = url
|
||||
link.download = filename
|
||||
document.body.appendChild(link)
|
||||
link.click()
|
||||
document.body.removeChild(link)
|
||||
URL.revokeObjectURL(url)
|
||||
}
|
||||
|
||||
async function readFileText(file: File): Promise<string> {
|
||||
return file.text()
|
||||
}
|
||||
|
||||
export default function AdminPKIPage() {
|
||||
const [cas, setCAs] = useState<PKICA[]>([])
|
||||
const [certs, setCerts] = useState<PKICert[]>([])
|
||||
const [selectedCA, setSelectedCA] = useState('')
|
||||
const [error, setError] = useState<string | null>(null)
|
||||
const [dialogError, setDialogError] = useState<string | null>(null)
|
||||
|
||||
const [rootOpen, setRootOpen] = useState(false)
|
||||
const [interOpen, setInterOpen] = useState(false)
|
||||
const [issueOpen, setIssueOpen] = useState(false)
|
||||
const [importOpen, setImportOpen] = useState(false)
|
||||
const [revokeID, setRevokeID] = useState('')
|
||||
const [revokeReason, setRevokeReason] = useState('')
|
||||
const [deleteID, setDeleteID] = useState('')
|
||||
const [deleteCAID, setDeleteCAID] = useState('')
|
||||
const [deleteCAName, setDeleteCAName] = useState('')
|
||||
const [deleteCAConfirm, setDeleteCAConfirm] = useState('')
|
||||
const [deleteCAForce, setDeleteCAForce] = useState(false)
|
||||
const [busy, setBusy] = useState(false)
|
||||
const [viewCA, setViewCA] = useState<PKICADetail | null>(null)
|
||||
const [viewCert, setViewCert] = useState<PKICertDetail | null>(null)
|
||||
const [editCAID, setEditCAID] = useState('')
|
||||
const [editCAName, setEditCAName] = useState('')
|
||||
|
||||
const [rootName, setRootName] = useState('')
|
||||
const [rootCN, setRootCN] = useState('')
|
||||
const [rootDays, setRootDays] = useState('3650')
|
||||
const [rootCertPEM, setRootCertPEM] = useState('')
|
||||
const [rootKeyPEM, setRootKeyPEM] = useState('')
|
||||
|
||||
const [interName, setInterName] = useState('')
|
||||
const [interParent, setInterParent] = useState('')
|
||||
const [interCN, setInterCN] = useState('')
|
||||
const [interDays, setInterDays] = useState('1825')
|
||||
const [interCertPEM, setInterCertPEM] = useState('')
|
||||
const [interKeyPEM, setInterKeyPEM] = useState('')
|
||||
|
||||
const [issueCA, setIssueCA] = useState('')
|
||||
const [issueCN, setIssueCN] = useState('')
|
||||
const [issueDNS, setIssueDNS] = useState('')
|
||||
const [issueIPs, setIssueIPs] = useState('')
|
||||
const [issueDays, setIssueDays] = useState('365')
|
||||
const [issueIsCA, setIssueIsCA] = useState(false)
|
||||
const [importCA, setImportCA] = useState('')
|
||||
const [importCertPEM, setImportCertPEM] = useState('')
|
||||
const [importKeyPEM, setImportKeyPEM] = useState('')
|
||||
|
||||
const loadTextFile = async (event: React.ChangeEvent<HTMLInputElement>, setter: (value: string) => void) => {
|
||||
const file = event.target.files && event.target.files[0]
|
||||
let text: string
|
||||
if (!file) {
|
||||
return
|
||||
}
|
||||
text = await readFileText(file)
|
||||
setter(text)
|
||||
event.target.value = ''
|
||||
}
|
||||
|
||||
const load = async () => {
|
||||
let listCAs: PKICA[]
|
||||
let listCerts: PKICert[]
|
||||
listCAs = await api.listPKICAs()
|
||||
setCAs(Array.isArray(listCAs) ? listCAs : [])
|
||||
listCerts = await api.listPKICerts(selectedCA || undefined)
|
||||
setCerts(Array.isArray(listCerts) ? listCerts : [])
|
||||
}
|
||||
|
||||
useEffect(() => {
|
||||
load().catch((err) => setError(err instanceof Error ? err.message : 'Failed to load PKI data'))
|
||||
}, [])
|
||||
|
||||
useEffect(() => {
|
||||
api
|
||||
.listPKICerts(selectedCA || undefined)
|
||||
.then((list) => setCerts(Array.isArray(list) ? list : []))
|
||||
.catch((err) => setError(err instanceof Error ? err.message : 'Failed to load certificates'))
|
||||
}, [selectedCA])
|
||||
|
||||
const createRoot = async () => {
|
||||
let days: number
|
||||
if (!rootName.trim()) {
|
||||
setDialogError('Name is required.')
|
||||
return
|
||||
}
|
||||
days = Number(rootDays) || 3650
|
||||
setBusy(true)
|
||||
setError(null)
|
||||
setDialogError(null)
|
||||
try {
|
||||
await api.createPKIRootCA({
|
||||
name: rootName.trim(),
|
||||
common_name: rootCN.trim(),
|
||||
days: days,
|
||||
cert_pem: rootCertPEM.trim() || undefined,
|
||||
key_pem: rootKeyPEM.trim() || undefined
|
||||
})
|
||||
setRootOpen(false)
|
||||
await load()
|
||||
} catch (err) {
|
||||
setDialogError(err instanceof Error ? err.message : 'Failed to create root CA')
|
||||
} finally {
|
||||
setBusy(false)
|
||||
}
|
||||
}
|
||||
|
||||
const createIntermediate = async () => {
|
||||
let days: number
|
||||
if (!interName.trim() || !interParent) {
|
||||
setDialogError('Name and parent CA are required.')
|
||||
return
|
||||
}
|
||||
days = Number(interDays) || 1825
|
||||
setBusy(true)
|
||||
setError(null)
|
||||
setDialogError(null)
|
||||
try {
|
||||
await api.createPKIIntermediateCA({
|
||||
name: interName.trim(),
|
||||
parent_ca_id: interParent,
|
||||
common_name: interCN.trim(),
|
||||
days: days,
|
||||
cert_pem: interCertPEM.trim() || undefined,
|
||||
key_pem: interKeyPEM.trim() || undefined
|
||||
})
|
||||
setInterOpen(false)
|
||||
setInterCertPEM('')
|
||||
setInterKeyPEM('')
|
||||
await load()
|
||||
} catch (err) {
|
||||
setDialogError(err instanceof Error ? err.message : 'Failed to create intermediate CA')
|
||||
} finally {
|
||||
setBusy(false)
|
||||
}
|
||||
}
|
||||
|
||||
const issueCert = async () => {
|
||||
let days: number
|
||||
let dns: string[]
|
||||
let ips: string[]
|
||||
if (!issueCA || !issueCN.trim()) {
|
||||
setDialogError('Issuer CA and common name are required.')
|
||||
return
|
||||
}
|
||||
days = Number(issueDays) || 365
|
||||
dns = issueDNS
|
||||
.split(',')
|
||||
.map((v) => v.trim())
|
||||
.filter((v) => v)
|
||||
ips = issueIPs
|
||||
.split(',')
|
||||
.map((v) => v.trim())
|
||||
.filter((v) => v)
|
||||
setBusy(true)
|
||||
setError(null)
|
||||
setDialogError(null)
|
||||
try {
|
||||
await api.issuePKICert({
|
||||
ca_id: issueCA,
|
||||
common_name: issueCN.trim(),
|
||||
san_dns: dns,
|
||||
san_ips: ips,
|
||||
days: days,
|
||||
is_ca: issueIsCA
|
||||
})
|
||||
setIssueOpen(false)
|
||||
await load()
|
||||
} catch (err) {
|
||||
setDialogError(err instanceof Error ? err.message : 'Failed to issue certificate')
|
||||
} finally {
|
||||
setBusy(false)
|
||||
}
|
||||
}
|
||||
|
||||
const importCert = async () => {
|
||||
let payload: { ca_id?: string; cert_pem: string; key_pem: string }
|
||||
if (!importCertPEM.trim() || !importKeyPEM.trim()) {
|
||||
setDialogError('Certificate PEM and private key PEM are required.')
|
||||
return
|
||||
}
|
||||
payload = { cert_pem: importCertPEM.trim(), key_pem: importKeyPEM.trim() }
|
||||
if (importCA.trim() != "") {
|
||||
payload.ca_id = importCA.trim()
|
||||
}
|
||||
setBusy(true)
|
||||
setError(null)
|
||||
setDialogError(null)
|
||||
try {
|
||||
await api.importPKICert(payload)
|
||||
setImportOpen(false)
|
||||
setImportCA('')
|
||||
setImportCertPEM('')
|
||||
setImportKeyPEM('')
|
||||
await load()
|
||||
} catch (err) {
|
||||
setDialogError(err instanceof Error ? err.message : 'Failed to import certificate')
|
||||
} finally {
|
||||
setBusy(false)
|
||||
}
|
||||
}
|
||||
|
||||
const revokeCert = async () => {
|
||||
setBusy(true)
|
||||
setError(null)
|
||||
setDialogError(null)
|
||||
try {
|
||||
await api.revokePKICert(revokeID, revokeReason)
|
||||
setRevokeID('')
|
||||
setRevokeReason('')
|
||||
await load()
|
||||
} catch (err) {
|
||||
setDialogError(err instanceof Error ? err.message : 'Failed to revoke certificate')
|
||||
} finally {
|
||||
setBusy(false)
|
||||
}
|
||||
}
|
||||
|
||||
const deleteCert = async () => {
|
||||
setBusy(true)
|
||||
setError(null)
|
||||
setDialogError(null)
|
||||
try {
|
||||
await api.deletePKICert(deleteID)
|
||||
setDeleteID('')
|
||||
await load()
|
||||
} catch (err) {
|
||||
setDialogError(err instanceof Error ? err.message : 'Failed to delete certificate')
|
||||
} finally {
|
||||
setBusy(false)
|
||||
}
|
||||
}
|
||||
|
||||
const deleteCA = async () => {
|
||||
setBusy(true)
|
||||
setError(null)
|
||||
setDialogError(null)
|
||||
try {
|
||||
await api.deletePKICA(deleteCAID, deleteCAForce)
|
||||
setDeleteCAID('')
|
||||
setDeleteCAName('')
|
||||
setDeleteCAConfirm('')
|
||||
setDeleteCAForce(false)
|
||||
await load()
|
||||
} catch (err) {
|
||||
setDialogError(err instanceof Error ? err.message : 'Failed to delete CA')
|
||||
} finally {
|
||||
setBusy(false)
|
||||
}
|
||||
}
|
||||
|
||||
const openCAView = async (id: string) => {
|
||||
let detail: PKICADetail
|
||||
setDialogError(null)
|
||||
detail = await api.getPKICA(id)
|
||||
setViewCA(detail)
|
||||
}
|
||||
|
||||
const openCAEdit = async (id: string) => {
|
||||
let detail: PKICADetail
|
||||
setDialogError(null)
|
||||
detail = await api.getPKICA(id)
|
||||
setEditCAID(detail.id)
|
||||
setEditCAName(detail.name)
|
||||
}
|
||||
|
||||
const saveCAEdit = async () => {
|
||||
setBusy(true)
|
||||
setDialogError(null)
|
||||
try {
|
||||
await api.updatePKICA(editCAID, { name: editCAName.trim() })
|
||||
setEditCAID('')
|
||||
setEditCAName('')
|
||||
await load()
|
||||
} catch (err) {
|
||||
setDialogError(err instanceof Error ? err.message : 'Failed to update CA')
|
||||
} finally {
|
||||
setBusy(false)
|
||||
}
|
||||
}
|
||||
|
||||
const openCertView = async (id: string) => {
|
||||
let detail: PKICertDetail
|
||||
setDialogError(null)
|
||||
detail = await api.getPKICert(id)
|
||||
setViewCert(detail)
|
||||
}
|
||||
|
||||
return (
|
||||
<Box>
|
||||
<Box sx={{ display: 'flex', alignItems: 'center', justifyContent: 'space-between', mb: 2 }}>
|
||||
<Typography variant="h5">Admin: PKI</Typography>
|
||||
<Box sx={{ display: 'flex', gap: 1 }}>
|
||||
<Button variant="outlined" startIcon={<AddIcon />} onClick={() => { setDialogError(null); setRootOpen(true) }}>
|
||||
New Root CA
|
||||
</Button>
|
||||
<Button
|
||||
variant="outlined"
|
||||
startIcon={<AddIcon />}
|
||||
onClick={() => {
|
||||
setDialogError(null)
|
||||
setInterCertPEM('')
|
||||
setInterKeyPEM('')
|
||||
setInterOpen(true)
|
||||
}}
|
||||
>
|
||||
New Intermediate CA
|
||||
</Button>
|
||||
<Button variant="outlined" startIcon={<AddIcon />} onClick={() => { setDialogError(null); setIssueOpen(true) }}>
|
||||
Issue Certificate
|
||||
</Button>
|
||||
<Button
|
||||
variant="outlined"
|
||||
startIcon={<AddIcon />}
|
||||
onClick={() => {
|
||||
setDialogError(null)
|
||||
setImportCA('')
|
||||
setImportCertPEM('')
|
||||
setImportKeyPEM('')
|
||||
setImportOpen(true)
|
||||
}}
|
||||
>
|
||||
Import Certificate
|
||||
</Button>
|
||||
</Box>
|
||||
</Box>
|
||||
{error ? <Alert severity="error" sx={{ mb: 1 }}>{error}</Alert> : null}
|
||||
|
||||
<Paper sx={{ p: 2, mb: 2 }}>
|
||||
<Typography variant="h6" sx={{ mb: 1 }}>Certificate Authorities</Typography>
|
||||
<List>
|
||||
{cas.map((ca) => (
|
||||
<ListItem
|
||||
key={ca.id}
|
||||
divider
|
||||
secondaryAction={
|
||||
<Box sx={{ display: 'flex', alignItems: 'center', gap: 0.5 }}>
|
||||
<Button
|
||||
size="small"
|
||||
onClick={async () => {
|
||||
const data = await api.getPKICRL(ca.id)
|
||||
const blob = new Blob([data.crl_pem], { type: 'application/x-pem-file' })
|
||||
const url = URL.createObjectURL(blob)
|
||||
window.open(url, '_blank')
|
||||
}}
|
||||
>
|
||||
CRL
|
||||
</Button>
|
||||
<IconButton size="small" onClick={() => openCAView(ca.id)} title="View details">
|
||||
<VisibilityIcon fontSize="small" />
|
||||
</IconButton>
|
||||
<IconButton size="small" onClick={() => openCAEdit(ca.id)} title="Edit CA">
|
||||
<EditIcon fontSize="small" />
|
||||
</IconButton>
|
||||
<IconButton
|
||||
size="small"
|
||||
color="error"
|
||||
onClick={() => {
|
||||
setDialogError(null)
|
||||
setDeleteCAID(ca.id)
|
||||
setDeleteCAName(ca.name)
|
||||
setDeleteCAConfirm('')
|
||||
setDeleteCAForce(false)
|
||||
}}
|
||||
title="Delete CA"
|
||||
>
|
||||
<DeleteIcon fontSize="small" />
|
||||
</IconButton>
|
||||
</Box>
|
||||
}
|
||||
>
|
||||
<ListItemText
|
||||
primary={`${ca.name} (${ca.id})`}
|
||||
secondary={`${ca.is_root ? 'root' : 'intermediate'} · status: ${ca.status} · parent: ${ca.parent_ca_id || '-'} · updated: ${fmt(ca.updated_at)}`}
|
||||
/>
|
||||
</ListItem>
|
||||
))}
|
||||
</List>
|
||||
</Paper>
|
||||
|
||||
<Paper sx={{ p: 2 }}>
|
||||
<Box sx={{ display: 'flex', alignItems: 'center', justifyContent: 'space-between', mb: 1 }}>
|
||||
<Typography variant="h6">Issued Certificates</Typography>
|
||||
<TextField
|
||||
select
|
||||
size="small"
|
||||
label="CA"
|
||||
value={selectedCA}
|
||||
onChange={(event) => setSelectedCA(event.target.value)}
|
||||
sx={{ minWidth: 280 }}
|
||||
>
|
||||
<MenuItem value="">(all)</MenuItem>
|
||||
<MenuItem value="standalone">(standalone)</MenuItem>
|
||||
{cas.map((ca) => (
|
||||
<MenuItem key={ca.id} value={ca.id}>{ca.name}</MenuItem>
|
||||
))}
|
||||
</TextField>
|
||||
</Box>
|
||||
<List>
|
||||
{certs.map((cert) => (
|
||||
<ListItem
|
||||
key={cert.id}
|
||||
divider
|
||||
secondaryAction={
|
||||
<Box sx={{ display: 'flex', alignItems: 'center', gap: 0.5 }}>
|
||||
<IconButton size="small" onClick={() => openCertView(cert.id)} title="View details">
|
||||
<VisibilityIcon fontSize="small" />
|
||||
</IconButton>
|
||||
<IconButton
|
||||
size="small"
|
||||
color="warning"
|
||||
onClick={() => setRevokeID(cert.id)}
|
||||
disabled={cert.status === 'revoked'}
|
||||
title="Revoke"
|
||||
>
|
||||
<BlockIcon fontSize="small" />
|
||||
</IconButton>
|
||||
<IconButton size="small" color="error" onClick={() => setDeleteID(cert.id)} title="Delete">
|
||||
<DeleteIcon fontSize="small" />
|
||||
</IconButton>
|
||||
</Box>
|
||||
}
|
||||
>
|
||||
<ListItemText
|
||||
primary={`${cert.common_name} (${cert.id})`}
|
||||
secondary={`serial: ${cert.serial_hex} · ca: ${cert.ca_id || 'standalone'} · status: ${cert.status} · valid: ${fmt(cert.not_before)} ~ ${fmt(cert.not_after)}${cert.revoked_at ? ` · revoked: ${fmt(cert.revoked_at)}` : ''}`}
|
||||
/>
|
||||
</ListItem>
|
||||
))}
|
||||
</List>
|
||||
</Paper>
|
||||
|
||||
<Dialog open={rootOpen} onClose={() => setRootOpen(false)} maxWidth="md" fullWidth>
|
||||
<DialogTitle>
|
||||
<Box sx={{ display: 'grid', gap: 1 }}>
|
||||
<Typography variant="h6">New Root CA</Typography>
|
||||
{dialogError ? <Alert severity="error">{dialogError}</Alert> : null}
|
||||
</Box>
|
||||
</DialogTitle>
|
||||
<DialogContent>
|
||||
<Box sx={{ display: 'grid', gap: 1, mt: 1 }}>
|
||||
<TextField label="Name" value={rootName} onChange={(event) => setRootName(event.target.value)} />
|
||||
<TextField label="Common Name" value={rootCN} onChange={(event) => setRootCN(event.target.value)} />
|
||||
<TextField label="Validity Days" value={rootDays} onChange={(event) => setRootDays(event.target.value)} />
|
||||
<TextField
|
||||
label="Import Certificate PEM (optional)"
|
||||
multiline
|
||||
minRows={6}
|
||||
value={rootCertPEM}
|
||||
onChange={(event) => setRootCertPEM(event.target.value)}
|
||||
sx={{ '& .MuiInputBase-input': { fontFamily: 'ui-monospace, SFMono-Regular, Menlo, Monaco, Consolas, monospace' } }}
|
||||
/>
|
||||
<Box>
|
||||
<Button variant="outlined" component="label" size="small">
|
||||
Load Certificate File
|
||||
<input hidden type="file" accept=".pem,.crt,.cer,.txt" onChange={(event) => loadTextFile(event, setRootCertPEM)} />
|
||||
</Button>
|
||||
</Box>
|
||||
<TextField
|
||||
label="Import Private Key PEM (optional)"
|
||||
multiline
|
||||
minRows={6}
|
||||
value={rootKeyPEM}
|
||||
onChange={(event) => setRootKeyPEM(event.target.value)}
|
||||
sx={{ '& .MuiInputBase-input': { fontFamily: 'ui-monospace, SFMono-Regular, Menlo, Monaco, Consolas, monospace' } }}
|
||||
/>
|
||||
<Box>
|
||||
<Button variant="outlined" component="label" size="small">
|
||||
Load Private Key File
|
||||
<input hidden type="file" accept=".pem,.key,.txt" onChange={(event) => loadTextFile(event, setRootKeyPEM)} />
|
||||
</Button>
|
||||
</Box>
|
||||
</Box>
|
||||
</DialogContent>
|
||||
<DialogActions>
|
||||
<Button onClick={() => setRootOpen(false)}>Cancel</Button>
|
||||
<Button variant="contained" onClick={createRoot} disabled={busy}>{busy ? 'Saving...' : 'Create'}</Button>
|
||||
</DialogActions>
|
||||
</Dialog>
|
||||
|
||||
<Dialog open={interOpen} onClose={() => setInterOpen(false)} maxWidth="md" fullWidth>
|
||||
<DialogTitle>
|
||||
<Box sx={{ display: 'grid', gap: 1 }}>
|
||||
<Typography variant="h6">New Intermediate CA</Typography>
|
||||
{dialogError ? <Alert severity="error">{dialogError}</Alert> : null}
|
||||
</Box>
|
||||
</DialogTitle>
|
||||
<DialogContent>
|
||||
<Box sx={{ display: 'grid', gap: 1, mt: 1 }}>
|
||||
<TextField label="Name" value={interName} onChange={(event) => setInterName(event.target.value)} />
|
||||
<TextField
|
||||
select
|
||||
label="Parent CA"
|
||||
value={interParent}
|
||||
onChange={(event) => setInterParent(event.target.value)}
|
||||
>
|
||||
{cas.map((ca) => (
|
||||
<MenuItem key={ca.id} value={ca.id}>{ca.name}</MenuItem>
|
||||
))}
|
||||
</TextField>
|
||||
<TextField label="Common Name" value={interCN} onChange={(event) => setInterCN(event.target.value)} />
|
||||
<TextField label="Validity Days" value={interDays} onChange={(event) => setInterDays(event.target.value)} />
|
||||
<TextField
|
||||
label="Import Intermediate Certificate PEM (optional)"
|
||||
multiline
|
||||
minRows={6}
|
||||
value={interCertPEM}
|
||||
onChange={(event) => setInterCertPEM(event.target.value)}
|
||||
sx={{ '& .MuiInputBase-input': { fontFamily: 'ui-monospace, SFMono-Regular, Menlo, Monaco, Consolas, monospace' } }}
|
||||
/>
|
||||
<Box>
|
||||
<Button variant="outlined" component="label" size="small">
|
||||
Load Intermediate Certificate File
|
||||
<input hidden type="file" accept=".pem,.crt,.cer,.txt" onChange={(event) => loadTextFile(event, setInterCertPEM)} />
|
||||
</Button>
|
||||
</Box>
|
||||
<TextField
|
||||
label="Import Intermediate Private Key PEM (optional)"
|
||||
multiline
|
||||
minRows={6}
|
||||
value={interKeyPEM}
|
||||
onChange={(event) => setInterKeyPEM(event.target.value)}
|
||||
sx={{ '& .MuiInputBase-input': { fontFamily: 'ui-monospace, SFMono-Regular, Menlo, Monaco, Consolas, monospace' } }}
|
||||
/>
|
||||
<Box>
|
||||
<Button variant="outlined" component="label" size="small">
|
||||
Load Intermediate Private Key File
|
||||
<input hidden type="file" accept=".pem,.key,.txt" onChange={(event) => loadTextFile(event, setInterKeyPEM)} />
|
||||
</Button>
|
||||
</Box>
|
||||
</Box>
|
||||
</DialogContent>
|
||||
<DialogActions>
|
||||
<Button onClick={() => setInterOpen(false)}>Cancel</Button>
|
||||
<Button variant="contained" onClick={createIntermediate} disabled={busy}>{busy ? 'Saving...' : 'Create'}</Button>
|
||||
</DialogActions>
|
||||
</Dialog>
|
||||
|
||||
<Dialog open={issueOpen} onClose={() => setIssueOpen(false)} maxWidth="sm" fullWidth>
|
||||
<DialogTitle>
|
||||
<Box sx={{ display: 'grid', gap: 1 }}>
|
||||
<Typography variant="h6">Issue Certificate</Typography>
|
||||
{dialogError ? <Alert severity="error">{dialogError}</Alert> : null}
|
||||
</Box>
|
||||
</DialogTitle>
|
||||
<DialogContent>
|
||||
<Box sx={{ display: 'grid', gap: 1, mt: 1 }}>
|
||||
<TextField select label="Issuer CA" value={issueCA} onChange={(event) => setIssueCA(event.target.value)}>
|
||||
{cas.map((ca) => (
|
||||
<MenuItem key={ca.id} value={ca.id}>{ca.name}</MenuItem>
|
||||
))}
|
||||
</TextField>
|
||||
<TextField label="Common Name" value={issueCN} onChange={(event) => setIssueCN(event.target.value)} />
|
||||
<TextField label="SAN DNS (comma-separated)" value={issueDNS} onChange={(event) => setIssueDNS(event.target.value)} />
|
||||
<TextField label="SAN IPs (comma-separated)" value={issueIPs} onChange={(event) => setIssueIPs(event.target.value)} />
|
||||
<TextField label="Validity Days" value={issueDays} onChange={(event) => setIssueDays(event.target.value)} />
|
||||
<FormControlLabel control={<Checkbox checked={issueIsCA} onChange={(event) => setIssueIsCA(event.target.checked)} />} label="Issue as CA certificate" />
|
||||
</Box>
|
||||
</DialogContent>
|
||||
<DialogActions>
|
||||
<Button onClick={() => setIssueOpen(false)}>Cancel</Button>
|
||||
<Button variant="contained" onClick={issueCert} disabled={busy}>{busy ? 'Saving...' : 'Issue'}</Button>
|
||||
</DialogActions>
|
||||
</Dialog>
|
||||
|
||||
<Dialog open={importOpen} onClose={() => setImportOpen(false)} maxWidth="md" fullWidth>
|
||||
<DialogTitle>
|
||||
<Box sx={{ display: 'grid', gap: 1 }}>
|
||||
<Typography variant="h6">Import Certificate</Typography>
|
||||
{dialogError ? <Alert severity="error">{dialogError}</Alert> : null}
|
||||
</Box>
|
||||
</DialogTitle>
|
||||
<DialogContent>
|
||||
<Box sx={{ display: 'grid', gap: 1, mt: 1 }}>
|
||||
<TextField select label="Issuer CA (optional)" value={importCA} onChange={(event) => setImportCA(event.target.value)}>
|
||||
<MenuItem value="">(none, standalone)</MenuItem>
|
||||
{cas.map((ca) => (
|
||||
<MenuItem key={ca.id} value={ca.id}>{ca.name}</MenuItem>
|
||||
))}
|
||||
</TextField>
|
||||
<TextField
|
||||
label="Certificate PEM"
|
||||
multiline
|
||||
minRows={6}
|
||||
value={importCertPEM}
|
||||
onChange={(event) => setImportCertPEM(event.target.value)}
|
||||
sx={{ '& .MuiInputBase-input': { fontFamily: 'ui-monospace, SFMono-Regular, Menlo, Monaco, Consolas, monospace' } }}
|
||||
/>
|
||||
<Box>
|
||||
<Button variant="outlined" component="label" size="small">
|
||||
Load Certificate File
|
||||
<input hidden type="file" accept=".pem,.crt,.cer,.txt" onChange={(event) => loadTextFile(event, setImportCertPEM)} />
|
||||
</Button>
|
||||
</Box>
|
||||
<TextField
|
||||
label="Private Key PEM"
|
||||
multiline
|
||||
minRows={6}
|
||||
value={importKeyPEM}
|
||||
onChange={(event) => setImportKeyPEM(event.target.value)}
|
||||
sx={{ '& .MuiInputBase-input': { fontFamily: 'ui-monospace, SFMono-Regular, Menlo, Monaco, Consolas, monospace' } }}
|
||||
/>
|
||||
<Box>
|
||||
<Button variant="outlined" component="label" size="small">
|
||||
Load Private Key File
|
||||
<input hidden type="file" accept=".pem,.key,.txt" onChange={(event) => loadTextFile(event, setImportKeyPEM)} />
|
||||
</Button>
|
||||
</Box>
|
||||
</Box>
|
||||
</DialogContent>
|
||||
<DialogActions>
|
||||
<Button onClick={() => setImportOpen(false)}>Cancel</Button>
|
||||
<Button variant="contained" onClick={importCert} disabled={busy}>{busy ? 'Saving...' : 'Import'}</Button>
|
||||
</DialogActions>
|
||||
</Dialog>
|
||||
|
||||
<Dialog open={Boolean(revokeID)} onClose={() => setRevokeID('')} maxWidth="xs" fullWidth>
|
||||
<DialogTitle>
|
||||
<Box sx={{ display: 'grid', gap: 1 }}>
|
||||
<Typography variant="h6">Revoke Certificate</Typography>
|
||||
{dialogError ? <Alert severity="error">{dialogError}</Alert> : null}
|
||||
</Box>
|
||||
</DialogTitle>
|
||||
<DialogContent>
|
||||
<TextField fullWidth label="Reason (optional)" value={revokeReason} onChange={(event) => setRevokeReason(event.target.value)} />
|
||||
</DialogContent>
|
||||
<DialogActions>
|
||||
<Button onClick={() => setRevokeID('')}>Cancel</Button>
|
||||
<Button color="warning" variant="contained" onClick={revokeCert} disabled={busy}>{busy ? 'Working...' : 'Revoke'}</Button>
|
||||
</DialogActions>
|
||||
</Dialog>
|
||||
|
||||
<Dialog open={Boolean(deleteID)} onClose={() => setDeleteID('')} maxWidth="xs" fullWidth>
|
||||
<DialogTitle>
|
||||
<Box sx={{ display: 'grid', gap: 1 }}>
|
||||
<Typography variant="h6">Delete Certificate</Typography>
|
||||
{dialogError ? <Alert severity="error">{dialogError}</Alert> : null}
|
||||
</Box>
|
||||
</DialogTitle>
|
||||
<DialogContent>
|
||||
<Typography variant="body2" color="text.secondary">Delete certificate permanently?</Typography>
|
||||
</DialogContent>
|
||||
<DialogActions>
|
||||
<Button onClick={() => setDeleteID('')}>Cancel</Button>
|
||||
<Button color="error" variant="contained" onClick={deleteCert} disabled={busy}>{busy ? 'Working...' : 'Delete'}</Button>
|
||||
</DialogActions>
|
||||
</Dialog>
|
||||
|
||||
<Dialog open={Boolean(deleteCAID)} onClose={() => setDeleteCAID('')} maxWidth="xs" fullWidth>
|
||||
<DialogTitle>
|
||||
<Box sx={{ display: 'grid', gap: 1 }}>
|
||||
<Typography variant="h6">Delete Certificate Authority</Typography>
|
||||
{dialogError ? <Alert severity="error">{dialogError}</Alert> : null}
|
||||
</Box>
|
||||
</DialogTitle>
|
||||
<DialogContent>
|
||||
<Typography variant="body2" color="text.secondary" sx={{ mb: 1 }}>
|
||||
Type the CA name to confirm deletion.
|
||||
</Typography>
|
||||
<TextField fullWidth label="CA Name" value={deleteCAConfirm} onChange={(event) => setDeleteCAConfirm(event.target.value)} />
|
||||
<FormControlLabel
|
||||
control={<Checkbox checked={deleteCAForce} onChange={(event) => setDeleteCAForce(event.target.checked)} />}
|
||||
label="Force delete (includes child CAs and issued certs)"
|
||||
/>
|
||||
</DialogContent>
|
||||
<DialogActions>
|
||||
<Button onClick={() => setDeleteCAID('')}>Cancel</Button>
|
||||
<Button
|
||||
color="error"
|
||||
variant="contained"
|
||||
onClick={deleteCA}
|
||||
disabled={busy || deleteCAConfirm !== deleteCAName}
|
||||
>
|
||||
{busy ? 'Working...' : 'Delete CA'}
|
||||
</Button>
|
||||
</DialogActions>
|
||||
</Dialog>
|
||||
|
||||
<Dialog open={Boolean(viewCA)} onClose={() => setViewCA(null)} maxWidth="md" fullWidth>
|
||||
<DialogTitle>CA Details</DialogTitle>
|
||||
<DialogContent>
|
||||
<Box sx={{ display: 'grid', gap: 1, mt: 1 }}>
|
||||
<TextField label="ID" value={viewCA?.id || ''} InputProps={{ readOnly: true }} />
|
||||
<TextField label="Name" value={viewCA?.name || ''} InputProps={{ readOnly: true }} />
|
||||
<TextField label="Parent CA" value={viewCA?.parent_ca_id || '-'} InputProps={{ readOnly: true }} />
|
||||
<TextField label="Status" value={viewCA?.status || ''} InputProps={{ readOnly: true }} />
|
||||
<TextField
|
||||
label="Certificate PEM"
|
||||
multiline
|
||||
minRows={8}
|
||||
value={viewCA?.cert_pem || ''}
|
||||
InputProps={{ readOnly: true }}
|
||||
sx={{ '& .MuiInputBase-input': { fontFamily: 'ui-monospace, SFMono-Regular, Menlo, Monaco, Consolas, monospace' } }}
|
||||
/>
|
||||
<TextField
|
||||
label="Private Key PEM"
|
||||
multiline
|
||||
minRows={8}
|
||||
value={viewCA?.key_pem || ''}
|
||||
InputProps={{ readOnly: true }}
|
||||
sx={{ '& .MuiInputBase-input': { fontFamily: 'ui-monospace, SFMono-Regular, Menlo, Monaco, Consolas, monospace' } }}
|
||||
/>
|
||||
</Box>
|
||||
</DialogContent>
|
||||
<DialogActions>
|
||||
<Button
|
||||
startIcon={<DownloadIcon />}
|
||||
onClick={async () => {
|
||||
if (!viewCA) return
|
||||
const data = await api.downloadPKICABundle(viewCA.id)
|
||||
downloadBinary(`${viewCA.name || viewCA.id}.ca.bundle.zip`, data, 'application/zip')
|
||||
}}
|
||||
>
|
||||
Download Bundle
|
||||
</Button>
|
||||
<Button
|
||||
startIcon={<DownloadIcon />}
|
||||
onClick={() => {
|
||||
if (!viewCA) return
|
||||
downloadText(`${viewCA.name || viewCA.id}.ca.crt.pem`, viewCA.cert_pem || '')
|
||||
}}
|
||||
>
|
||||
Download Cert
|
||||
</Button>
|
||||
<Button
|
||||
startIcon={<DownloadIcon />}
|
||||
onClick={() => {
|
||||
if (!viewCA) return
|
||||
downloadText(`${viewCA.name || viewCA.id}.ca.key.pem`, viewCA.key_pem || '')
|
||||
}}
|
||||
>
|
||||
Download Key
|
||||
</Button>
|
||||
<Button onClick={() => setViewCA(null)}>Close</Button>
|
||||
</DialogActions>
|
||||
</Dialog>
|
||||
|
||||
<Dialog open={Boolean(editCAID)} onClose={() => setEditCAID('')} maxWidth="xs" fullWidth>
|
||||
<DialogTitle>
|
||||
<Box sx={{ display: 'grid', gap: 1 }}>
|
||||
<Typography variant="h6">Edit CA</Typography>
|
||||
{dialogError ? <Alert severity="error">{dialogError}</Alert> : null}
|
||||
</Box>
|
||||
</DialogTitle>
|
||||
<DialogContent>
|
||||
<TextField
|
||||
fullWidth
|
||||
label="Name"
|
||||
value={editCAName}
|
||||
onChange={(event) => setEditCAName(event.target.value)}
|
||||
sx={{ mt: 1 }}
|
||||
/>
|
||||
</DialogContent>
|
||||
<DialogActions>
|
||||
<Button onClick={() => setEditCAID('')}>Cancel</Button>
|
||||
<Button variant="contained" onClick={saveCAEdit} disabled={busy || !editCAName.trim()}>
|
||||
{busy ? 'Saving...' : 'Save'}
|
||||
</Button>
|
||||
</DialogActions>
|
||||
</Dialog>
|
||||
|
||||
<Dialog open={Boolean(viewCert)} onClose={() => setViewCert(null)} maxWidth="md" fullWidth>
|
||||
<DialogTitle>Certificate Details</DialogTitle>
|
||||
<DialogContent>
|
||||
<Box sx={{ display: 'grid', gap: 1, mt: 1 }}>
|
||||
<TextField label="ID" value={viewCert?.id || ''} InputProps={{ readOnly: true }} />
|
||||
<TextField label="Issuer CA ID" value={viewCert?.ca_id || 'standalone'} InputProps={{ readOnly: true }} />
|
||||
<TextField label="Serial" value={viewCert?.serial_hex || ''} InputProps={{ readOnly: true }} />
|
||||
<TextField label="Common Name" value={viewCert?.common_name || ''} InputProps={{ readOnly: true }} />
|
||||
<TextField label="SAN DNS" value={viewCert?.san_dns || ''} InputProps={{ readOnly: true }} />
|
||||
<TextField label="SAN IPs" value={viewCert?.san_ips || ''} InputProps={{ readOnly: true }} />
|
||||
<TextField label="Status" value={viewCert?.status || ''} InputProps={{ readOnly: true }} />
|
||||
<TextField label="Not Before" value={fmt(viewCert?.not_before || 0)} InputProps={{ readOnly: true }} />
|
||||
<TextField label="Not After" value={fmt(viewCert?.not_after || 0)} InputProps={{ readOnly: true }} />
|
||||
<TextField
|
||||
label="Certificate PEM"
|
||||
multiline
|
||||
minRows={8}
|
||||
value={viewCert?.cert_pem || ''}
|
||||
InputProps={{ readOnly: true }}
|
||||
sx={{ '& .MuiInputBase-input': { fontFamily: 'ui-monospace, SFMono-Regular, Menlo, Monaco, Consolas, monospace' } }}
|
||||
/>
|
||||
<TextField
|
||||
label="Private Key PEM"
|
||||
multiline
|
||||
minRows={8}
|
||||
value={viewCert?.key_pem || ''}
|
||||
InputProps={{ readOnly: true }}
|
||||
sx={{ '& .MuiInputBase-input': { fontFamily: 'ui-monospace, SFMono-Regular, Menlo, Monaco, Consolas, monospace' } }}
|
||||
/>
|
||||
</Box>
|
||||
</DialogContent>
|
||||
<DialogActions>
|
||||
<Button
|
||||
startIcon={<DownloadIcon />}
|
||||
onClick={async () => {
|
||||
if (!viewCert) return
|
||||
const data = await api.downloadPKICertBundle(viewCert.id)
|
||||
downloadBinary(`${viewCert.common_name || viewCert.id}.bundle.zip`, data, 'application/zip')
|
||||
}}
|
||||
>
|
||||
Download Bundle
|
||||
</Button>
|
||||
<Button
|
||||
startIcon={<DownloadIcon />}
|
||||
onClick={() => {
|
||||
if (!viewCert) return
|
||||
downloadText(`${viewCert.common_name || viewCert.id}.crt.pem`, viewCert.cert_pem || '')
|
||||
}}
|
||||
>
|
||||
Download Cert
|
||||
</Button>
|
||||
<Button
|
||||
startIcon={<DownloadIcon />}
|
||||
onClick={() => {
|
||||
if (!viewCert) return
|
||||
downloadText(`${viewCert.common_name || viewCert.id}.key.pem`, viewCert.key_pem || '')
|
||||
}}
|
||||
>
|
||||
Download Key
|
||||
</Button>
|
||||
<Button onClick={() => setViewCert(null)}>Close</Button>
|
||||
</DialogActions>
|
||||
</Dialog>
|
||||
</Box>
|
||||
)
|
||||
}
|
||||
488
frontend/src/pages/AdminServicePrincipalsPage.tsx
Normal file
488
frontend/src/pages/AdminServicePrincipalsPage.tsx
Normal file
@@ -0,0 +1,488 @@
|
||||
import Alert from '@mui/material/Alert'
|
||||
import {
|
||||
Box,
|
||||
Button,
|
||||
Chip,
|
||||
Dialog,
|
||||
DialogActions,
|
||||
DialogContent,
|
||||
DialogTitle,
|
||||
MenuItem,
|
||||
Paper,
|
||||
TextField,
|
||||
Typography
|
||||
} from '@mui/material'
|
||||
import { useEffect, useState } from 'react'
|
||||
import { api, CertPrincipalBinding, PKICert, PrincipalProjectRole, Project, ServicePrincipal } from '../api'
|
||||
|
||||
function fmt(ts: number): string {
|
||||
if (!ts || ts <= 0) return '-'
|
||||
return new Date(ts * 1000).toLocaleString()
|
||||
}
|
||||
|
||||
export default function AdminServicePrincipalsPage() {
|
||||
const [principals, setPrincipals] = useState<ServicePrincipal[]>([])
|
||||
const [bindings, setBindings] = useState<CertPrincipalBinding[]>([])
|
||||
const [pkiCerts, setPKICerts] = useState<PKICert[]>([])
|
||||
const [projects, setProjects] = useState<Project[]>([])
|
||||
const [principalRoles, setPrincipalRoles] = useState<Record<string, PrincipalProjectRole[]>>({})
|
||||
const [error, setError] = useState<string | null>(null)
|
||||
const [dialogError, setDialogError] = useState<string | null>(null)
|
||||
const [loading, setLoading] = useState(false)
|
||||
const [createOpen, setCreateOpen] = useState(false)
|
||||
const [name, setName] = useState('')
|
||||
const [description, setDescription] = useState('')
|
||||
const [bindingOpen, setBindingOpen] = useState(false)
|
||||
const [bindSource, setBindSource] = useState<'pki' | 'manual'>('pki')
|
||||
const [bindPKICertID, setBindPKICertID] = useState('')
|
||||
const [bindFingerprint, setBindFingerprint] = useState('')
|
||||
const [bindPrincipalID, setBindPrincipalID] = useState('')
|
||||
const [busy, setBusy] = useState(false)
|
||||
const [rolePrincipalID, setRolePrincipalID] = useState('')
|
||||
const [roleProjectID, setRoleProjectID] = useState('')
|
||||
const [roleValue, setRoleValue] = useState<'viewer' | 'writer' | 'admin'>('writer')
|
||||
const [rolePrincipalFilter, setRolePrincipalFilter] = useState('')
|
||||
const [roleProjectFilter, setRoleProjectFilter] = useState('')
|
||||
const [roleSearch, setRoleSearch] = useState('')
|
||||
|
||||
const load = async () => {
|
||||
setLoading(true)
|
||||
setError(null)
|
||||
try {
|
||||
const p = await api.listServicePrincipals()
|
||||
const b = await api.listCertPrincipalBindings()
|
||||
const c = await api.listPKICerts()
|
||||
const allProjects = await api.listProjects(1000, 0, '')
|
||||
setPrincipals(Array.isArray(p) ? p : [])
|
||||
setBindings(Array.isArray(b) ? b : [])
|
||||
setPKICerts(Array.isArray(c) ? c : [])
|
||||
setProjects(Array.isArray(allProjects) ? allProjects : [])
|
||||
const roleMap: Record<string, PrincipalProjectRole[]> = {}
|
||||
let i: number
|
||||
let roles: PrincipalProjectRole[]
|
||||
for (i = 0; i < (Array.isArray(p) ? p.length : 0); i++) {
|
||||
roles = await api.listPrincipalProjectRoles(p[i].id)
|
||||
roleMap[p[i].id] = Array.isArray(roles) ? roles : []
|
||||
}
|
||||
setPrincipalRoles(roleMap)
|
||||
} catch (err) {
|
||||
setError(err instanceof Error ? err.message : 'Failed to load data')
|
||||
} finally {
|
||||
setLoading(false)
|
||||
}
|
||||
}
|
||||
|
||||
useEffect(() => {
|
||||
load()
|
||||
}, [])
|
||||
|
||||
const createPrincipal = async () => {
|
||||
if (!name.trim()) {
|
||||
setDialogError('Name is required.')
|
||||
return
|
||||
}
|
||||
setBusy(true)
|
||||
setDialogError(null)
|
||||
try {
|
||||
await api.createServicePrincipal({ name: name.trim(), description: description.trim(), disabled: false })
|
||||
setCreateOpen(false)
|
||||
setName('')
|
||||
setDescription('')
|
||||
await load()
|
||||
} catch (err) {
|
||||
setDialogError(err instanceof Error ? err.message : 'Failed to create principal')
|
||||
} finally {
|
||||
setBusy(false)
|
||||
}
|
||||
}
|
||||
|
||||
const togglePrincipal = async (item: ServicePrincipal) => {
|
||||
setError(null)
|
||||
try {
|
||||
await api.updateServicePrincipal(item.id, { name: item.name, description: item.description, is_admin: item.is_admin, disabled: !item.disabled })
|
||||
await load()
|
||||
} catch (err) {
|
||||
setError(err instanceof Error ? err.message : 'Failed to update principal')
|
||||
}
|
||||
}
|
||||
|
||||
const deletePrincipal = async (item: ServicePrincipal) => {
|
||||
if (!window.confirm(`Delete principal "${item.name}"?`)) return
|
||||
setError(null)
|
||||
try {
|
||||
await api.deleteServicePrincipal(item.id)
|
||||
await load()
|
||||
} catch (err) {
|
||||
setError(err instanceof Error ? err.message : 'Failed to delete principal')
|
||||
}
|
||||
}
|
||||
|
||||
const upsertBinding = async () => {
|
||||
let fingerprint: string
|
||||
let cert: PKICert | undefined
|
||||
fingerprint = ''
|
||||
if (bindSource === 'pki') {
|
||||
cert = pkiCerts.find((item) => item.id === bindPKICertID)
|
||||
fingerprint = (cert?.fingerprint || '').trim().toLowerCase()
|
||||
} else {
|
||||
fingerprint = bindFingerprint.trim().toLowerCase()
|
||||
}
|
||||
if (!fingerprint || !bindPrincipalID) {
|
||||
setDialogError('Fingerprint and principal are required.')
|
||||
return
|
||||
}
|
||||
setBusy(true)
|
||||
setDialogError(null)
|
||||
try {
|
||||
await api.upsertCertPrincipalBinding({
|
||||
fingerprint: fingerprint,
|
||||
principal_id: bindPrincipalID,
|
||||
enabled: true
|
||||
})
|
||||
setBindingOpen(false)
|
||||
setBindSource('pki')
|
||||
setBindPKICertID('')
|
||||
setBindFingerprint('')
|
||||
setBindPrincipalID('')
|
||||
await load()
|
||||
} catch (err) {
|
||||
setDialogError(err instanceof Error ? err.message : 'Failed to save binding')
|
||||
} finally {
|
||||
setBusy(false)
|
||||
}
|
||||
}
|
||||
|
||||
const toggleBinding = async (item: CertPrincipalBinding) => {
|
||||
setError(null)
|
||||
try {
|
||||
await api.upsertCertPrincipalBinding({ fingerprint: item.fingerprint, principal_id: item.principal_id, enabled: !item.enabled })
|
||||
await load()
|
||||
} catch (err) {
|
||||
setError(err instanceof Error ? err.message : 'Failed to update binding')
|
||||
}
|
||||
}
|
||||
|
||||
const deleteBinding = async (item: CertPrincipalBinding) => {
|
||||
if (!window.confirm(`Delete binding for fingerprint ${item.fingerprint}?`)) return
|
||||
setError(null)
|
||||
try {
|
||||
await api.deleteCertPrincipalBinding(item.fingerprint)
|
||||
await load()
|
||||
} catch (err) {
|
||||
setError(err instanceof Error ? err.message : 'Failed to delete binding')
|
||||
}
|
||||
}
|
||||
|
||||
const principalName = (id: string): string => {
|
||||
const p = principals.find((item) => item.id === id)
|
||||
return p ? p.name : id
|
||||
}
|
||||
|
||||
const togglePrincipalAdmin = async (item: ServicePrincipal) => {
|
||||
setError(null)
|
||||
try {
|
||||
await api.updateServicePrincipal(item.id, { name: item.name, description: item.description, is_admin: !item.is_admin, disabled: item.disabled })
|
||||
await load()
|
||||
} catch (err) {
|
||||
setError(err instanceof Error ? err.message : 'Failed to update principal admin flag')
|
||||
}
|
||||
}
|
||||
|
||||
const upsertRole = async () => {
|
||||
if (!rolePrincipalID || !roleProjectID) {
|
||||
setDialogError('Principal and project are required for role assignment.')
|
||||
return
|
||||
}
|
||||
setBusy(true)
|
||||
setDialogError(null)
|
||||
try {
|
||||
await api.upsertPrincipalProjectRole(rolePrincipalID, { project_id: roleProjectID, role: roleValue })
|
||||
setRolePrincipalID('')
|
||||
setRoleProjectID('')
|
||||
setRoleValue('writer')
|
||||
await load()
|
||||
} catch (err) {
|
||||
setDialogError(err instanceof Error ? err.message : 'Failed to assign role')
|
||||
} finally {
|
||||
setBusy(false)
|
||||
}
|
||||
}
|
||||
|
||||
const deleteRole = async (principalID: string, projectID: string) => {
|
||||
setError(null)
|
||||
try {
|
||||
await api.deletePrincipalProjectRole(principalID, projectID)
|
||||
await load()
|
||||
} catch (err) {
|
||||
setError(err instanceof Error ? err.message : 'Failed to delete role assignment')
|
||||
}
|
||||
}
|
||||
|
||||
const projectName = (id: string): string => {
|
||||
const p = projects.find((item) => item.id === id)
|
||||
if (!p) return id
|
||||
return `${p.name} (${p.slug})`
|
||||
}
|
||||
|
||||
const pkiCertByFingerprint = (fingerprint: string): PKICert | undefined => {
|
||||
return pkiCerts.find((item) => (item.fingerprint || '').toLowerCase() === (fingerprint || '').toLowerCase())
|
||||
}
|
||||
|
||||
const filteredPrincipals = principals.filter((principal) => {
|
||||
if (rolePrincipalFilter && principal.id !== rolePrincipalFilter) return false
|
||||
const roles = principalRoles[principal.id] || []
|
||||
if (roleProjectFilter && !roles.some((item) => item.project_id === roleProjectFilter)) return false
|
||||
if (roleSearch.trim()) {
|
||||
const q = roleSearch.trim().toLowerCase()
|
||||
const hitPrincipal =
|
||||
principal.name.toLowerCase().includes(q) ||
|
||||
principal.id.toLowerCase().includes(q) ||
|
||||
principal.description.toLowerCase().includes(q)
|
||||
if (hitPrincipal) return true
|
||||
return roles.some((item) => {
|
||||
const project = projects.find((p) => p.id === item.project_id)
|
||||
const projectText = project ? `${project.name} ${project.slug}`.toLowerCase() : ''
|
||||
return item.role.toLowerCase().includes(q) || item.project_id.toLowerCase().includes(q) || projectText.includes(q)
|
||||
})
|
||||
}
|
||||
return true
|
||||
})
|
||||
|
||||
return (
|
||||
<Box>
|
||||
<Typography variant="h5" sx={{ mb: 2 }}>Admin: Service Principals</Typography>
|
||||
{error ? <Alert severity="error" sx={{ mb: 1 }}>{error}</Alert> : null}
|
||||
<Box sx={{ display: 'grid', gap: 1 }}>
|
||||
<Paper sx={{ p: 2 }}>
|
||||
<Box sx={{ display: 'flex', alignItems: 'center', justifyContent: 'space-between', mb: 1 }}>
|
||||
<Typography variant="h6">Principals</Typography>
|
||||
<Button variant="outlined" onClick={() => { setDialogError(null); setCreateOpen(true) }}>New Principal</Button>
|
||||
</Box>
|
||||
{loading && principals.length === 0 ? <Typography variant="body2" color="text.secondary">Loading...</Typography> : null}
|
||||
<Box sx={{ display: 'grid', gap: 1 }}>
|
||||
{principals.map((item) => (
|
||||
<Paper key={item.id} variant="outlined" sx={{ p: 1 }}>
|
||||
<Box sx={{ display: 'flex', alignItems: 'center', justifyContent: 'space-between', gap: 1 }}>
|
||||
<Box sx={{ display: 'flex', alignItems: 'center', gap: 1 }}>
|
||||
<Typography variant="body2">{item.name} ({item.id})</Typography>
|
||||
<Chip size="small" color={item.disabled ? 'default' : 'success'} label={item.disabled ? 'Disabled' : 'Active'} />
|
||||
{item.is_admin ? <Chip size="small" color="warning" label="Principal Admin" /> : null}
|
||||
</Box>
|
||||
<Box sx={{ display: 'flex', alignItems: 'center', gap: 1 }}>
|
||||
<Button size="small" color={item.is_admin ? 'warning' : 'primary'} onClick={() => togglePrincipalAdmin(item)}>
|
||||
{item.is_admin ? 'Unset Admin' : 'Set Admin'}
|
||||
</Button>
|
||||
<Button size="small" color={item.disabled ? 'success' : 'warning'} onClick={() => togglePrincipal(item)}>
|
||||
{item.disabled ? 'Enable' : 'Disable'}
|
||||
</Button>
|
||||
<Button size="small" color="error" onClick={() => deletePrincipal(item)}>Delete</Button>
|
||||
</Box>
|
||||
</Box>
|
||||
<Typography variant="caption" color="text.secondary">
|
||||
{item.description || '(no description)'} · updated: {fmt(item.updated_at)}
|
||||
</Typography>
|
||||
</Paper>
|
||||
))}
|
||||
</Box>
|
||||
</Paper>
|
||||
|
||||
<Paper sx={{ p: 2 }}>
|
||||
<Typography variant="h6" sx={{ mb: 0.5 }}>Project Role Assignments</Typography>
|
||||
<Typography variant="caption" color="text.secondary" sx={{ display: 'block', mb: 1 }}>
|
||||
Define what each principal can do per project.
|
||||
</Typography>
|
||||
{dialogError ? <Alert severity="error" sx={{ mb: 1 }}>{dialogError}</Alert> : null}
|
||||
<Box sx={{ display: 'grid', gridTemplateColumns: '1fr 1fr 1fr auto', gap: 1, mb: 1 }}>
|
||||
<TextField select label="Principal" value={rolePrincipalID} onChange={(event) => setRolePrincipalID(event.target.value)}>
|
||||
{principals.map((item) => (
|
||||
<MenuItem key={item.id} value={item.id}>{item.name}</MenuItem>
|
||||
))}
|
||||
</TextField>
|
||||
<TextField select label="Project" value={roleProjectID} onChange={(event) => setRoleProjectID(event.target.value)}>
|
||||
{projects.map((item) => (
|
||||
<MenuItem key={item.id} value={item.id}>{item.name} ({item.slug})</MenuItem>
|
||||
))}
|
||||
</TextField>
|
||||
<TextField select label="Role" value={roleValue} onChange={(event) => setRoleValue(event.target.value as 'viewer' | 'writer' | 'admin')}>
|
||||
<MenuItem value="viewer">viewer</MenuItem>
|
||||
<MenuItem value="writer">writer</MenuItem>
|
||||
<MenuItem value="admin">admin</MenuItem>
|
||||
</TextField>
|
||||
<Button variant="outlined" onClick={upsertRole} disabled={busy}>Assign</Button>
|
||||
</Box>
|
||||
<Box sx={{ display: 'grid', gridTemplateColumns: '1fr 1fr 2fr auto', gap: 1, mb: 1 }}>
|
||||
<TextField
|
||||
select
|
||||
label="Filter Principal"
|
||||
value={rolePrincipalFilter}
|
||||
onChange={(event) => setRolePrincipalFilter(event.target.value)}
|
||||
size="small"
|
||||
>
|
||||
<MenuItem value="">All</MenuItem>
|
||||
{principals.map((item) => (
|
||||
<MenuItem key={item.id} value={item.id}>{item.name}</MenuItem>
|
||||
))}
|
||||
</TextField>
|
||||
<TextField
|
||||
select
|
||||
label="Filter Project"
|
||||
value={roleProjectFilter}
|
||||
onChange={(event) => setRoleProjectFilter(event.target.value)}
|
||||
size="small"
|
||||
>
|
||||
<MenuItem value="">All</MenuItem>
|
||||
{projects.map((item) => (
|
||||
<MenuItem key={item.id} value={item.id}>{item.name} ({item.slug})</MenuItem>
|
||||
))}
|
||||
</TextField>
|
||||
<TextField
|
||||
label="Search"
|
||||
value={roleSearch}
|
||||
onChange={(event) => setRoleSearch(event.target.value)}
|
||||
size="small"
|
||||
placeholder="Principal/project/role..."
|
||||
/>
|
||||
<Button
|
||||
variant="outlined"
|
||||
size="small"
|
||||
onClick={() => {
|
||||
setRolePrincipalFilter('')
|
||||
setRoleProjectFilter('')
|
||||
setRoleSearch('')
|
||||
}}
|
||||
>
|
||||
Reset
|
||||
</Button>
|
||||
</Box>
|
||||
<Box sx={{ display: 'grid', gap: 1 }}>
|
||||
{filteredPrincipals.map((principal) => (
|
||||
<Paper key={principal.id} variant="outlined" sx={{ p: 1 }}>
|
||||
<Typography variant="subtitle2">{principal.name}</Typography>
|
||||
{(principalRoles[principal.id] || []).length === 0 ? (
|
||||
<Typography variant="caption" color="text.secondary">No project roles</Typography>
|
||||
) : (
|
||||
(principalRoles[principal.id] || []).map((role) => (
|
||||
<Box key={`${role.principal_id}:${role.project_id}`} sx={{ display: 'flex', alignItems: 'center', justifyContent: 'space-between' }}>
|
||||
<Typography variant="caption" color="text.secondary">{projectName(role.project_id)} · {role.role}</Typography>
|
||||
<Button size="small" color="error" onClick={() => deleteRole(role.principal_id, role.project_id)}>Remove</Button>
|
||||
</Box>
|
||||
))
|
||||
)}
|
||||
</Paper>
|
||||
))}
|
||||
</Box>
|
||||
</Paper>
|
||||
|
||||
<Paper sx={{ p: 2 }}>
|
||||
<Box sx={{ display: 'flex', alignItems: 'center', justifyContent: 'space-between', mb: 1 }}>
|
||||
<Typography variant="h6">Cert Fingerprint Bindings</Typography>
|
||||
<Button variant="outlined" onClick={() => { setDialogError(null); setBindSource('pki'); setBindPKICertID(''); setBindFingerprint(''); setBindPrincipalID(''); setBindingOpen(true) }}>Add Binding</Button>
|
||||
</Box>
|
||||
<Box sx={{ display: 'grid', gap: 1 }}>
|
||||
{bindings.map((item) => (
|
||||
<Paper key={item.fingerprint} variant="outlined" sx={{ p: 1 }}>
|
||||
<Box sx={{ display: 'flex', alignItems: 'center', justifyContent: 'space-between', gap: 1 }}>
|
||||
<Box sx={{ display: 'grid' }}>
|
||||
{pkiCertByFingerprint(item.fingerprint) ? (
|
||||
<Typography variant="body2">
|
||||
{pkiCertByFingerprint(item.fingerprint)?.common_name || pkiCertByFingerprint(item.fingerprint)?.serial_hex} ({pkiCertByFingerprint(item.fingerprint)?.id})
|
||||
</Typography>
|
||||
) : (
|
||||
<Typography variant="body2">{item.fingerprint}</Typography>
|
||||
)}
|
||||
<Typography variant="caption" color="text.secondary">principal: {principalName(item.principal_id)} ({item.principal_id})</Typography>
|
||||
{pkiCertByFingerprint(item.fingerprint) ? (
|
||||
<Typography variant="caption" color="text.secondary">
|
||||
source: pki cert · fingerprint: {item.fingerprint}
|
||||
</Typography>
|
||||
) : (
|
||||
<Typography variant="caption" color="text.secondary">source: manual fingerprint</Typography>
|
||||
)}
|
||||
</Box>
|
||||
<Box sx={{ display: 'flex', alignItems: 'center', gap: 1 }}>
|
||||
<Button size="small" color={item.enabled ? 'warning' : 'success'} onClick={() => toggleBinding(item)}>
|
||||
{item.enabled ? 'Disable' : 'Enable'}
|
||||
</Button>
|
||||
<Button size="small" color="error" onClick={() => deleteBinding(item)}>Delete</Button>
|
||||
</Box>
|
||||
</Box>
|
||||
</Paper>
|
||||
))}
|
||||
</Box>
|
||||
</Paper>
|
||||
</Box>
|
||||
|
||||
<Dialog open={createOpen} onClose={() => setCreateOpen(false)} maxWidth="sm" fullWidth>
|
||||
<DialogTitle>
|
||||
<Box sx={{ display: 'grid', gap: 1 }}>
|
||||
<Typography variant="h6">New Service Principal</Typography>
|
||||
{dialogError ? <Alert severity="error">{dialogError}</Alert> : null}
|
||||
</Box>
|
||||
</DialogTitle>
|
||||
<DialogContent sx={{ display: 'grid', gap: 1, pt: '8px !important' }}>
|
||||
<TextField label="Name" value={name} onChange={(event) => setName(event.target.value)} />
|
||||
<TextField label="Description" value={description} onChange={(event) => setDescription(event.target.value)} />
|
||||
</DialogContent>
|
||||
<DialogActions>
|
||||
<Button onClick={() => setCreateOpen(false)}>Cancel</Button>
|
||||
<Button variant="contained" onClick={createPrincipal} disabled={busy}>{busy ? 'Saving...' : 'Create'}</Button>
|
||||
</DialogActions>
|
||||
</Dialog>
|
||||
|
||||
<Dialog open={bindingOpen} onClose={() => setBindingOpen(false)} maxWidth="sm" fullWidth>
|
||||
<DialogTitle>
|
||||
<Box sx={{ display: 'grid', gap: 1 }}>
|
||||
<Typography variant="h6">Add Cert Binding</Typography>
|
||||
{dialogError ? <Alert severity="error">{dialogError}</Alert> : null}
|
||||
</Box>
|
||||
</DialogTitle>
|
||||
<DialogContent sx={{ display: 'grid', gap: 1, pt: '8px !important' }}>
|
||||
<TextField
|
||||
select
|
||||
label="Binding Source"
|
||||
value={bindSource}
|
||||
onChange={(event) => setBindSource(event.target.value as 'pki' | 'manual')}
|
||||
>
|
||||
<MenuItem value="pki">PKI Certificate</MenuItem>
|
||||
<MenuItem value="manual">Manual Fingerprint</MenuItem>
|
||||
</TextField>
|
||||
{bindSource === 'pki' ? (
|
||||
<TextField
|
||||
select
|
||||
label="PKI Certificate"
|
||||
value={bindPKICertID}
|
||||
onChange={(event) => setBindPKICertID(event.target.value)}
|
||||
>
|
||||
{pkiCerts.map((item) => (
|
||||
<MenuItem key={item.id} value={item.id}>
|
||||
{item.common_name || item.serial_hex} ({item.id.slice(0, 8)})
|
||||
</MenuItem>
|
||||
))}
|
||||
</TextField>
|
||||
) : null}
|
||||
{bindSource === 'manual' ? (
|
||||
<TextField
|
||||
label="Fingerprint (sha256 hex)"
|
||||
value={bindFingerprint}
|
||||
onChange={(event) => setBindFingerprint(event.target.value)}
|
||||
/>
|
||||
) : null}
|
||||
<TextField
|
||||
select
|
||||
label="Principal"
|
||||
value={bindPrincipalID}
|
||||
onChange={(event) => setBindPrincipalID(event.target.value)}
|
||||
>
|
||||
{principals.map((item) => (
|
||||
<MenuItem key={item.id} value={item.id}>{item.name} ({item.id})</MenuItem>
|
||||
))}
|
||||
</TextField>
|
||||
</DialogContent>
|
||||
<DialogActions>
|
||||
<Button onClick={() => setBindingOpen(false)}>Cancel</Button>
|
||||
<Button variant="contained" onClick={upsertBinding} disabled={busy}>{busy ? 'Saving...' : 'Save'}</Button>
|
||||
</DialogActions>
|
||||
</Dialog>
|
||||
</Box>
|
||||
)
|
||||
}
|
||||
712
frontend/src/pages/AdminTLSSettingsPage.tsx
Normal file
712
frontend/src/pages/AdminTLSSettingsPage.tsx
Normal file
@@ -0,0 +1,712 @@
|
||||
import Alert from '@mui/material/Alert'
|
||||
import {
|
||||
Box,
|
||||
Button,
|
||||
Chip,
|
||||
Dialog,
|
||||
DialogActions,
|
||||
DialogContent,
|
||||
DialogTitle,
|
||||
MenuItem,
|
||||
Paper,
|
||||
TextField,
|
||||
Typography
|
||||
} from '@mui/material'
|
||||
import { useEffect, useMemo, useState } from 'react'
|
||||
import { api, PKICA, PKICert, ServicePrincipal, TLSListener, TLSSettings } from '../api'
|
||||
|
||||
type ListenerForm = Omit<TLSListener, 'id' | 'created_at' | 'updated_at'>
|
||||
|
||||
const emptyListener = (): ListenerForm => ({
|
||||
name: '',
|
||||
enabled: false,
|
||||
http_addrs: [],
|
||||
https_addrs: [],
|
||||
auth_policy: 'default',
|
||||
apply_policy_api: true,
|
||||
apply_policy_git: true,
|
||||
apply_policy_rpm: true,
|
||||
apply_policy_v2: true,
|
||||
client_cert_allowlist: [],
|
||||
tls_server_cert_source: 'pki',
|
||||
tls_cert_file: '',
|
||||
tls_key_file: '',
|
||||
tls_pki_server_cert_id: '',
|
||||
tls_client_auth: 'none',
|
||||
tls_client_ca_file: '',
|
||||
tls_pki_client_ca_id: '',
|
||||
tls_min_version: '1.2'
|
||||
})
|
||||
|
||||
export default function AdminTLSSettingsPage() {
|
||||
const [settings, setSettings] = useState<TLSSettings>({
|
||||
http_addrs: [':1080'],
|
||||
https_addrs: [],
|
||||
tls_server_cert_source: 'pki',
|
||||
tls_cert_file: '',
|
||||
tls_key_file: '',
|
||||
tls_pki_server_cert_id: '',
|
||||
tls_client_auth: 'none',
|
||||
tls_client_ca_file: '',
|
||||
tls_pki_client_ca_id: '',
|
||||
tls_min_version: '1.2'
|
||||
})
|
||||
const [loading, setLoading] = useState(false)
|
||||
const [error, setError] = useState<string | null>(null)
|
||||
const [listeners, setListeners] = useState<TLSListener[]>([])
|
||||
const [listenersLoading, setListenersLoading] = useState(false)
|
||||
const [dialogOpen, setDialogOpen] = useState(false)
|
||||
const [editingMain, setEditingMain] = useState(false)
|
||||
const [editingID, setEditingID] = useState<string | null>(null)
|
||||
const [listenerForm, setListenerForm] = useState<ListenerForm>(emptyListener())
|
||||
const [listenerHTTPText, setListenerHTTPText] = useState('')
|
||||
const [listenerHTTPSText, setListenerHTTPSText] = useState('')
|
||||
const [listenerCertAllowText, setListenerCertAllowText] = useState('')
|
||||
const [selectedAllowedCertIDs, setSelectedAllowedCertIDs] = useState<string[]>([])
|
||||
const [listenerError, setListenerError] = useState<string | null>(null)
|
||||
const [listenerSaving, setListenerSaving] = useState(false)
|
||||
const [runtimeCounts, setRuntimeCounts] = useState<Record<string, number>>({})
|
||||
const [pkiCAs, setPKICAs] = useState<PKICA[]>([])
|
||||
const [pkiCerts, setPKICerts] = useState<PKICert[]>([])
|
||||
const [principals, setPrincipals] = useState<ServicePrincipal[]>([])
|
||||
const [bindPrincipalID, setBindPrincipalID] = useState('')
|
||||
const [confirmOpen, setConfirmOpen] = useState(false)
|
||||
const [confirmTitle, setConfirmTitle] = useState('')
|
||||
const [confirmMessage, setConfirmMessage] = useState('')
|
||||
const [confirmLabel, setConfirmLabel] = useState('Confirm')
|
||||
const [confirmColor, setConfirmColor] = useState<'primary' | 'warning' | 'error'>('primary')
|
||||
const [confirmAction, setConfirmAction] = useState<null | (() => Promise<void>)>(null)
|
||||
const certIDByFingerprint = useMemo(() => {
|
||||
const map: Record<string, string> = {}
|
||||
pkiCerts.forEach((cert) => {
|
||||
if (cert.fingerprint) {
|
||||
map[cert.fingerprint.toLowerCase()] = cert.id
|
||||
}
|
||||
})
|
||||
return map
|
||||
}, [pkiCerts])
|
||||
|
||||
const loadMainSettings = async () => {
|
||||
setLoading(true)
|
||||
api
|
||||
.getTLSSettings()
|
||||
.then((data) => {
|
||||
setSettings(data)
|
||||
})
|
||||
.catch((err) => {
|
||||
const message = err instanceof Error ? err.message : 'Failed to load TLS settings'
|
||||
setError(message)
|
||||
})
|
||||
.finally(() => setLoading(false))
|
||||
}
|
||||
|
||||
const loadListeners = async () => {
|
||||
setListenersLoading(true)
|
||||
api
|
||||
.listTLSListeners()
|
||||
.then((data) => setListeners(Array.isArray(data) ? data : []))
|
||||
.catch((err) => {
|
||||
const message = err instanceof Error ? err.message : 'Failed to load extra listeners'
|
||||
setError(message)
|
||||
})
|
||||
.finally(() => setListenersLoading(false))
|
||||
}
|
||||
|
||||
const loadRuntimeStatus = async () => {
|
||||
api
|
||||
.getTLSListenerRuntimeStatus()
|
||||
.then((data) => setRuntimeCounts(data || {}))
|
||||
.catch(() => setRuntimeCounts({}))
|
||||
}
|
||||
|
||||
const loadPKIOptions = async () => {
|
||||
api
|
||||
.listPKICAs()
|
||||
.then((data) => setPKICAs(Array.isArray(data) ? data : []))
|
||||
.catch(() => setPKICAs([]))
|
||||
api
|
||||
.listPKICerts()
|
||||
.then((data) => setPKICerts(Array.isArray(data) ? data : []))
|
||||
.catch(() => setPKICerts([]))
|
||||
api
|
||||
.listServicePrincipals()
|
||||
.then((data) => setPrincipals(Array.isArray(data) ? data : []))
|
||||
.catch(() => setPrincipals([]))
|
||||
}
|
||||
|
||||
useEffect(() => {
|
||||
loadMainSettings()
|
||||
loadListeners()
|
||||
loadRuntimeStatus()
|
||||
loadPKIOptions()
|
||||
}, [])
|
||||
|
||||
const splitAddrText = (text: string): string[] => {
|
||||
return text
|
||||
.split(/\n|,/)
|
||||
.map((v) => v.trim())
|
||||
.filter((v) => v !== '')
|
||||
}
|
||||
|
||||
const openCreateDialog = () => {
|
||||
setEditingMain(false)
|
||||
setEditingID(null)
|
||||
setListenerForm(emptyListener())
|
||||
setListenerHTTPText('')
|
||||
setListenerHTTPSText('')
|
||||
setListenerCertAllowText('')
|
||||
setSelectedAllowedCertIDs([])
|
||||
setBindPrincipalID('')
|
||||
setListenerError(null)
|
||||
setDialogOpen(true)
|
||||
}
|
||||
|
||||
const openMainEditDialog = () => {
|
||||
setEditingMain(true)
|
||||
setEditingID(null)
|
||||
setListenerForm({
|
||||
name: 'Main Listener',
|
||||
enabled: true,
|
||||
http_addrs: settings.http_addrs || [],
|
||||
https_addrs: settings.https_addrs || [],
|
||||
auth_policy: 'default',
|
||||
apply_policy_api: true,
|
||||
apply_policy_git: true,
|
||||
apply_policy_rpm: true,
|
||||
apply_policy_v2: true,
|
||||
client_cert_allowlist: [],
|
||||
tls_server_cert_source: 'pki',
|
||||
tls_cert_file: '',
|
||||
tls_key_file: '',
|
||||
tls_pki_server_cert_id: settings.tls_pki_server_cert_id,
|
||||
tls_client_auth: settings.tls_client_auth,
|
||||
tls_client_ca_file: '',
|
||||
tls_pki_client_ca_id: settings.tls_pki_client_ca_id,
|
||||
tls_min_version: settings.tls_min_version
|
||||
})
|
||||
setListenerHTTPText((settings.http_addrs || []).join('\n'))
|
||||
setListenerHTTPSText((settings.https_addrs || []).join('\n'))
|
||||
setListenerCertAllowText('')
|
||||
setSelectedAllowedCertIDs([])
|
||||
setBindPrincipalID('')
|
||||
setListenerError(null)
|
||||
setDialogOpen(true)
|
||||
}
|
||||
|
||||
const openEditDialog = (item: TLSListener) => {
|
||||
const matchedIDs = (item.client_cert_allowlist || [])
|
||||
.map((fp) => certIDByFingerprint[(fp || '').toLowerCase()] || '')
|
||||
.filter((id) => id !== '')
|
||||
const matchedFPSet = new Set(
|
||||
matchedIDs
|
||||
.map((id) => (pkiCerts.find((cert) => cert.id === id)?.fingerprint || '').toLowerCase())
|
||||
.filter((fp) => fp !== '')
|
||||
)
|
||||
const manualOnlyFPs = (item.client_cert_allowlist || [])
|
||||
.map((fp) => (fp || '').toLowerCase())
|
||||
.filter((fp) => fp !== '' && !matchedFPSet.has(fp))
|
||||
setEditingMain(false)
|
||||
setEditingID(item.id)
|
||||
setListenerForm({
|
||||
name: item.name,
|
||||
enabled: item.enabled,
|
||||
http_addrs: item.http_addrs || [],
|
||||
https_addrs: item.https_addrs || [],
|
||||
auth_policy: item.auth_policy || 'default',
|
||||
apply_policy_api: item.apply_policy_api,
|
||||
apply_policy_git: item.apply_policy_git,
|
||||
apply_policy_rpm: item.apply_policy_rpm,
|
||||
apply_policy_v2: item.apply_policy_v2,
|
||||
client_cert_allowlist: item.client_cert_allowlist || [],
|
||||
tls_server_cert_source: 'pki',
|
||||
tls_cert_file: '',
|
||||
tls_key_file: '',
|
||||
tls_pki_server_cert_id: item.tls_pki_server_cert_id,
|
||||
tls_client_auth: item.tls_client_auth,
|
||||
tls_client_ca_file: '',
|
||||
tls_pki_client_ca_id: item.tls_pki_client_ca_id,
|
||||
tls_min_version: item.tls_min_version
|
||||
})
|
||||
setListenerHTTPText((item.http_addrs || []).join('\n'))
|
||||
setListenerHTTPSText((item.https_addrs || []).join('\n'))
|
||||
setListenerCertAllowText(manualOnlyFPs.join('\n'))
|
||||
setSelectedAllowedCertIDs(matchedIDs)
|
||||
setBindPrincipalID('')
|
||||
setListenerError(null)
|
||||
setDialogOpen(true)
|
||||
}
|
||||
|
||||
const handleSaveDialog = async () => {
|
||||
const selectedFingerprints = selectedAllowedCertIDs
|
||||
.map((id) => (pkiCerts.find((cert) => cert.id === id)?.fingerprint || '').toLowerCase())
|
||||
.filter((fp) => fp !== '')
|
||||
const mergedAllowlist = Array.from(new Set([...splitAddrText(listenerCertAllowText), ...selectedFingerprints]))
|
||||
const payload: ListenerForm = {
|
||||
...listenerForm,
|
||||
name: listenerForm.name.trim(),
|
||||
http_addrs: splitAddrText(listenerHTTPText),
|
||||
https_addrs: splitAddrText(listenerHTTPSText),
|
||||
client_cert_allowlist: mergedAllowlist
|
||||
}
|
||||
if (!editingMain && !payload.name) {
|
||||
setListenerError('Listener name is required')
|
||||
return
|
||||
}
|
||||
if (payload.http_addrs.length === 0 && payload.https_addrs.length === 0) {
|
||||
setListenerError('Provide at least one HTTP or HTTPS address')
|
||||
return
|
||||
}
|
||||
if (!editingMain && !payload.apply_policy_api && !payload.apply_policy_git && !payload.apply_policy_rpm && !payload.apply_policy_v2) {
|
||||
setListenerError('Select at least one scope (API/Git/RPM/V2).')
|
||||
return
|
||||
}
|
||||
if (!editingMain && (payload.auth_policy === 'read_open_write_cert' || payload.auth_policy === 'cert_only') && payload.client_cert_allowlist.length === 0) {
|
||||
setListenerError('Client certificate fingerprint allowlist is required for this policy.')
|
||||
return
|
||||
}
|
||||
if (payload.https_addrs.length > 0 && !payload.tls_pki_server_cert_id.trim()) {
|
||||
setListenerError('TLS PKI Server Cert is required when HTTPS addresses are configured.')
|
||||
return
|
||||
}
|
||||
if ((payload.tls_client_auth === 'require_and_verify' || payload.tls_client_auth === 'verify_if_given') && !payload.tls_pki_client_ca_id.trim()) {
|
||||
setListenerError('TLS PKI Client CA is required for the selected TLS Client Auth mode.')
|
||||
return
|
||||
}
|
||||
setListenerSaving(true)
|
||||
setListenerError(null)
|
||||
try {
|
||||
if (editingMain) {
|
||||
await api.updateTLSSettings({
|
||||
http_addrs: payload.http_addrs,
|
||||
https_addrs: payload.https_addrs,
|
||||
tls_server_cert_source: 'pki',
|
||||
tls_cert_file: '',
|
||||
tls_key_file: '',
|
||||
tls_pki_server_cert_id: payload.tls_pki_server_cert_id,
|
||||
tls_client_auth: payload.tls_client_auth,
|
||||
tls_client_ca_file: '',
|
||||
tls_pki_client_ca_id: payload.tls_pki_client_ca_id,
|
||||
tls_min_version: payload.tls_min_version
|
||||
})
|
||||
await loadMainSettings()
|
||||
} else if (editingID) {
|
||||
await api.updateTLSListener(editingID, payload)
|
||||
} else {
|
||||
await api.createTLSListener(payload)
|
||||
}
|
||||
setDialogOpen(false)
|
||||
if (!editingMain) {
|
||||
await loadListeners()
|
||||
}
|
||||
await loadRuntimeStatus()
|
||||
} catch (err) {
|
||||
const message = err instanceof Error ? err.message : 'Failed to save listener'
|
||||
setListenerError(message)
|
||||
} finally {
|
||||
setListenerSaving(false)
|
||||
}
|
||||
}
|
||||
|
||||
const bindSelectedCertsToPrincipal = async () => {
|
||||
let i: number
|
||||
let cert: PKICert | undefined
|
||||
if (!bindPrincipalID) {
|
||||
setListenerError('Choose a service principal for certificate bindings.')
|
||||
return
|
||||
}
|
||||
if (selectedAllowedCertIDs.length === 0) {
|
||||
setListenerError('Select at least one PKI certificate to bind.')
|
||||
return
|
||||
}
|
||||
setListenerSaving(true)
|
||||
setListenerError(null)
|
||||
try {
|
||||
for (i = 0; i < selectedAllowedCertIDs.length; i++) {
|
||||
cert = pkiCerts.find((item) => item.id === selectedAllowedCertIDs[i])
|
||||
if (!cert || !cert.fingerprint) {
|
||||
continue
|
||||
}
|
||||
await api.upsertCertPrincipalBinding({
|
||||
fingerprint: cert.fingerprint,
|
||||
principal_id: bindPrincipalID,
|
||||
enabled: true
|
||||
})
|
||||
}
|
||||
} catch (err) {
|
||||
const message = err instanceof Error ? err.message : 'Failed to bind selected certs to principal'
|
||||
setListenerError(message)
|
||||
return
|
||||
} finally {
|
||||
setListenerSaving(false)
|
||||
}
|
||||
setListenerError(null)
|
||||
}
|
||||
|
||||
const handleToggleListener = async (item: TLSListener) => {
|
||||
const nextEnabled = !item.enabled
|
||||
const payload: ListenerForm = {
|
||||
name: item.name,
|
||||
enabled: nextEnabled,
|
||||
http_addrs: item.http_addrs || [],
|
||||
https_addrs: item.https_addrs || [],
|
||||
auth_policy: item.auth_policy || 'default',
|
||||
apply_policy_api: item.apply_policy_api,
|
||||
apply_policy_git: item.apply_policy_git,
|
||||
apply_policy_rpm: item.apply_policy_rpm,
|
||||
apply_policy_v2: item.apply_policy_v2,
|
||||
client_cert_allowlist: item.client_cert_allowlist || [],
|
||||
tls_server_cert_source: 'pki',
|
||||
tls_cert_file: '',
|
||||
tls_key_file: '',
|
||||
tls_pki_server_cert_id: item.tls_pki_server_cert_id,
|
||||
tls_client_auth: item.tls_client_auth,
|
||||
tls_client_ca_file: '',
|
||||
tls_pki_client_ca_id: item.tls_pki_client_ca_id,
|
||||
tls_min_version: item.tls_min_version
|
||||
}
|
||||
setConfirmTitle(nextEnabled ? 'Enable Listener' : 'Disable Listener')
|
||||
setConfirmMessage(`Do you want to ${nextEnabled ? 'enable' : 'disable'} listener "${item.name}"?`)
|
||||
setConfirmLabel(nextEnabled ? 'Enable' : 'Disable')
|
||||
setConfirmColor(nextEnabled ? 'primary' : 'warning')
|
||||
setConfirmAction(() => async () => {
|
||||
setError(null)
|
||||
try {
|
||||
await api.updateTLSListener(item.id, payload)
|
||||
await loadListeners()
|
||||
await loadRuntimeStatus()
|
||||
} catch (err) {
|
||||
const message = err instanceof Error ? err.message : 'Failed to update listener state'
|
||||
setError(message)
|
||||
}
|
||||
})
|
||||
setConfirmOpen(true)
|
||||
}
|
||||
|
||||
const handleDeleteListener = async (item: TLSListener) => {
|
||||
setConfirmTitle('Delete Listener')
|
||||
setConfirmMessage(`Delete listener "${item.name}"?`)
|
||||
setConfirmLabel('Delete')
|
||||
setConfirmColor('error')
|
||||
setConfirmAction(() => async () => {
|
||||
setError(null)
|
||||
try {
|
||||
await api.deleteTLSListener(item.id)
|
||||
await loadListeners()
|
||||
await loadRuntimeStatus()
|
||||
} catch (err) {
|
||||
const message = err instanceof Error ? err.message : 'Failed to delete listener'
|
||||
setError(message)
|
||||
}
|
||||
})
|
||||
setConfirmOpen(true)
|
||||
}
|
||||
|
||||
const handleConfirm = async () => {
|
||||
if (!confirmAction) {
|
||||
setConfirmOpen(false)
|
||||
return
|
||||
}
|
||||
await confirmAction()
|
||||
setConfirmOpen(false)
|
||||
setConfirmAction(null)
|
||||
}
|
||||
|
||||
return (
|
||||
<Box>
|
||||
<Typography variant="h5" sx={{ mb: 2 }}>
|
||||
Admin: Site TLS
|
||||
</Typography>
|
||||
<Paper sx={{ p: 2, maxWidth: 980 }}>
|
||||
{error ? <Alert severity="error" sx={{ mb: 1 }}>{error}</Alert> : null}
|
||||
{(loading || listenersLoading) ? (
|
||||
<Typography variant="body2" color="text.secondary" sx={{ mb: 1 }}>
|
||||
Loading...
|
||||
</Typography>
|
||||
) : null}
|
||||
<Box sx={{ display: 'flex', alignItems: 'center', justifyContent: 'space-between', mb: 1 }}>
|
||||
<Typography variant="h6">Listeners</Typography>
|
||||
<Button variant="outlined" onClick={openCreateDialog}>
|
||||
Add Listener
|
||||
</Button>
|
||||
</Box>
|
||||
<Box sx={{ display: 'grid', gap: 1 }}>
|
||||
<Paper variant="outlined" sx={{ p: 1, display: 'grid', gap: 0.5 }}>
|
||||
<Box sx={{ display: 'flex', alignItems: 'center', justifyContent: 'space-between', gap: 1 }}>
|
||||
<Box sx={{ display: 'flex', alignItems: 'center', gap: 1 }}>
|
||||
<Typography variant="subtitle2">Main Listener</Typography>
|
||||
<Chip size="small" color="info" label="Main" />
|
||||
</Box>
|
||||
<Box sx={{ display: 'flex', alignItems: 'center', gap: 1 }}>
|
||||
<Button size="small" onClick={openMainEditDialog}>Edit</Button>
|
||||
</Box>
|
||||
</Box>
|
||||
<Typography variant="caption" color="text.secondary">
|
||||
HTTP: {(settings.http_addrs || []).join(', ') || '(none)'}
|
||||
</Typography>
|
||||
<Typography variant="caption" color="text.secondary">
|
||||
HTTPS: {(settings.https_addrs || []).join(', ') || '(none)'}
|
||||
</Typography>
|
||||
</Paper>
|
||||
{!listenersLoading && (listeners || []).length === 0 ? (
|
||||
<Typography variant="body2" color="text.secondary">
|
||||
No additional listeners configured.
|
||||
</Typography>
|
||||
) : null}
|
||||
{(listeners || []).map((item) => (
|
||||
<Paper key={item.id} variant="outlined" sx={{ p: 1, display: 'grid', gap: 0.5 }}>
|
||||
<Box sx={{ display: 'flex', alignItems: 'center', justifyContent: 'space-between', gap: 1 }}>
|
||||
<Box sx={{ display: 'flex', alignItems: 'center', gap: 1 }}>
|
||||
<Typography variant="subtitle2">{item.name}</Typography>
|
||||
{item.enabled ? (
|
||||
(runtimeCounts[item.id] || 0) > 0 ? (
|
||||
<Chip size="small" color="success" label={`Running (${runtimeCounts[item.id]} endpoint${runtimeCounts[item.id] > 1 ? 's' : ''})`} />
|
||||
) : (
|
||||
<Chip size="small" color="warning" label="Enabled (starting...)" />
|
||||
)
|
||||
) : (
|
||||
<Chip size="small" label="Disabled" />
|
||||
)}
|
||||
</Box>
|
||||
<Box sx={{ display: 'flex', alignItems: 'center', gap: 1 }}>
|
||||
<Button
|
||||
size="small"
|
||||
color={item.enabled ? 'warning' : 'success'}
|
||||
onClick={() => handleToggleListener(item)}
|
||||
>
|
||||
{item.enabled ? 'Disable' : 'Enable'}
|
||||
</Button>
|
||||
<Button size="small" onClick={() => openEditDialog(item)}>
|
||||
Edit
|
||||
</Button>
|
||||
<Button size="small" color="error" onClick={() => handleDeleteListener(item)}>
|
||||
Delete
|
||||
</Button>
|
||||
</Box>
|
||||
</Box>
|
||||
<Typography variant="caption" color="text.secondary">
|
||||
HTTP: {(item.http_addrs || []).join(', ') || '(none)'}
|
||||
</Typography>
|
||||
<Typography variant="caption" color="text.secondary">
|
||||
HTTPS: {(item.https_addrs || []).join(', ') || '(none)'}
|
||||
</Typography>
|
||||
<Typography variant="caption" color="text.secondary">
|
||||
Policy: {item.auth_policy || 'default'} · Scope: {item.apply_policy_api ? 'API ' : ''}{item.apply_policy_git ? 'Git ' : ''}{item.apply_policy_rpm ? 'RPM ' : ''}{item.apply_policy_v2 ? 'V2' : ''}
|
||||
</Typography>
|
||||
</Paper>
|
||||
))}
|
||||
</Box>
|
||||
</Paper>
|
||||
|
||||
<Dialog open={dialogOpen} onClose={() => setDialogOpen(false)} maxWidth="md" fullWidth>
|
||||
<DialogTitle>
|
||||
<Box sx={{ display: 'grid', gap: 1 }}>
|
||||
<Typography variant="h6">{editingMain ? 'Edit Main Listener' : editingID ? 'Edit Listener' : 'Add Listener'}</Typography>
|
||||
{listenerError ? <Alert severity="error">{listenerError}</Alert> : null}
|
||||
</Box>
|
||||
</DialogTitle>
|
||||
<DialogContent sx={{ display: 'grid', gap: 1, pt: '8px !important' }}>
|
||||
{!editingMain ? (
|
||||
<TextField
|
||||
label="Name"
|
||||
value={listenerForm.name}
|
||||
onChange={(event) => setListenerForm((prev) => ({ ...prev, name: event.target.value }))}
|
||||
/>
|
||||
) : null}
|
||||
<TextField
|
||||
label="HTTP Addresses (one per line)"
|
||||
multiline
|
||||
minRows={2}
|
||||
value={listenerHTTPText}
|
||||
onChange={(event) => setListenerHTTPText(event.target.value)}
|
||||
/>
|
||||
<TextField
|
||||
label="HTTPS Addresses (one per line)"
|
||||
multiline
|
||||
minRows={2}
|
||||
value={listenerHTTPSText}
|
||||
onChange={(event) => setListenerHTTPSText(event.target.value)}
|
||||
/>
|
||||
{!editingMain ? (
|
||||
<TextField
|
||||
select
|
||||
label="Auth Policy"
|
||||
value={listenerForm.auth_policy}
|
||||
onChange={(event) =>
|
||||
setListenerForm((prev) => ({
|
||||
...prev,
|
||||
auth_policy: event.target.value as ListenerForm['auth_policy']
|
||||
}))
|
||||
}
|
||||
>
|
||||
<MenuItem value="default">default</MenuItem>
|
||||
<MenuItem value="read_open_write_cert">read_open_write_cert</MenuItem>
|
||||
<MenuItem value="read_open_write_cert_or_auth">read_open_write_cert_or_auth</MenuItem>
|
||||
<MenuItem value="cert_only">cert_only</MenuItem>
|
||||
<MenuItem value="read_only_public">read_only_public</MenuItem>
|
||||
</TextField>
|
||||
) : null}
|
||||
{!editingMain ? (
|
||||
<Box sx={{ display: 'grid', gridTemplateColumns: 'repeat(2, minmax(0,1fr))', gap: 1 }}>
|
||||
<Button
|
||||
variant={listenerForm.apply_policy_api ? 'contained' : 'outlined'}
|
||||
size="small"
|
||||
onClick={() => setListenerForm((prev) => ({ ...prev, apply_policy_api: !prev.apply_policy_api }))}
|
||||
>
|
||||
API
|
||||
</Button>
|
||||
<Button
|
||||
variant={listenerForm.apply_policy_git ? 'contained' : 'outlined'}
|
||||
size="small"
|
||||
onClick={() => setListenerForm((prev) => ({ ...prev, apply_policy_git: !prev.apply_policy_git }))}
|
||||
>
|
||||
Git
|
||||
</Button>
|
||||
<Button
|
||||
variant={listenerForm.apply_policy_rpm ? 'contained' : 'outlined'}
|
||||
size="small"
|
||||
onClick={() => setListenerForm((prev) => ({ ...prev, apply_policy_rpm: !prev.apply_policy_rpm }))}
|
||||
>
|
||||
RPM
|
||||
</Button>
|
||||
<Button
|
||||
variant={listenerForm.apply_policy_v2 ? 'contained' : 'outlined'}
|
||||
size="small"
|
||||
onClick={() => setListenerForm((prev) => ({ ...prev, apply_policy_v2: !prev.apply_policy_v2 }))}
|
||||
>
|
||||
V2
|
||||
</Button>
|
||||
</Box>
|
||||
) : null}
|
||||
{!editingMain ? (
|
||||
<TextField
|
||||
select
|
||||
SelectProps={{ multiple: true }}
|
||||
label="Allowed PKI Client Certificates"
|
||||
value={selectedAllowedCertIDs}
|
||||
onChange={(event) => {
|
||||
const value = event.target.value
|
||||
setSelectedAllowedCertIDs(typeof value === 'string' ? value.split(',') : (value as string[]))
|
||||
}}
|
||||
helperText="Selected certs are added to the fingerprint allowlist automatically."
|
||||
>
|
||||
{pkiCerts.map((cert) => (
|
||||
<MenuItem key={cert.id} value={cert.id}>
|
||||
{cert.common_name || cert.serial_hex} ({cert.id.slice(0, 8)})
|
||||
</MenuItem>
|
||||
))}
|
||||
</TextField>
|
||||
) : null}
|
||||
{!editingMain ? (
|
||||
<Box sx={{ display: 'grid', gridTemplateColumns: '1fr auto', gap: 1, alignItems: 'center' }}>
|
||||
<TextField
|
||||
select
|
||||
label="Bind Selected Certs To Principal"
|
||||
value={bindPrincipalID}
|
||||
onChange={(event) => setBindPrincipalID(event.target.value)}
|
||||
>
|
||||
{principals
|
||||
.filter((item) => !item.disabled)
|
||||
.map((item) => (
|
||||
<MenuItem key={item.id} value={item.id}>{item.name} ({item.id.slice(0, 8)})</MenuItem>
|
||||
))}
|
||||
</TextField>
|
||||
<Button variant="outlined" onClick={bindSelectedCertsToPrincipal} disabled={listenerSaving}>
|
||||
Bind
|
||||
</Button>
|
||||
</Box>
|
||||
) : null}
|
||||
{!editingMain ? (
|
||||
<TextField
|
||||
label="Client Cert Fingerprints (SHA256, one per line)"
|
||||
multiline
|
||||
minRows={3}
|
||||
value={listenerCertAllowText}
|
||||
onChange={(event) => setListenerCertAllowText(event.target.value)}
|
||||
helperText="Manual fingerprints only. Fingerprints for selected PKI certs are managed by the selector above."
|
||||
/>
|
||||
) : null}
|
||||
<TextField
|
||||
select
|
||||
label="TLS PKI Server Cert"
|
||||
value={listenerForm.tls_pki_server_cert_id}
|
||||
onChange={(event) =>
|
||||
setListenerForm((prev) => ({
|
||||
...prev,
|
||||
tls_pki_server_cert_id: event.target.value
|
||||
}))
|
||||
}
|
||||
>
|
||||
<MenuItem value="">(none)</MenuItem>
|
||||
{pkiCerts.map((cert) => (
|
||||
<MenuItem key={cert.id} value={cert.id}>
|
||||
{cert.common_name || cert.serial_hex} ({cert.id.slice(0, 8)})
|
||||
</MenuItem>
|
||||
))}
|
||||
</TextField>
|
||||
<TextField
|
||||
select
|
||||
label="TLS Client Auth"
|
||||
value={listenerForm.tls_client_auth}
|
||||
onChange={(event) =>
|
||||
setListenerForm((prev) => ({
|
||||
...prev,
|
||||
tls_client_auth: event.target.value as 'none' | 'request' | 'require' | 'verify_if_given' | 'require_and_verify'
|
||||
}))
|
||||
}
|
||||
>
|
||||
<MenuItem value="none">none</MenuItem>
|
||||
<MenuItem value="request">request</MenuItem>
|
||||
<MenuItem value="require">require</MenuItem>
|
||||
<MenuItem value="verify_if_given">verify_if_given</MenuItem>
|
||||
<MenuItem value="require_and_verify">require_and_verify</MenuItem>
|
||||
</TextField>
|
||||
<TextField
|
||||
select
|
||||
label="TLS PKI Client CA"
|
||||
value={listenerForm.tls_pki_client_ca_id}
|
||||
onChange={(event) => setListenerForm((prev) => ({ ...prev, tls_pki_client_ca_id: event.target.value }))}
|
||||
>
|
||||
<MenuItem value="">(none)</MenuItem>
|
||||
{pkiCAs.map((ca) => (
|
||||
<MenuItem key={ca.id} value={ca.id}>
|
||||
{ca.name} ({ca.id.slice(0, 8)})
|
||||
</MenuItem>
|
||||
))}
|
||||
</TextField>
|
||||
<TextField
|
||||
select
|
||||
label="TLS Minimum Version"
|
||||
value={listenerForm.tls_min_version}
|
||||
onChange={(event) =>
|
||||
setListenerForm((prev) => ({ ...prev, tls_min_version: event.target.value as '1.0' | '1.1' | '1.2' | '1.3' }))
|
||||
}
|
||||
>
|
||||
<MenuItem value="1.0">1.0</MenuItem>
|
||||
<MenuItem value="1.1">1.1</MenuItem>
|
||||
<MenuItem value="1.2">1.2</MenuItem>
|
||||
<MenuItem value="1.3">1.3</MenuItem>
|
||||
</TextField>
|
||||
</DialogContent>
|
||||
<DialogActions>
|
||||
<Button onClick={() => setDialogOpen(false)}>Cancel</Button>
|
||||
<Button variant="contained" onClick={handleSaveDialog} disabled={listenerSaving}>
|
||||
{listenerSaving ? 'Saving...' : editingMain || editingID ? 'Save' : 'Create'}
|
||||
</Button>
|
||||
</DialogActions>
|
||||
</Dialog>
|
||||
|
||||
<Dialog open={confirmOpen} onClose={() => setConfirmOpen(false)} maxWidth="xs" fullWidth>
|
||||
<DialogTitle>{confirmTitle}</DialogTitle>
|
||||
<DialogContent>
|
||||
<Typography variant="body2">{confirmMessage}</Typography>
|
||||
</DialogContent>
|
||||
<DialogActions>
|
||||
<Button onClick={() => setConfirmOpen(false)}>Cancel</Button>
|
||||
<Button variant="contained" color={confirmColor} onClick={handleConfirm}>
|
||||
{confirmLabel}
|
||||
</Button>
|
||||
</DialogActions>
|
||||
</Dialog>
|
||||
</Box>
|
||||
)
|
||||
}
|
||||
@@ -64,11 +64,42 @@ export default function CommitDetailPage() {
|
||||
})
|
||||
}, [repoId])
|
||||
|
||||
const handleFileDiff = async (file: string) => {
|
||||
if (!repoId || !hash) return
|
||||
const res = await api.getRepoFileDiff(repoId, hash, file)
|
||||
const extractFileDiffFromCommitDiff = (raw: string, file: string) => {
|
||||
const text = (raw || '').replace(/\r\n/g, '\n')
|
||||
if (!text) return ''
|
||||
const lines = text.split('\n')
|
||||
const blocks: string[] = []
|
||||
let current: string[] = []
|
||||
let i = 0
|
||||
let line = ''
|
||||
for (i = 0; i < lines.length; i += 1) {
|
||||
line = lines[i]
|
||||
if (line.startsWith('diff --git ')) {
|
||||
if (current.length > 0) {
|
||||
blocks.push(current.join('\n'))
|
||||
}
|
||||
current = [line]
|
||||
} else if (current.length > 0) {
|
||||
current.push(line)
|
||||
}
|
||||
}
|
||||
if (current.length > 0) {
|
||||
blocks.push(current.join('\n'))
|
||||
}
|
||||
for (i = 0; i < blocks.length; i += 1) {
|
||||
const block = blocks[i]
|
||||
const header = block.split('\n', 1)[0] || ''
|
||||
if (header.includes(` a/${file} `) || header.endsWith(` a/${file}`) || header.includes(` b/${file} `) || header.endsWith(` b/${file}`)) {
|
||||
return block
|
||||
}
|
||||
}
|
||||
return ''
|
||||
}
|
||||
|
||||
const handleFileDiff = (file: string) => {
|
||||
const extracted = extractFileDiffFromCommitDiff(diff, file)
|
||||
setSelectedFile(file)
|
||||
setFileDiff(res.diff || '')
|
||||
setFileDiff(extracted)
|
||||
}
|
||||
|
||||
const renderSplitDiff = (raw: string) => {
|
||||
|
||||
@@ -33,6 +33,7 @@ export default function RepoGitDetailPage(props: RepoGitDetailPageProps) {
|
||||
const [defaultBranch, setDefaultBranch] = useState<string>('')
|
||||
const [tree, setTree] = useState<RepoTreeEntry[]>([])
|
||||
const [treeError, setTreeError] = useState<string | null>(null)
|
||||
const [treeReloadTick, setTreeReloadTick] = useState(0)
|
||||
const [fileQuery, setFileQuery] = useState('')
|
||||
const [path, setPath] = useState('')
|
||||
const [pathSegments, setPathSegments] = useState<string[]>([])
|
||||
@@ -142,7 +143,7 @@ export default function RepoGitDetailPage(props: RepoGitDetailPageProps) {
|
||||
if (!ref && branches.length === 0) return
|
||||
if (!repo) return
|
||||
if (repo && repo.type && repo.type !== 'git') return
|
||||
const key = `${repoId}:${ref}:${path}`
|
||||
const key = `${repoId}:${ref}:${path}:${treeReloadTick}`
|
||||
if (lastTreeKey.current === key) return
|
||||
lastTreeKey.current = key
|
||||
api.listRepoTree(repoId, ref || undefined, path)
|
||||
@@ -163,7 +164,7 @@ export default function RepoGitDetailPage(props: RepoGitDetailPageProps) {
|
||||
setSelectedCommit(null)
|
||||
}
|
||||
})
|
||||
}, [repoId, ref, path, branches])
|
||||
}, [repoId, ref, path, branches, treeReloadTick])
|
||||
|
||||
useEffect(() => {
|
||||
if (!repoId || !ref) {
|
||||
@@ -270,6 +271,10 @@ export default function RepoGitDetailPage(props: RepoGitDetailPageProps) {
|
||||
}
|
||||
|
||||
const handleBreadcrumb = (nextPath: string) => {
|
||||
if (nextPath === path) {
|
||||
setTreeReloadTick((prev) => prev + 1)
|
||||
return
|
||||
}
|
||||
setPath(nextPath)
|
||||
if (nextPath === '') {
|
||||
setPathSegments([])
|
||||
|
||||
@@ -3,6 +3,7 @@ import {
|
||||
Box,
|
||||
Button,
|
||||
Checkbox,
|
||||
Chip,
|
||||
Dialog,
|
||||
DialogActions,
|
||||
DialogContent,
|
||||
@@ -23,7 +24,7 @@ import {
|
||||
} from '@mui/material'
|
||||
import { useEffect, useRef, useState } from 'react'
|
||||
import { Link, useParams } from 'react-router-dom'
|
||||
import { api, Project, Repo, RpmPackageDetail, RpmPackageSummary, RpmTreeEntry } from '../api'
|
||||
import { api, Project, Repo, RpmMirrorRun, RpmPackageDetail, RpmPackageSummary, RpmTreeEntry } from '../api'
|
||||
import ChevronLeftIcon from '@mui/icons-material/ChevronLeft'
|
||||
import ChevronRightIcon from '@mui/icons-material/ChevronRight'
|
||||
import DeleteOutlineIcon from '@mui/icons-material/DeleteOutline'
|
||||
@@ -31,6 +32,7 @@ import DriveFileRenameOutlineIcon from '@mui/icons-material/DriveFileRenameOutli
|
||||
import FolderIcon from '@mui/icons-material/Folder'
|
||||
import InsertDriveFileIcon from '@mui/icons-material/InsertDriveFile'
|
||||
import HomeOutlinedIcon from '@mui/icons-material/HomeOutlined'
|
||||
import MonitorHeartOutlinedIcon from '@mui/icons-material/MonitorHeartOutlined'
|
||||
import ProjectNavBar from '../components/ProjectNavBar'
|
||||
import RepoSubNav from '../components/RepoSubNav'
|
||||
import CodeBlock from '../components/CodeBlock'
|
||||
@@ -52,11 +54,19 @@ export default function RepoRpmDetailPage(props: RepoRpmDetailPageProps) {
|
||||
const [rpmMetaContent, setRpmMetaContent] = useState('')
|
||||
const [rpmMetaError, setRpmMetaError] = useState<string | null>(null)
|
||||
const [rpmMetaLoading, setRpmMetaLoading] = useState(false)
|
||||
const [rpmTab, setRpmTab] = useState<'meta' | 'files'>('meta')
|
||||
const [rpmTab, setRpmTab] = useState<'meta' | 'files' | 'changelog'>('meta')
|
||||
const [sidebarOpen, setSidebarOpen] = useState(true)
|
||||
const [subdirOpen, setSubdirOpen] = useState(false)
|
||||
const [subdirName, setSubdirName] = useState('')
|
||||
const [subdirType, setSubdirType] = useState<'container' | 'repo'>('container')
|
||||
const [subdirMode, setSubdirMode] = useState<'local' | 'mirror'>('local')
|
||||
const [subdirAllowDelete, setSubdirAllowDelete] = useState(false)
|
||||
const [subdirSyncIntervalSec, setSubdirSyncIntervalSec] = useState('300')
|
||||
const [subdirRemoteURL, setSubdirRemoteURL] = useState('')
|
||||
const [subdirConnectHost, setSubdirConnectHost] = useState('')
|
||||
const [subdirHostHeader, setSubdirHostHeader] = useState('')
|
||||
const [subdirTLSServerName, setSubdirTLSServerName] = useState('')
|
||||
const [subdirTLSInsecureSkipVerify, setSubdirTLSInsecureSkipVerify] = useState(false)
|
||||
const [subdirError, setSubdirError] = useState<string | null>(null)
|
||||
const [subdirSaving, setSubdirSaving] = useState(false)
|
||||
const [uploadOpen, setUploadOpen] = useState(false)
|
||||
@@ -75,12 +85,39 @@ export default function RepoRpmDetailPage(props: RepoRpmDetailPageProps) {
|
||||
const [renamePath, setRenamePath] = useState('')
|
||||
const [renameName, setRenameName] = useState('')
|
||||
const [renameNewName, setRenameNewName] = useState('')
|
||||
const [renameIsRepoDir, setRenameIsRepoDir] = useState(false)
|
||||
const [renameMode, setRenameMode] = useState<'local' | 'mirror'>('local')
|
||||
const [renameAllowDelete, setRenameAllowDelete] = useState(false)
|
||||
const [renameSyncIntervalSec, setRenameSyncIntervalSec] = useState('300')
|
||||
const [renameRemoteURL, setRenameRemoteURL] = useState('')
|
||||
const [renameConnectHost, setRenameConnectHost] = useState('')
|
||||
const [renameHostHeader, setRenameHostHeader] = useState('')
|
||||
const [renameTLSServerName, setRenameTLSServerName] = useState('')
|
||||
const [renameTLSInsecureSkipVerify, setRenameTLSInsecureSkipVerify] = useState(false)
|
||||
const [renameError, setRenameError] = useState<string | null>(null)
|
||||
const [renaming, setRenaming] = useState(false)
|
||||
const [statusOpen, setStatusOpen] = useState(false)
|
||||
const [statusPath, setStatusPath] = useState('')
|
||||
const [statusName, setStatusName] = useState('')
|
||||
const [statusMode, setStatusMode] = useState<'local' | 'mirror'>('local')
|
||||
const [statusSyncStatus, setStatusSyncStatus] = useState('')
|
||||
const [statusSyncStep, setStatusSyncStep] = useState('')
|
||||
const [statusSyncError, setStatusSyncError] = useState('')
|
||||
const [statusSyncTotal, setStatusSyncTotal] = useState(0)
|
||||
const [statusSyncDone, setStatusSyncDone] = useState(0)
|
||||
const [statusSyncFailed, setStatusSyncFailed] = useState(0)
|
||||
const [statusSyncDeleted, setStatusSyncDeleted] = useState(0)
|
||||
const [statusSyncEnabled, setStatusSyncEnabled] = useState(true)
|
||||
const [statusSyncBusy, setStatusSyncBusy] = useState(false)
|
||||
const [statusRuns, setStatusRuns] = useState<RpmMirrorRun[]>([])
|
||||
const [statusError, setStatusError] = useState<string | null>(null)
|
||||
const [clearRunsConfirmOpen, setClearRunsConfirmOpen] = useState(false)
|
||||
const [rpmPath, setRpmPath] = useState('')
|
||||
const [rpmPathSegments, setRpmPathSegments] = useState<string[]>([])
|
||||
const [rpmFileQuery, setRpmFileQuery] = useState('')
|
||||
const [rpmTree, setRpmTree] = useState<RpmTreeEntry[]>([])
|
||||
const [rpmTreeError, setRpmTreeError] = useState<string | null>(null)
|
||||
const [rpmTreeReloadTick, setRpmTreeReloadTick] = useState(0)
|
||||
const [rpmSelectedEntry, setRpmSelectedEntry] = useState<RpmTreeEntry | null>(null)
|
||||
const [canWrite, setCanWrite] = useState(false)
|
||||
const initRepoRef = useRef<string | null>(null)
|
||||
@@ -152,7 +189,7 @@ export default function RepoRpmDetailPage(props: RepoRpmDetailPageProps) {
|
||||
setRpmTreeError(message)
|
||||
setRpmTree([])
|
||||
})
|
||||
}, [repoId, repo, rpmPath])
|
||||
}, [repoId, repo, rpmPath, rpmTreeReloadTick])
|
||||
|
||||
const handleSelectRpm = async (pkg: RpmPackageSummary) => {
|
||||
if (!repoId) return
|
||||
@@ -160,7 +197,6 @@ export default function RepoRpmDetailPage(props: RepoRpmDetailPageProps) {
|
||||
setRpmDetail(null)
|
||||
setRpmDetailLoading(true)
|
||||
setRpmError(null)
|
||||
setRpmTab('meta')
|
||||
try {
|
||||
const detail = await api.getRpmPackage(repoId, pkg.filename)
|
||||
setRpmSelected({
|
||||
@@ -222,6 +258,7 @@ export default function RepoRpmDetailPage(props: RepoRpmDetailPageProps) {
|
||||
}
|
||||
|
||||
const handleCreateSubdir = async () => {
|
||||
let syncIntervalSec: number
|
||||
if (!repoId) return
|
||||
if (!subdirName.trim()) {
|
||||
setSubdirError('Name is required.')
|
||||
@@ -230,8 +267,28 @@ export default function RepoRpmDetailPage(props: RepoRpmDetailPageProps) {
|
||||
setSubdirError(null)
|
||||
setSubdirSaving(true)
|
||||
try {
|
||||
syncIntervalSec = 0
|
||||
if (subdirType === 'repo' && subdirMode === 'mirror') {
|
||||
syncIntervalSec = Number(subdirSyncIntervalSec)
|
||||
if (!Number.isFinite(syncIntervalSec) || syncIntervalSec <= 0) {
|
||||
syncIntervalSec = 300
|
||||
}
|
||||
}
|
||||
const parent = rpmPath
|
||||
await api.createRpmSubdir(repoId, subdirName.trim(), subdirType, parent)
|
||||
await api.createRpmSubdir(
|
||||
repoId,
|
||||
subdirName.trim(),
|
||||
subdirType,
|
||||
parent,
|
||||
subdirMode,
|
||||
subdirAllowDelete,
|
||||
subdirRemoteURL.trim(),
|
||||
subdirConnectHost.trim(),
|
||||
subdirHostHeader.trim(),
|
||||
subdirTLSServerName.trim(),
|
||||
subdirTLSInsecureSkipVerify,
|
||||
syncIntervalSec
|
||||
)
|
||||
api.listRpmTree(repoId, rpmPath)
|
||||
.then((list) => {
|
||||
setRpmTree(Array.isArray(list) ? list : [])
|
||||
@@ -244,6 +301,14 @@ export default function RepoRpmDetailPage(props: RepoRpmDetailPageProps) {
|
||||
setSubdirOpen(false)
|
||||
setSubdirName('')
|
||||
setSubdirType('container')
|
||||
setSubdirMode('local')
|
||||
setSubdirAllowDelete(false)
|
||||
setSubdirSyncIntervalSec('300')
|
||||
setSubdirRemoteURL('')
|
||||
setSubdirConnectHost('')
|
||||
setSubdirHostHeader('')
|
||||
setSubdirTLSServerName('')
|
||||
setSubdirTLSInsecureSkipVerify(false)
|
||||
} catch (err) {
|
||||
const message = err instanceof Error ? err.message : 'Failed to create subdirectory'
|
||||
setSubdirError(message)
|
||||
@@ -289,19 +354,52 @@ export default function RepoRpmDetailPage(props: RepoRpmDetailPageProps) {
|
||||
}
|
||||
|
||||
const handleRenameSubdir = async () => {
|
||||
let syncIntervalSec: number
|
||||
if (!repoId || !renamePath) return
|
||||
if (!renameNewName.trim()) {
|
||||
setRenameError('New name is required.')
|
||||
return
|
||||
}
|
||||
if (renameIsRepoDir && renameMode === 'mirror' && !renameRemoteURL.trim()) {
|
||||
setRenameError('Remote URL is required for mirror mode.')
|
||||
return
|
||||
}
|
||||
setRenameError(null)
|
||||
setRenaming(true)
|
||||
try {
|
||||
await api.renameRpmSubdir(repoId, renamePath, renameNewName.trim())
|
||||
syncIntervalSec = 0
|
||||
if (renameIsRepoDir && renameMode === 'mirror') {
|
||||
syncIntervalSec = Number(renameSyncIntervalSec)
|
||||
if (!Number.isFinite(syncIntervalSec) || syncIntervalSec <= 0) {
|
||||
syncIntervalSec = 300
|
||||
}
|
||||
}
|
||||
await api.updateRpmSubdir(
|
||||
repoId,
|
||||
renamePath,
|
||||
renameNewName.trim(),
|
||||
renameIsRepoDir ? renameMode : undefined,
|
||||
renameIsRepoDir ? renameAllowDelete : undefined,
|
||||
renameIsRepoDir ? renameRemoteURL.trim() : undefined,
|
||||
renameIsRepoDir ? renameConnectHost.trim() : undefined,
|
||||
renameIsRepoDir ? renameHostHeader.trim() : undefined,
|
||||
renameIsRepoDir ? renameTLSServerName.trim() : undefined,
|
||||
renameIsRepoDir ? renameTLSInsecureSkipVerify : undefined,
|
||||
renameIsRepoDir && renameMode === 'mirror' ? syncIntervalSec : undefined
|
||||
)
|
||||
setRenameOpen(false)
|
||||
setRenamePath('')
|
||||
setRenameName('')
|
||||
setRenameNewName('')
|
||||
setRenameIsRepoDir(false)
|
||||
setRenameMode('local')
|
||||
setRenameAllowDelete(false)
|
||||
setRenameSyncIntervalSec('300')
|
||||
setRenameRemoteURL('')
|
||||
setRenameConnectHost('')
|
||||
setRenameHostHeader('')
|
||||
setRenameTLSServerName('')
|
||||
setRenameTLSInsecureSkipVerify(false)
|
||||
api.listRpmTree(repoId, rpmPath)
|
||||
.then((list) => {
|
||||
setRpmTree(Array.isArray(list) ? list : [])
|
||||
@@ -319,21 +417,127 @@ export default function RepoRpmDetailPage(props: RepoRpmDetailPageProps) {
|
||||
}
|
||||
}
|
||||
|
||||
const loadStatus = async (path: string) => {
|
||||
let cfg: Awaited<ReturnType<typeof api.getRpmSubdir>>
|
||||
if (!repoId || !path) return
|
||||
setStatusError(null)
|
||||
cfg = await api.getRpmSubdir(repoId, path)
|
||||
setStatusMode(cfg.mode === 'mirror' ? 'mirror' : 'local')
|
||||
setStatusSyncStatus(cfg.sync_status || '')
|
||||
setStatusSyncStep(cfg.sync_step || '')
|
||||
setStatusSyncError(cfg.sync_error || '')
|
||||
setStatusSyncEnabled(Boolean(cfg.sync_enabled))
|
||||
setStatusSyncTotal(Number(cfg.sync_total || 0))
|
||||
setStatusSyncDone(Number(cfg.sync_done || 0))
|
||||
setStatusSyncFailed(Number(cfg.sync_failed || 0))
|
||||
setStatusSyncDeleted(Number(cfg.sync_deleted || 0))
|
||||
if ((cfg.mode || '') === 'mirror') {
|
||||
const runs = await api.listRpmMirrorRuns(repoId, path, 10)
|
||||
setStatusRuns(Array.isArray(runs) ? runs : [])
|
||||
} else {
|
||||
setStatusRuns([])
|
||||
}
|
||||
}
|
||||
|
||||
const openStatusDialog = async (entry: RpmTreeEntry) => {
|
||||
setStatusPath(entry.path)
|
||||
setStatusName(entry.name)
|
||||
setStatusMode(entry.repo_mode === 'mirror' ? 'mirror' : 'local')
|
||||
setStatusSyncStatus('')
|
||||
setStatusSyncStep('')
|
||||
setStatusSyncError('')
|
||||
setStatusSyncEnabled(true)
|
||||
setStatusSyncTotal(0)
|
||||
setStatusSyncDone(0)
|
||||
setStatusSyncFailed(0)
|
||||
setStatusSyncDeleted(0)
|
||||
setStatusRuns([])
|
||||
setStatusError(null)
|
||||
setStatusOpen(true)
|
||||
if (!repoId || !entry.is_repo_dir) return
|
||||
try {
|
||||
await loadStatus(entry.path)
|
||||
} catch (err) {
|
||||
const message = err instanceof Error ? err.message : 'Failed to load mirror status'
|
||||
setStatusError(message)
|
||||
}
|
||||
}
|
||||
|
||||
const handleStatusToggleSyncEnabled = async () => {
|
||||
if (!repoId || !statusPath || statusMode !== 'mirror') return
|
||||
setStatusSyncBusy(true)
|
||||
setStatusError(null)
|
||||
try {
|
||||
if (statusSyncEnabled) {
|
||||
await api.suspendRpmSubdir(repoId, statusPath)
|
||||
} else {
|
||||
await api.resumeRpmSubdir(repoId, statusPath)
|
||||
}
|
||||
await loadStatus(statusPath)
|
||||
} catch (err) {
|
||||
const message = err instanceof Error ? err.message : 'Failed to change mirror sync state'
|
||||
setStatusError(message)
|
||||
} finally {
|
||||
setStatusSyncBusy(false)
|
||||
}
|
||||
}
|
||||
|
||||
const handleStatusClearRuns = async () => {
|
||||
let runs: RpmMirrorRun[]
|
||||
let result: { status: string; deleted_count: number }
|
||||
if (!repoId || !statusPath || statusMode !== 'mirror') return
|
||||
setStatusSyncBusy(true)
|
||||
setStatusError(null)
|
||||
try {
|
||||
result = await api.clearRpmMirrorRuns(repoId, statusPath)
|
||||
runs = await api.listRpmMirrorRuns(repoId, statusPath, 10)
|
||||
setStatusRuns(Array.isArray(runs) ? runs : [])
|
||||
if ((result.deleted_count || 0) <= 0 && Array.isArray(runs) && runs.length > 0) {
|
||||
setStatusError('No rows were deleted. New runs may still be added by active mirror sync.')
|
||||
}
|
||||
} catch (err) {
|
||||
const message = err instanceof Error ? err.message : 'Failed to clear mirror runs'
|
||||
setStatusError(message)
|
||||
} finally {
|
||||
setStatusSyncBusy(false)
|
||||
}
|
||||
}
|
||||
|
||||
const handleStatusRebuildMetadata = async () => {
|
||||
if (!repoId || !statusPath || statusMode !== 'mirror') return
|
||||
setStatusSyncBusy(true)
|
||||
setStatusError(null)
|
||||
try {
|
||||
await api.rebuildRpmSubdirMetadata(repoId, statusPath)
|
||||
} catch (err) {
|
||||
const message = err instanceof Error ? err.message : 'Failed to schedule metadata rebuild'
|
||||
setStatusError(message)
|
||||
} finally {
|
||||
setStatusSyncBusy(false)
|
||||
}
|
||||
}
|
||||
|
||||
const handleRpmBack = () => {
|
||||
if (!rpmPath) return
|
||||
const nextSegments = rpmPathSegments.slice(0, -1)
|
||||
setRpmPath(nextSegments.join('/'))
|
||||
setRpmPathSegments(nextSegments)
|
||||
setRpmFileQuery('')
|
||||
setRpmSelectedEntry(null)
|
||||
}
|
||||
|
||||
const handleRpmBreadcrumb = (nextPath: string) => {
|
||||
if (nextPath === rpmPath) {
|
||||
setRpmTreeReloadTick((prev) => prev + 1)
|
||||
return
|
||||
}
|
||||
setRpmPath(nextPath)
|
||||
if (nextPath === '') {
|
||||
setRpmPathSegments([])
|
||||
} else {
|
||||
setRpmPathSegments(nextPath.split('/').filter(Boolean))
|
||||
}
|
||||
setRpmFileQuery('')
|
||||
setRpmSelectedEntry(null)
|
||||
}
|
||||
|
||||
@@ -403,6 +607,7 @@ export default function RepoRpmDetailPage(props: RepoRpmDetailPageProps) {
|
||||
if (entry.type === 'dir') {
|
||||
setRpmPath(entry.path)
|
||||
setRpmPathSegments(entry.path.split('/').filter(Boolean))
|
||||
setRpmFileQuery('')
|
||||
setRpmSelectedEntry(null)
|
||||
return
|
||||
}
|
||||
@@ -445,6 +650,84 @@ export default function RepoRpmDetailPage(props: RepoRpmDetailPageProps) {
|
||||
}
|
||||
|
||||
const rpmPathParts = rpmPathSegments
|
||||
const normalizedQuery = rpmFileQuery.trim().toLowerCase()
|
||||
const globToRegex = (query: string) => {
|
||||
let i = 0
|
||||
let out = '^'
|
||||
while (i < query.length) {
|
||||
const ch = query[i]
|
||||
if (ch === '*') {
|
||||
out += '.*'
|
||||
i += 1
|
||||
continue
|
||||
}
|
||||
if (ch === '?') {
|
||||
out += '.'
|
||||
i += 1
|
||||
continue
|
||||
}
|
||||
if (ch === '[') {
|
||||
const classStart = i
|
||||
i += 1
|
||||
if (i >= query.length) {
|
||||
out += '\\['
|
||||
break
|
||||
}
|
||||
let negate = false
|
||||
if (query[i] === '!' || query[i] === '^') {
|
||||
negate = true
|
||||
i += 1
|
||||
}
|
||||
let classText = ''
|
||||
let sawClass = false
|
||||
while (i < query.length) {
|
||||
const next = query[i]
|
||||
if (next === ']') {
|
||||
sawClass = true
|
||||
i += 1
|
||||
break
|
||||
}
|
||||
if (next === '\\') {
|
||||
classText += '\\\\'
|
||||
i += 1
|
||||
continue
|
||||
}
|
||||
classText += next
|
||||
i += 1
|
||||
}
|
||||
if (!sawClass) {
|
||||
out += '\\['
|
||||
i = classStart + 1
|
||||
continue
|
||||
}
|
||||
out += negate ? `[^${classText}]` : `[${classText}]`
|
||||
continue
|
||||
}
|
||||
if ('.+^$(){}|\\'.includes(ch)) {
|
||||
out += `\\${ch}`
|
||||
i += 1
|
||||
continue
|
||||
}
|
||||
out += ch
|
||||
i += 1
|
||||
}
|
||||
out += '$'
|
||||
return out
|
||||
}
|
||||
const matchesFileQuery = (name: string, query: string) => {
|
||||
if (!query) return true
|
||||
if (query.includes('*') || query.includes('?') || query.includes('[')) {
|
||||
try {
|
||||
return new RegExp(globToRegex(query), 'i').test(name)
|
||||
} catch {
|
||||
return name.toLowerCase().includes(query)
|
||||
}
|
||||
}
|
||||
return name.toLowerCase().includes(query)
|
||||
}
|
||||
const filteredTree = normalizedQuery
|
||||
? rpmTree.filter((entry) => matchesFileQuery(entry.name, normalizedQuery))
|
||||
: rpmTree
|
||||
|
||||
return (
|
||||
<Box>
|
||||
@@ -493,7 +776,23 @@ export default function RepoRpmDetailPage(props: RepoRpmDetailPageProps) {
|
||||
<Box sx={{ display: 'flex', alignItems: 'center', gap: 0.5 }}>
|
||||
{canWrite ? (
|
||||
<>
|
||||
<Button size="small" onClick={() => setSubdirOpen(true)}>
|
||||
<Button
|
||||
size="small"
|
||||
onClick={() => {
|
||||
setSubdirError(null)
|
||||
setSubdirName('')
|
||||
setSubdirType('container')
|
||||
setSubdirMode('local')
|
||||
setSubdirAllowDelete(false)
|
||||
setSubdirSyncIntervalSec('300')
|
||||
setSubdirRemoteURL('')
|
||||
setSubdirConnectHost('')
|
||||
setSubdirHostHeader('')
|
||||
setSubdirTLSServerName('')
|
||||
setSubdirTLSInsecureSkipVerify(false)
|
||||
setSubdirOpen(true)
|
||||
}}
|
||||
>
|
||||
New Folder...
|
||||
</Button>
|
||||
<Button
|
||||
@@ -536,6 +835,14 @@ export default function RepoRpmDetailPage(props: RepoRpmDetailPageProps) {
|
||||
)
|
||||
})}
|
||||
</Box>
|
||||
<TextField
|
||||
size="small"
|
||||
placeholder="Search files"
|
||||
value={rpmFileQuery}
|
||||
onChange={(event) => setRpmFileQuery(event.target.value)}
|
||||
fullWidth
|
||||
sx={{ mb: 1, px: 0.5 }}
|
||||
/>
|
||||
{rpmTreeError ? (
|
||||
<Alert severity="warning" sx={{ mb: 1 }}>
|
||||
{rpmTreeError}
|
||||
@@ -556,7 +863,7 @@ export default function RepoRpmDetailPage(props: RepoRpmDetailPageProps) {
|
||||
</ListItemButton>
|
||||
</ListItem>
|
||||
) : null}
|
||||
{rpmTree.map((entry) => (
|
||||
{filteredTree.map((entry) => (
|
||||
<ListItem key={entry.path} disablePadding>
|
||||
<ListItemButton onClick={() => handleRpmEntry(entry)}>
|
||||
<ListItemText
|
||||
@@ -568,21 +875,66 @@ export default function RepoRpmDetailPage(props: RepoRpmDetailPageProps) {
|
||||
<InsertDriveFileIcon fontSize="small" color="info" />
|
||||
)}
|
||||
<Typography variant="body2">{entry.name}</Typography>
|
||||
{entry.type === 'dir' && entry.is_repo_dir ? (
|
||||
<Chip
|
||||
size="small"
|
||||
color={entry.repo_mode === 'mirror' ? 'warning' : 'default'}
|
||||
label={entry.repo_mode === 'mirror' ? 'mirror' : 'local'}
|
||||
sx={{ height: 18 }}
|
||||
/>
|
||||
) : null}
|
||||
</Box>
|
||||
}
|
||||
/>
|
||||
</ListItemButton>
|
||||
{canWrite ? (
|
||||
<Box sx={{ display: 'flex', alignItems: 'center', gap: 0.5, pr: 0.5 }}>
|
||||
{entry.type === 'dir' && entry.is_repo_dir ? (
|
||||
<IconButton
|
||||
size="small"
|
||||
onClick={async (event) => {
|
||||
event.stopPropagation()
|
||||
await openStatusDialog(entry)
|
||||
}}
|
||||
aria-label={`View status for ${entry.name}`}
|
||||
>
|
||||
<MonitorHeartOutlinedIcon fontSize="small" />
|
||||
</IconButton>
|
||||
) : null}
|
||||
{entry.type === 'dir' && entry.name.toLowerCase() !== 'repodata' ? (
|
||||
<IconButton
|
||||
size="small"
|
||||
onClick={(event) => {
|
||||
onClick={async (event) => {
|
||||
event.stopPropagation()
|
||||
setRenameError(null)
|
||||
setRenamePath(entry.path)
|
||||
setRenameName(entry.name)
|
||||
setRenameNewName(entry.name)
|
||||
setRenameIsRepoDir(Boolean(entry.is_repo_dir))
|
||||
setRenameMode(entry.repo_mode === 'mirror' ? 'mirror' : 'local')
|
||||
setRenameAllowDelete(false)
|
||||
setRenameSyncIntervalSec('300')
|
||||
setRenameRemoteURL('')
|
||||
setRenameConnectHost('')
|
||||
setRenameHostHeader('')
|
||||
setRenameTLSServerName('')
|
||||
setRenameTLSInsecureSkipVerify(false)
|
||||
if (repoId && entry.is_repo_dir) {
|
||||
try {
|
||||
const cfg = await api.getRpmSubdir(repoId, entry.path)
|
||||
setRenameMode(cfg.mode === 'mirror' ? 'mirror' : 'local')
|
||||
setRenameAllowDelete(Boolean(cfg.allow_delete))
|
||||
setRenameSyncIntervalSec(String(cfg.sync_interval_sec || 300))
|
||||
setRenameRemoteURL(cfg.remote_url || '')
|
||||
setRenameConnectHost(cfg.connect_host || '')
|
||||
setRenameHostHeader(cfg.host_header || '')
|
||||
setRenameTLSServerName(cfg.tls_server_name || '')
|
||||
setRenameTLSInsecureSkipVerify(Boolean(cfg.tls_insecure_skip_verify))
|
||||
} catch (err) {
|
||||
const message = err instanceof Error ? err.message : 'Failed to load repo directory settings'
|
||||
setRenameError(message)
|
||||
}
|
||||
}
|
||||
setRenameOpen(true)
|
||||
}}
|
||||
aria-label={`Rename folder ${entry.name}`}
|
||||
@@ -628,9 +980,9 @@ export default function RepoRpmDetailPage(props: RepoRpmDetailPageProps) {
|
||||
) : null}
|
||||
</ListItem>
|
||||
))}
|
||||
{!rpmTree.length && !rpmTreeError ? (
|
||||
{!filteredTree.length && !rpmTreeError ? (
|
||||
<Typography variant="body2" color="text.secondary" sx={{ px: 1, py: 1 }}>
|
||||
No files found.
|
||||
{normalizedQuery ? 'No matching files.' : 'No files found.'}
|
||||
</Typography>
|
||||
) : null}
|
||||
</List>
|
||||
@@ -658,6 +1010,7 @@ export default function RepoRpmDetailPage(props: RepoRpmDetailPageProps) {
|
||||
<Tabs value={rpmTab} onChange={(_, value) => setRpmTab(value)}>
|
||||
<Tab label="Metadata" value="meta" />
|
||||
<Tab label="Files" value="files" />
|
||||
<Tab label="Change Log" value="changelog" />
|
||||
</Tabs>
|
||||
{rpmDetailLoading ? (
|
||||
<Typography variant="body2" color="text.secondary" sx={{ mt: 1 }}>
|
||||
@@ -700,10 +1053,10 @@ export default function RepoRpmDetailPage(props: RepoRpmDetailPageProps) {
|
||||
Build Time: {rpmDetail.build_time ? new Date(rpmDetail.build_time * 1000).toLocaleString() : 'n/a'}
|
||||
</Typography>
|
||||
<Typography variant="body2">Size: {rpmDetail.size ? `${rpmDetail.size} bytes` : 'n/a'}</Typography>
|
||||
{rpmDetail.requires.length ? (
|
||||
{Array.isArray(rpmDetail.requires) && rpmDetail.requires.length ? (
|
||||
<Typography variant="body2">Requires: {rpmDetail.requires.join(', ')}</Typography>
|
||||
) : null}
|
||||
{rpmDetail.provides.length ? (
|
||||
{Array.isArray(rpmDetail.provides) && rpmDetail.provides.length ? (
|
||||
<Typography variant="body2">Provides: {rpmDetail.provides.join(', ')}</Typography>
|
||||
) : null}
|
||||
{rpmDetail.description ? (
|
||||
@@ -716,13 +1069,36 @@ export default function RepoRpmDetailPage(props: RepoRpmDetailPageProps) {
|
||||
{rpmDetail && rpmTab === 'files' ? (
|
||||
<Box sx={{ mt: 1, maxHeight: '60vh', overflow: 'auto' }}>
|
||||
<List dense>
|
||||
{rpmDetail.files.map((file) => (
|
||||
{(Array.isArray(rpmDetail.files) ? rpmDetail.files : []).map((file) => (
|
||||
<ListItem key={file}>
|
||||
<ListItemText primary={file} />
|
||||
</ListItem>
|
||||
))}
|
||||
</List>
|
||||
</Box>
|
||||
) : null}
|
||||
{rpmDetail && rpmTab === 'changelog' ? (
|
||||
<Box sx={{ mt: 1, maxHeight: '60vh', overflow: 'auto' }}>
|
||||
{Array.isArray(rpmDetail.changelogs) && rpmDetail.changelogs.length ? (
|
||||
<List dense>
|
||||
{rpmDetail.changelogs.map((item, index) => (
|
||||
<ListItem key={`${item.date}-${index}`} sx={{ display: 'block' }}>
|
||||
<Typography variant="body2" color="text.secondary">
|
||||
{item.date ? new Date(item.date * 1000).toLocaleString() : 'n/a'}
|
||||
{item.author ? ` · ${item.author}` : ''}
|
||||
</Typography>
|
||||
<Typography variant="body2" sx={{ whiteSpace: 'pre-wrap' }}>
|
||||
{item.text || ''}
|
||||
</Typography>
|
||||
</ListItem>
|
||||
))}
|
||||
</List>
|
||||
) : (
|
||||
<Typography variant="body2" color="text.secondary">
|
||||
No change log entries.
|
||||
</Typography>
|
||||
)}
|
||||
</Box>
|
||||
) : null}
|
||||
{!rpmDetail && !rpmDetailLoading && rpmError ? (
|
||||
<Alert severity="warning" sx={{ mt: 1 }}>
|
||||
@@ -793,6 +1169,73 @@ export default function RepoRpmDetailPage(props: RepoRpmDetailPageProps) {
|
||||
<MenuItem value="container">Container</MenuItem>
|
||||
<MenuItem value="repo">RPM Repo</MenuItem>
|
||||
</TextField>
|
||||
{subdirType === 'repo' ? (
|
||||
<TextField
|
||||
select
|
||||
label="Repo Mode"
|
||||
value={subdirMode}
|
||||
onChange={(event) => setSubdirMode(event.target.value as 'local' | 'mirror')}
|
||||
helperText={subdirMode === 'mirror' ? 'Mirror repos are read-only from UI writes.' : 'Local repos allow upload/delete from UI.'}
|
||||
fullWidth
|
||||
>
|
||||
<MenuItem value="local">local</MenuItem>
|
||||
<MenuItem value="mirror">mirror</MenuItem>
|
||||
</TextField>
|
||||
) : null}
|
||||
{subdirType === 'repo' && subdirMode === 'mirror' ? (
|
||||
<TextField
|
||||
label="Sync Interval (seconds)"
|
||||
value={subdirSyncIntervalSec}
|
||||
onChange={(event) => setSubdirSyncIntervalSec(event.target.value)}
|
||||
helperText="Used for mirror mode periodic pull. Minimum 10."
|
||||
fullWidth
|
||||
/>
|
||||
) : null}
|
||||
{subdirType === 'repo' && subdirMode === 'mirror' ? (
|
||||
<>
|
||||
<TextField
|
||||
label="Remote URL"
|
||||
value={subdirRemoteURL}
|
||||
onChange={(event) => setSubdirRemoteURL(event.target.value)}
|
||||
helperText="Example: https://rpm.repo.com/base/path"
|
||||
fullWidth
|
||||
/>
|
||||
<TextField
|
||||
label="Connect Host (optional)"
|
||||
value={subdirConnectHost}
|
||||
onChange={(event) => setSubdirConnectHost(event.target.value)}
|
||||
helperText="Optional host:port override, e.g. 127.0.0.1:443"
|
||||
fullWidth
|
||||
/>
|
||||
<TextField
|
||||
label="Host Header (optional)"
|
||||
value={subdirHostHeader}
|
||||
onChange={(event) => setSubdirHostHeader(event.target.value)}
|
||||
helperText="Optional Host header override, e.g. rpm.repo.com"
|
||||
fullWidth
|
||||
/>
|
||||
<TextField
|
||||
label="TLS Server Name (optional)"
|
||||
value={subdirTLSServerName}
|
||||
onChange={(event) => setSubdirTLSServerName(event.target.value)}
|
||||
helperText="Optional SNI/verify server name override"
|
||||
fullWidth
|
||||
/>
|
||||
<FormControlLabel
|
||||
control={<Checkbox checked={subdirAllowDelete} onChange={(event) => setSubdirAllowDelete(event.target.checked)} />}
|
||||
label="Allow delete (files/container dirs) in mirror subtree"
|
||||
/>
|
||||
<FormControlLabel
|
||||
control={
|
||||
<Checkbox
|
||||
checked={subdirTLSInsecureSkipVerify}
|
||||
onChange={(event) => setSubdirTLSInsecureSkipVerify(event.target.checked)}
|
||||
/>
|
||||
}
|
||||
label="Skip TLS certificate verification"
|
||||
/>
|
||||
</>
|
||||
) : null}
|
||||
</Box>
|
||||
</DialogContent>
|
||||
<DialogActions>
|
||||
@@ -858,25 +1301,231 @@ export default function RepoRpmDetailPage(props: RepoRpmDetailPageProps) {
|
||||
</Button>
|
||||
</DialogActions>
|
||||
</Dialog>
|
||||
<Dialog open={renameOpen} onClose={() => setRenameOpen(false)} maxWidth="xs" fullWidth>
|
||||
<DialogTitle>Rename folder</DialogTitle>
|
||||
<Dialog
|
||||
open={statusOpen}
|
||||
onClose={() => {
|
||||
setStatusOpen(false)
|
||||
setStatusError(null)
|
||||
setClearRunsConfirmOpen(false)
|
||||
}}
|
||||
maxWidth="sm"
|
||||
fullWidth
|
||||
>
|
||||
<DialogTitle>Repo directory status</DialogTitle>
|
||||
<DialogContent>
|
||||
{renameError ? <Alert severity="error">{renameError}</Alert> : null}
|
||||
<Typography variant="body2" sx={{ mt: 1 }}>
|
||||
Current name: {renameName}
|
||||
</Typography>
|
||||
<TextField
|
||||
label="New name"
|
||||
value={renameNewName}
|
||||
onChange={(event) => setRenameNewName(event.target.value)}
|
||||
fullWidth
|
||||
sx={{ mt: 1 }}
|
||||
/>
|
||||
{statusError ? <Alert severity="error">{statusError}</Alert> : null}
|
||||
<Box sx={{ display: 'grid', gap: 0.5, mt: 1 }}>
|
||||
<Typography variant="body2">Directory: {statusName}</Typography>
|
||||
<Typography variant="body2">Path: {statusPath || '.'}</Typography>
|
||||
<Typography variant="body2">Mode: {statusMode}</Typography>
|
||||
{statusMode === 'mirror' ? (
|
||||
<>
|
||||
<Typography variant="body2">
|
||||
Sync: {statusSyncEnabled ? 'enabled' : 'suspended'} · status: {statusSyncStatus || '-'} {statusSyncStep ? `(${statusSyncStep})` : ''}
|
||||
</Typography>
|
||||
<Typography variant="body2">
|
||||
Progress: {statusSyncDone}/{statusSyncTotal} · failed: {statusSyncFailed} · deleted: {statusSyncDeleted}
|
||||
</Typography>
|
||||
{statusSyncError ? (
|
||||
<Typography variant="body2" color="error">
|
||||
Last error: {statusSyncError}
|
||||
</Typography>
|
||||
) : null}
|
||||
<Box sx={{ pt: 1 }}>
|
||||
<Button size="small" variant="outlined" onClick={handleStatusToggleSyncEnabled} disabled={statusSyncBusy}>
|
||||
{statusSyncEnabled ? 'Suspend' : 'Resume'}
|
||||
</Button>
|
||||
<Button
|
||||
size="small"
|
||||
variant="outlined"
|
||||
color="warning"
|
||||
onClick={() => setClearRunsConfirmOpen(true)}
|
||||
disabled={statusSyncBusy}
|
||||
sx={{ ml: 1 }}
|
||||
>
|
||||
Clear runs
|
||||
</Button>
|
||||
<Button
|
||||
size="small"
|
||||
variant="outlined"
|
||||
onClick={handleStatusRebuildMetadata}
|
||||
disabled={statusSyncBusy}
|
||||
sx={{ ml: 1 }}
|
||||
>
|
||||
Rebuild metadata
|
||||
</Button>
|
||||
<Button
|
||||
size="small"
|
||||
variant="outlined"
|
||||
onClick={async () => {
|
||||
if (!statusPath) return
|
||||
try {
|
||||
await loadStatus(statusPath)
|
||||
} catch (err) {
|
||||
const message = err instanceof Error ? err.message : 'Failed to refresh mirror status'
|
||||
setStatusError(message)
|
||||
}
|
||||
}}
|
||||
disabled={statusSyncBusy}
|
||||
sx={{ ml: 1 }}
|
||||
>
|
||||
Refresh
|
||||
</Button>
|
||||
</Box>
|
||||
<Box sx={{ display: 'grid', gap: 0.25, pt: 1 }}>
|
||||
<Typography variant="caption" color="text.secondary">Recent runs</Typography>
|
||||
{statusRuns.length ? (
|
||||
statusRuns.map((run) => (
|
||||
<Typography key={run.id} variant="caption" color={run.status === 'failed' ? 'error' : 'text.secondary'}>
|
||||
{new Date((run.started_at || 0) * 1000).toLocaleString()} · {run.status} · {run.done}/{run.total} · fail {run.failed} · del {run.deleted}
|
||||
</Typography>
|
||||
))
|
||||
) : (
|
||||
<Typography variant="caption" color="text.secondary">
|
||||
No runs yet.
|
||||
</Typography>
|
||||
)}
|
||||
</Box>
|
||||
</>
|
||||
) : (
|
||||
<Typography variant="body2" color="text.secondary">
|
||||
This directory is local and does not run mirror sync.
|
||||
</Typography>
|
||||
)}
|
||||
</Box>
|
||||
</DialogContent>
|
||||
<DialogActions>
|
||||
<Button onClick={() => setRenameOpen(false)}>Cancel</Button>
|
||||
<Button
|
||||
onClick={() => {
|
||||
setStatusOpen(false)
|
||||
setStatusError(null)
|
||||
setClearRunsConfirmOpen(false)
|
||||
}}
|
||||
>
|
||||
Close
|
||||
</Button>
|
||||
</DialogActions>
|
||||
</Dialog>
|
||||
<Dialog
|
||||
open={renameOpen}
|
||||
onClose={() => {
|
||||
setRenameOpen(false)
|
||||
setRenameError(null)
|
||||
}}
|
||||
maxWidth="sm"
|
||||
fullWidth
|
||||
>
|
||||
<DialogTitle>{renameIsRepoDir ? 'Edit repo directory' : 'Rename folder'}</DialogTitle>
|
||||
<DialogContent>
|
||||
{renameError ? <Alert severity="error">{renameError}</Alert> : null}
|
||||
<Box sx={{ display: 'grid', gap: 1, mt: 1 }}>
|
||||
<Typography variant="body2">
|
||||
Current name: {renameName}
|
||||
</Typography>
|
||||
<TextField
|
||||
label="New name"
|
||||
value={renameNewName}
|
||||
onChange={(event) => setRenameNewName(event.target.value)}
|
||||
fullWidth
|
||||
/>
|
||||
{renameIsRepoDir ? (
|
||||
<TextField
|
||||
select
|
||||
label="Repo Mode"
|
||||
value={renameMode}
|
||||
onChange={(event) => setRenameMode(event.target.value as 'local' | 'mirror')}
|
||||
fullWidth
|
||||
>
|
||||
<MenuItem value="local">local</MenuItem>
|
||||
<MenuItem value="mirror">mirror</MenuItem>
|
||||
</TextField>
|
||||
) : null}
|
||||
{renameIsRepoDir && renameMode === 'mirror' ? (
|
||||
<TextField
|
||||
label="Sync Interval (seconds)"
|
||||
value={renameSyncIntervalSec}
|
||||
onChange={(event) => setRenameSyncIntervalSec(event.target.value)}
|
||||
helperText="Used for periodic pull. Minimum 10."
|
||||
fullWidth
|
||||
/>
|
||||
) : null}
|
||||
{renameIsRepoDir && renameMode === 'mirror' ? (
|
||||
<>
|
||||
<TextField
|
||||
label="Remote URL"
|
||||
value={renameRemoteURL}
|
||||
onChange={(event) => setRenameRemoteURL(event.target.value)}
|
||||
helperText="Example: https://rpm.repo.com/base/path"
|
||||
fullWidth
|
||||
/>
|
||||
<TextField
|
||||
label="Connect Host (optional)"
|
||||
value={renameConnectHost}
|
||||
onChange={(event) => setRenameConnectHost(event.target.value)}
|
||||
helperText="Optional host:port override, e.g. 127.0.0.1:443"
|
||||
fullWidth
|
||||
/>
|
||||
<TextField
|
||||
label="Host Header (optional)"
|
||||
value={renameHostHeader}
|
||||
onChange={(event) => setRenameHostHeader(event.target.value)}
|
||||
fullWidth
|
||||
/>
|
||||
<TextField
|
||||
label="TLS Server Name (optional)"
|
||||
value={renameTLSServerName}
|
||||
onChange={(event) => setRenameTLSServerName(event.target.value)}
|
||||
fullWidth
|
||||
/>
|
||||
<FormControlLabel
|
||||
control={<Checkbox checked={renameAllowDelete} onChange={(event) => setRenameAllowDelete(event.target.checked)} />}
|
||||
label="Allow delete (files/container dirs) in mirror subtree"
|
||||
/>
|
||||
<FormControlLabel
|
||||
control={<Checkbox checked={renameTLSInsecureSkipVerify} onChange={(event) => setRenameTLSInsecureSkipVerify(event.target.checked)} />}
|
||||
label="Skip TLS certificate verification"
|
||||
/>
|
||||
</>
|
||||
) : null}
|
||||
</Box>
|
||||
</DialogContent>
|
||||
<DialogActions>
|
||||
<Button
|
||||
onClick={() => {
|
||||
setRenameOpen(false)
|
||||
setRenameError(null)
|
||||
}}
|
||||
>
|
||||
Cancel
|
||||
</Button>
|
||||
<Button onClick={handleRenameSubdir} variant="contained" disabled={renaming || !renameNewName.trim()}>
|
||||
{renaming ? 'Renaming...' : 'Rename'}
|
||||
{renaming ? (renameIsRepoDir ? 'Saving...' : 'Renaming...') : (renameIsRepoDir ? 'Save' : 'Rename')}
|
||||
</Button>
|
||||
</DialogActions>
|
||||
</Dialog>
|
||||
<Dialog
|
||||
open={clearRunsConfirmOpen}
|
||||
onClose={() => setClearRunsConfirmOpen(false)}
|
||||
maxWidth="xs"
|
||||
fullWidth
|
||||
>
|
||||
<DialogTitle>Clear runs</DialogTitle>
|
||||
<DialogContent>
|
||||
<Typography variant="body2" sx={{ mt: 1 }}>
|
||||
Clear recent mirror runs for "{statusName}"?
|
||||
</Typography>
|
||||
</DialogContent>
|
||||
<DialogActions>
|
||||
<Button onClick={() => setClearRunsConfirmOpen(false)}>Cancel</Button>
|
||||
<Button
|
||||
color="warning"
|
||||
variant="contained"
|
||||
onClick={async () => {
|
||||
setClearRunsConfirmOpen(false)
|
||||
await handleStatusClearRuns()
|
||||
}}
|
||||
>
|
||||
Clear
|
||||
</Button>
|
||||
</DialogActions>
|
||||
</Dialog>
|
||||
|
||||
Reference in New Issue
Block a user