Compare commits

...

3 Commits

15 changed files with 1499 additions and 308 deletions

View File

@@ -272,6 +272,10 @@ func main() {
rpmMirror = rpm.NewMirrorManager(store, logger, rpmMeta) rpmMirror = rpm.NewMirrorManager(store, logger, rpmMeta)
var uploadStore storage.FileStore var uploadStore storage.FileStore
uploadStore = storage.FileStore{BaseDir: filepath.Join(cfg.DataDir, "uploads")} uploadStore = storage.FileStore{BaseDir: filepath.Join(cfg.DataDir, "uploads")}
err = os.MkdirAll(repoManager.BaseDir, 0o755)
if err != nil {
log.Fatalf("git dir error: %v", err)
}
err = os.MkdirAll(rpmBase, 0o755) err = os.MkdirAll(rpmBase, 0o755)
if err != nil { if err != nil {
log.Fatalf("rpm dir error: %v", err) log.Fatalf("rpm dir error: %v", err)
@@ -288,6 +292,7 @@ func main() {
Repos: repoManager, Repos: repoManager,
RpmBase: rpmBase, RpmBase: rpmBase,
RpmMeta: rpmMeta, RpmMeta: rpmMeta,
RpmMirror: rpmMirror,
DockerBase: dockerBase, DockerBase: dockerBase,
Uploads: uploadStore, Uploads: uploadStore,
Logger: logger, Logger: logger,
@@ -448,10 +453,13 @@ func main() {
router.Handle("GET", "/api/repos/:id/rpm/package", api.RepoRPMPackage) router.Handle("GET", "/api/repos/:id/rpm/package", api.RepoRPMPackage)
router.Handle("POST", "/api/repos/:id/rpm/subdirs", api.RepoRPMCreateSubdir) router.Handle("POST", "/api/repos/:id/rpm/subdirs", api.RepoRPMCreateSubdir)
router.Handle("GET", "/api/repos/:id/rpm/subdir", api.RepoRPMGetSubdir) router.Handle("GET", "/api/repos/:id/rpm/subdir", api.RepoRPMGetSubdir)
router.Handle("POST", "/api/repos/:id/rpm/subdir/update", api.RepoRPMRenameSubdir)
router.Handle("POST", "/api/repos/:id/rpm/subdir/rename", api.RepoRPMRenameSubdir) router.Handle("POST", "/api/repos/:id/rpm/subdir/rename", api.RepoRPMRenameSubdir)
router.Handle("POST", "/api/repos/:id/rpm/subdir/sync", api.RepoRPMSyncSubdir) router.Handle("POST", "/api/repos/:id/rpm/subdir/sync", api.RepoRPMSyncSubdir)
router.Handle("POST", "/api/repos/:id/rpm/subdir/suspend", api.RepoRPMSuspendSubdir) router.Handle("POST", "/api/repos/:id/rpm/subdir/suspend", api.RepoRPMSuspendSubdir)
router.Handle("POST", "/api/repos/:id/rpm/subdir/resume", api.RepoRPMResumeSubdir) router.Handle("POST", "/api/repos/:id/rpm/subdir/resume", api.RepoRPMResumeSubdir)
router.Handle("POST", "/api/repos/:id/rpm/subdir/rebuild-metadata", api.RepoRPMRebuildSubdirMetadata)
router.Handle("POST", "/api/repos/:id/rpm/subdir/cancel", api.RepoRPMCancelSubdirSync)
router.Handle("GET", "/api/repos/:id/rpm/subdir/runs", api.RepoRPMMirrorRuns) router.Handle("GET", "/api/repos/:id/rpm/subdir/runs", api.RepoRPMMirrorRuns)
router.Handle("DELETE", "/api/repos/:id/rpm/subdir/runs", api.RepoRPMClearMirrorRuns) router.Handle("DELETE", "/api/repos/:id/rpm/subdir/runs", api.RepoRPMClearMirrorRuns)
router.Handle("DELETE", "/api/repos/:id/rpm/subdir", api.RepoRPMDeleteSubdir) router.Handle("DELETE", "/api/repos/:id/rpm/subdir", api.RepoRPMDeleteSubdir)
@@ -510,7 +518,7 @@ func main() {
mux.Handle("/api/auth/oidc/login", middleware.WithUser(store, middleware.AccessLog(logger, router))) mux.Handle("/api/auth/oidc/login", middleware.WithUser(store, middleware.AccessLog(logger, router)))
mux.Handle("/api/auth/oidc/callback", middleware.WithUser(store, middleware.AccessLog(logger, router))) mux.Handle("/api/auth/oidc/callback", middleware.WithUser(store, middleware.AccessLog(logger, router)))
mux.Handle("/api/health", middleware.AccessLog(logger, router)) mux.Handle("/api/health", middleware.AccessLog(logger, router))
mux.Handle("/", middleware.WithUser(store, spaHandler(filepath.Join("..", "frontend", "dist")))) mux.Handle("/", middleware.WithUser(store, spaHandler(cfg.FrontendDir)))
extraListenerManager = newAdditionalListenerManager(store, mux, logger) extraListenerManager = newAdditionalListenerManager(store, mux, logger)
api.OnTLSListenersChanged = extraListenerManager.NotifyReload api.OnTLSListenersChanged = extraListenerManager.NotifyReload
api.OnTLSListenerRuntimeStatus = extraListenerManager.ListenerEndpointCounts api.OnTLSListenerRuntimeStatus = extraListenerManager.ListenerEndpointCounts

View File

@@ -3,6 +3,7 @@ package config
import "encoding/json" import "encoding/json"
import "errors" import "errors"
import "os" import "os"
import "path/filepath"
import "strings" import "strings"
import "time" import "time"
import "strconv" import "strconv"
@@ -12,6 +13,7 @@ type Config struct {
HTTPSAddrs []string `json:"https_addrs"` HTTPSAddrs []string `json:"https_addrs"`
PublicBaseURL string `json:"public_base_url"` PublicBaseURL string `json:"public_base_url"`
DataDir string `json:"data_dir"` DataDir string `json:"data_dir"`
FrontendDir string `json:"frontend_dir"`
DBDriver string `json:"db_driver"` DBDriver string `json:"db_driver"`
DBDSN string `json:"db_dsn"` DBDSN string `json:"db_dsn"`
SessionTTL Duration `json:"session_ttl"` SessionTTL Duration `json:"session_ttl"`
@@ -51,6 +53,7 @@ func Load(path string) (Config, error) {
HTTPAddrs: []string{":1080"}, HTTPAddrs: []string{":1080"},
HTTPSAddrs: []string{}, HTTPSAddrs: []string{},
DataDir: "./codit-data", DataDir: "./codit-data",
FrontendDir: filepath.Join("..", "frontend", "dist"),
DBDriver: "sqlite", DBDriver: "sqlite",
DBDSN: "file:./codit-data/codit.db?_pragma=foreign_keys(1)", DBDSN: "file:./codit-data/codit.db?_pragma=foreign_keys(1)",
SessionTTL: Duration(24 * time.Hour), SessionTTL: Duration(24 * time.Hour),
@@ -106,6 +109,10 @@ func override(cfg *Config) {
if v != "" { if v != "" {
cfg.DataDir = v cfg.DataDir = v
} }
v = os.Getenv("CODIT_FRONTEND_DIR")
if v != "" {
cfg.FrontendDir = v
}
v = os.Getenv("CODIT_DB_DRIVER") v = os.Getenv("CODIT_DB_DRIVER")
if v != "" { if v != "" {
cfg.DBDriver = v cfg.DBDriver = v

View File

@@ -18,6 +18,9 @@ func TestLoadDefaults(t *testing.T) {
if cfg.GitHTTPPrefix != "/git" { if cfg.GitHTTPPrefix != "/git" {
t.Fatalf("unexpected git prefix default: %s", cfg.GitHTTPPrefix) t.Fatalf("unexpected git prefix default: %s", cfg.GitHTTPPrefix)
} }
if cfg.FrontendDir == "" {
t.Fatalf("frontend_dir default missing")
}
} }
func TestLoadFromJSONAndEnvOverride(t *testing.T) { func TestLoadFromJSONAndEnvOverride(t *testing.T) {
@@ -34,6 +37,7 @@ func TestLoadFromJSONAndEnvOverride(t *testing.T) {
t.Fatalf("write config file: %v", err) t.Fatalf("write config file: %v", err)
} }
t.Setenv("CODIT_DB_DSN", "file:override.db") t.Setenv("CODIT_DB_DSN", "file:override.db")
t.Setenv("CODIT_FRONTEND_DIR", "/srv/codit/frontend")
cfg, err = Load(path) cfg, err = Load(path)
if err != nil { if err != nil {
t.Fatalf("Load() error: %v", err) t.Fatalf("Load() error: %v", err)
@@ -44,6 +48,9 @@ func TestLoadFromJSONAndEnvOverride(t *testing.T) {
if cfg.AuthMode != "hybrid" { if cfg.AuthMode != "hybrid" {
t.Fatalf("auth_mode normalization failed: %s", cfg.AuthMode) t.Fatalf("auth_mode normalization failed: %s", cfg.AuthMode)
} }
if cfg.FrontendDir != "/srv/codit/frontend" {
t.Fatalf("frontend_dir env override failed: %s", cfg.FrontendDir)
}
} }
func TestDurationUnmarshalJSON(t *testing.T) { func TestDurationUnmarshalJSON(t *testing.T) {

View File

@@ -12,13 +12,13 @@ func (s *Store) ListRPMRepoDirs(repoID string) ([]models.RPMRepoDir, error) {
var items []models.RPMRepoDir var items []models.RPMRepoDir
var item models.RPMRepoDir var item models.RPMRepoDir
var err error var err error
rows, err = s.DB.Query(`SELECT repo_id, path, mode, remote_url, connect_host, host_header, tls_server_name, tls_insecure_skip_verify, sync_interval_sec, sync_enabled, dirty, next_sync_at, sync_running, sync_status, sync_error, sync_step, sync_total, sync_done, sync_failed, sync_deleted, last_sync_started_at, last_sync_finished_at, last_sync_success_at, last_synced_revision, created_at, updated_at FROM rpm_repo_dirs WHERE repo_id = ? ORDER BY LENGTH(path), path`, repoID) rows, err = s.DB.Query(`SELECT repo_id, path, mode, allow_delete, remote_url, connect_host, host_header, tls_server_name, tls_insecure_skip_verify, sync_interval_sec, sync_enabled, dirty, next_sync_at, sync_running, sync_status, sync_error, sync_step, sync_total, sync_done, sync_failed, sync_deleted, last_sync_started_at, last_sync_finished_at, last_sync_success_at, last_synced_revision, created_at, updated_at FROM rpm_repo_dirs WHERE repo_id = ? ORDER BY LENGTH(path), path`, repoID)
if err != nil { if err != nil {
return nil, err return nil, err
} }
defer rows.Close() defer rows.Close()
for rows.Next() { for rows.Next() {
err = rows.Scan(&item.RepoID, &item.Path, &item.Mode, &item.RemoteURL, &item.ConnectHost, &item.HostHeader, &item.TLSServerName, &item.TLSInsecureSkipVerify, &item.SyncIntervalSec, &item.SyncEnabled, &item.Dirty, &item.NextSyncAt, &item.SyncRunning, &item.SyncStatus, &item.SyncError, &item.SyncStep, &item.SyncTotal, &item.SyncDone, &item.SyncFailed, &item.SyncDeleted, &item.LastSyncStartedAt, &item.LastSyncFinishedAt, &item.LastSyncSuccessAt, &item.LastSyncedRevision, &item.CreatedAt, &item.UpdatedAt) err = rows.Scan(&item.RepoID, &item.Path, &item.Mode, &item.AllowDelete, &item.RemoteURL, &item.ConnectHost, &item.HostHeader, &item.TLSServerName, &item.TLSInsecureSkipVerify, &item.SyncIntervalSec, &item.SyncEnabled, &item.Dirty, &item.NextSyncAt, &item.SyncRunning, &item.SyncStatus, &item.SyncError, &item.SyncStep, &item.SyncTotal, &item.SyncDone, &item.SyncFailed, &item.SyncDeleted, &item.LastSyncStartedAt, &item.LastSyncFinishedAt, &item.LastSyncSuccessAt, &item.LastSyncedRevision, &item.CreatedAt, &item.UpdatedAt)
if err != nil { if err != nil {
return nil, err return nil, err
} }
@@ -39,10 +39,11 @@ func (s *Store) UpsertRPMRepoDir(item models.RPMRepoDir) error {
item.SyncIntervalSec = 300 item.SyncIntervalSec = 300
} }
_, err = s.DB.Exec(` _, err = s.DB.Exec(`
INSERT INTO rpm_repo_dirs (repo_id, path, mode, remote_url, connect_host, host_header, tls_server_name, tls_insecure_skip_verify, sync_interval_sec, sync_enabled, dirty, next_sync_at, created_at, updated_at) INSERT INTO rpm_repo_dirs (repo_id, path, mode, allow_delete, remote_url, connect_host, host_header, tls_server_name, tls_insecure_skip_verify, sync_interval_sec, sync_enabled, dirty, next_sync_at, created_at, updated_at)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
ON CONFLICT(repo_id, path) DO UPDATE SET ON CONFLICT(repo_id, path) DO UPDATE SET
mode = excluded.mode, mode = excluded.mode,
allow_delete = excluded.allow_delete,
remote_url = excluded.remote_url, remote_url = excluded.remote_url,
connect_host = excluded.connect_host, connect_host = excluded.connect_host,
host_header = excluded.host_header, host_header = excluded.host_header,
@@ -57,6 +58,7 @@ func (s *Store) UpsertRPMRepoDir(item models.RPMRepoDir) error {
item.RepoID, item.RepoID,
item.Path, item.Path,
item.Mode, item.Mode,
item.AllowDelete,
item.RemoteURL, item.RemoteURL,
item.ConnectHost, item.ConnectHost,
item.HostHeader, item.HostHeader,
@@ -75,8 +77,8 @@ func (s *Store) GetRPMRepoDir(repoID string, path string) (models.RPMRepoDir, er
var row *sql.Row var row *sql.Row
var item models.RPMRepoDir var item models.RPMRepoDir
var err error var err error
row = s.DB.QueryRow(`SELECT repo_id, path, mode, remote_url, connect_host, host_header, tls_server_name, tls_insecure_skip_verify, sync_interval_sec, sync_enabled, dirty, next_sync_at, sync_running, sync_status, sync_error, sync_step, sync_total, sync_done, sync_failed, sync_deleted, last_sync_started_at, last_sync_finished_at, last_sync_success_at, last_synced_revision, created_at, updated_at FROM rpm_repo_dirs WHERE repo_id = ? AND path = ?`, repoID, path) row = s.DB.QueryRow(`SELECT repo_id, path, mode, allow_delete, remote_url, connect_host, host_header, tls_server_name, tls_insecure_skip_verify, sync_interval_sec, sync_enabled, dirty, next_sync_at, sync_running, sync_status, sync_error, sync_step, sync_total, sync_done, sync_failed, sync_deleted, last_sync_started_at, last_sync_finished_at, last_sync_success_at, last_synced_revision, created_at, updated_at FROM rpm_repo_dirs WHERE repo_id = ? AND path = ?`, repoID, path)
err = row.Scan(&item.RepoID, &item.Path, &item.Mode, &item.RemoteURL, &item.ConnectHost, &item.HostHeader, &item.TLSServerName, &item.TLSInsecureSkipVerify, &item.SyncIntervalSec, &item.SyncEnabled, &item.Dirty, &item.NextSyncAt, &item.SyncRunning, &item.SyncStatus, &item.SyncError, &item.SyncStep, &item.SyncTotal, &item.SyncDone, &item.SyncFailed, &item.SyncDeleted, &item.LastSyncStartedAt, &item.LastSyncFinishedAt, &item.LastSyncSuccessAt, &item.LastSyncedRevision, &item.CreatedAt, &item.UpdatedAt) err = row.Scan(&item.RepoID, &item.Path, &item.Mode, &item.AllowDelete, &item.RemoteURL, &item.ConnectHost, &item.HostHeader, &item.TLSServerName, &item.TLSInsecureSkipVerify, &item.SyncIntervalSec, &item.SyncEnabled, &item.Dirty, &item.NextSyncAt, &item.SyncRunning, &item.SyncStatus, &item.SyncError, &item.SyncStep, &item.SyncTotal, &item.SyncDone, &item.SyncFailed, &item.SyncDeleted, &item.LastSyncStartedAt, &item.LastSyncFinishedAt, &item.LastSyncSuccessAt, &item.LastSyncedRevision, &item.CreatedAt, &item.UpdatedAt)
if err != nil { if err != nil {
return item, err return item, err
} }
@@ -120,7 +122,7 @@ func (s *Store) TryStartRPMMirrorTask(repoID string, path string, now int64) (bo
var res sql.Result var res sql.Result
var rows int64 var rows int64
var err error var err error
res, err = s.DB.Exec(`UPDATE rpm_repo_dirs SET sync_running = 1, sync_status = 'running', sync_error = '', sync_step = 'start', sync_total = 0, sync_done = 0, sync_failed = 0, sync_deleted = 0, last_sync_started_at = ?, updated_at = ? WHERE repo_id = ? AND path = ? AND mode = 'mirror' AND sync_running = 0`, now, now, repoID, path) res, err = s.DB.Exec(`UPDATE rpm_repo_dirs SET sync_running = 1, sync_status = 'running', sync_error = '', sync_step = 'start', sync_total = 0, sync_done = 0, sync_failed = 0, sync_deleted = 0, last_sync_started_at = ?, updated_at = ? WHERE repo_id = ? AND path = ? AND mode = 'mirror' AND sync_enabled = 1 AND sync_running = 0`, now, now, repoID, path)
if err != nil { if err != nil {
return false, err return false, err
} }
@@ -222,6 +224,18 @@ func (s *Store) ListRPMMirrorPaths() ([]models.RPMMirrorTask, error) {
return out, nil return out, nil
} }
func (s *Store) HasRunningRPMMirrorTask(repoID string) (bool, error) {
var row *sql.Row
var count int64
var err error
row = s.DB.QueryRow(`SELECT COUNT(1) FROM rpm_repo_dirs WHERE repo_id = ? AND mode = 'mirror' AND sync_running = 1`, repoID)
err = row.Scan(&count)
if err != nil {
return false, err
}
return count > 0, nil
}
func (s *Store) CreateRPMMirrorRun(repoID string, path string, startedAt int64) (string, error) { func (s *Store) CreateRPMMirrorRun(repoID string, path string, startedAt int64) (string, error) {
var id string var id string
var err error var err error
@@ -346,13 +360,18 @@ func (s *Store) MoveRPMRepoDir(repoID string, oldPath string, newPath string) er
return err return err
} }
now = time.Now().UTC().Unix() now = time.Now().UTC().Unix()
oldPrefix = oldPath + "/"
newPrefix = newPath + "/"
_, err = tx.Exec(`DELETE FROM rpm_mirror_runs WHERE repo_id = ? AND (path = ? OR path LIKE (? || '%'))`, repoID, oldPath, oldPrefix)
if err != nil {
_ = tx.Rollback()
return err
}
_, err = tx.Exec(`UPDATE rpm_repo_dirs SET path = ?, updated_at = ? WHERE repo_id = ? AND path = ?`, newPath, now, repoID, oldPath) _, err = tx.Exec(`UPDATE rpm_repo_dirs SET path = ?, updated_at = ? WHERE repo_id = ? AND path = ?`, newPath, now, repoID, oldPath)
if err != nil { if err != nil {
_ = tx.Rollback() _ = tx.Rollback()
return err return err
} }
oldPrefix = oldPath + "/"
newPrefix = newPath + "/"
_, err = tx.Exec(`UPDATE rpm_repo_dirs SET path = (? || SUBSTR(path, ?)), updated_at = ? WHERE repo_id = ? AND path LIKE (? || '%')`, newPrefix, len(oldPrefix)+1, now, repoID, oldPrefix) _, err = tx.Exec(`UPDATE rpm_repo_dirs SET path = (? || SUBSTR(path, ?)), updated_at = ? WHERE repo_id = ? AND path LIKE (? || '%')`, newPrefix, len(oldPrefix)+1, now, repoID, oldPrefix)
if err != nil { if err != nil {
_ = tx.Rollback() _ = tx.Rollback()

View File

@@ -31,6 +31,7 @@ type API struct {
Repos git.RepoManager Repos git.RepoManager
RpmBase string RpmBase string
RpmMeta *rpm.MetaManager RpmMeta *rpm.MetaManager
RpmMirror *rpm.MirrorManager
DockerBase string DockerBase string
Uploads storage.FileStore Uploads storage.FileStore
Logger *util.Logger Logger *util.Logger
@@ -219,29 +220,31 @@ type repoBranchCreateRequest struct {
From string `json:"from"` From string `json:"from"`
} }
type repoRPMSubdirRequest struct { type repoRPMCreateRequest struct {
Name string `json:"name"` Name string `json:"name"`
Type string `json:"type"` Type string `json:"type"`
Parent string `json:"parent"` Parent string `json:"parent"`
Mode string `json:"mode"` Mode string `json:"mode"`
AllowDelete bool `json:"allow_delete"`
RemoteURL string `json:"remote_url"` RemoteURL string `json:"remote_url"`
ConnectHost string `json:"connect_host"` ConnectHost string `json:"connect_host"`
HostHeader string `json:"host_header"` HostHeader string `json:"host_header"`
TLSServerName string `json:"tls_server_name"` TLSServerName string `json:"tls_server_name"`
TLSInsecureSkipVerify bool `json:"tls_insecure_skip_verify"` TLSInsecureSkipVerify bool `json:"tls_insecure_skip_verify"`
SyncIntervalSec int64 `json:"sync_interval_sec"` SyncIntervalSec int64 `json:"sync_interval_sec"`
} }
type repoRPMRenameRequest struct { type repoRPMUpdateRequest struct {
Path string `json:"path"` Path *string `json:"path"`
Name string `json:"name"` Name *string `json:"name"`
Mode string `json:"mode"` Mode *string `json:"mode"`
RemoteURL string `json:"remote_url"` AllowDelete *bool `json:"allow_delete"`
ConnectHost string `json:"connect_host"` RemoteURL *string `json:"remote_url"`
HostHeader string `json:"host_header"` ConnectHost *string `json:"connect_host"`
TLSServerName string `json:"tls_server_name"` HostHeader *string `json:"host_header"`
TLSInsecureSkipVerify bool `json:"tls_insecure_skip_verify"` TLSServerName *string `json:"tls_server_name"`
SyncIntervalSec int64 `json:"sync_interval_sec"` TLSInsecureSkipVerify *bool `json:"tls_insecure_skip_verify"`
SyncIntervalSec *int64 `json:"sync_interval_sec"`
} }
type createAPIKeyRequest struct { type createAPIKeyRequest struct {
@@ -1322,6 +1325,7 @@ func (api *API) UpdateProject(w http.ResponseWriter, r *http.Request, params map
func (api *API) DeleteProject(w http.ResponseWriter, r *http.Request, params map[string]string) { func (api *API) DeleteProject(w http.ResponseWriter, r *http.Request, params map[string]string) {
var err error var err error
var repos []models.Repo var repos []models.Repo
var running bool
var i int var i int
var tempPaths []string var tempPaths []string
var sourcePaths []string var sourcePaths []string
@@ -1334,6 +1338,20 @@ func (api *API) DeleteProject(w http.ResponseWriter, r *http.Request, params map
WriteJSON(w, http.StatusInternalServerError, map[string]string{"error": err.Error()}) WriteJSON(w, http.StatusInternalServerError, map[string]string{"error": err.Error()})
return return
} }
for i = 0; i < len(repos); i++ {
if repos[i].Type != "rpm" {
continue
}
running, err = api.Store.HasRunningRPMMirrorTask(repos[i].ID)
if err != nil {
WriteJSON(w, http.StatusInternalServerError, map[string]string{"error": err.Error()})
return
}
if running {
WriteJSON(w, http.StatusConflict, map[string]string{"error": "cannot delete project while rpm mirror sync is running", "repo_id": repos[i].ID, "repo_name": repos[i].Name})
return
}
}
tempPaths = make([]string, 0, len(repos)) tempPaths = make([]string, 0, len(repos))
sourcePaths = make([]string, 0, len(repos)) sourcePaths = make([]string, 0, len(repos))
for i = 0; i < len(repos); i++ { for i = 0; i < len(repos); i++ {
@@ -1979,6 +1997,7 @@ func (api *API) UpdateRepo(w http.ResponseWriter, r *http.Request, params map[st
func (api *API) DeleteRepo(w http.ResponseWriter, r *http.Request, params map[string]string) { func (api *API) DeleteRepo(w http.ResponseWriter, r *http.Request, params map[string]string) {
var repo models.Repo var repo models.Repo
var project models.Project var project models.Project
var running bool
var err error var err error
var temp string var temp string
repo, err = api.Store.GetRepo(params["id"]) repo, err = api.Store.GetRepo(params["id"])
@@ -1989,6 +2008,17 @@ func (api *API) DeleteRepo(w http.ResponseWriter, r *http.Request, params map[st
if !api.requireProjectRole(w, r, repo.ProjectID, "writer") { if !api.requireProjectRole(w, r, repo.ProjectID, "writer") {
return return
} }
if repo.Type == "rpm" {
running, err = api.Store.HasRunningRPMMirrorTask(repo.ID)
if err != nil {
WriteJSON(w, http.StatusInternalServerError, map[string]string{"error": err.Error()})
return
}
if running {
WriteJSON(w, http.StatusConflict, map[string]string{"error": "cannot delete repository while rpm mirror sync is running"})
return
}
}
project, err = api.Store.GetProject(repo.ProjectID) project, err = api.Store.GetProject(repo.ProjectID)
if err != nil { if err != nil {
WriteJSON(w, http.StatusNotFound, map[string]string{"error": "project not found"}) WriteJSON(w, http.StatusNotFound, map[string]string{"error": "project not found"})
@@ -2687,7 +2717,7 @@ func (api *API) RepoRPMCreateSubdir(w http.ResponseWriter, r *http.Request, para
var writeBlocked bool var writeBlocked bool
var writeBlockedPath string var writeBlockedPath string
var err error var err error
var req repoRPMSubdirRequest var req repoRPMCreateRequest
var name string var name string
var dirType string var dirType string
var mode string var mode string
@@ -2699,6 +2729,9 @@ func (api *API) RepoRPMCreateSubdir(w http.ResponseWriter, r *http.Request, para
var fullRelLower string var fullRelLower string
var absParent string var absParent string
var hasRepoAncestor bool var hasRepoAncestor bool
var allowDelete bool
var tlsInsecureSkipVerify bool
var syncIntervalSec int64
repo, err = api.Store.GetRepo(params["id"]) repo, err = api.Store.GetRepo(params["id"])
if err != nil { if err != nil {
WriteJSON(w, http.StatusNotFound, map[string]string{"error": "repo not found"}) WriteJSON(w, http.StatusNotFound, map[string]string{"error": "repo not found"})
@@ -2766,6 +2799,13 @@ func (api *API) RepoRPMCreateSubdir(w http.ResponseWriter, r *http.Request, para
} }
if dirType == "repo" { if dirType == "repo" {
mode = normalizeRPMRepoDirMode(req.Mode) mode = normalizeRPMRepoDirMode(req.Mode)
allowDelete = req.AllowDelete
tlsInsecureSkipVerify = req.TLSInsecureSkipVerify
syncIntervalSec = req.SyncIntervalSec
if syncIntervalSec == 0 {
syncIntervalSec = 300
}
syncIntervalSec = normalizeRPMMirrorIntervalSec(syncIntervalSec)
absParent = filepath.Join(repo.Path, parentPath) absParent = filepath.Join(repo.Path, parentPath)
hasRepoAncestor, err = hasRepodataAncestor(repo.Path, absParent) hasRepoAncestor, err = hasRepodataAncestor(repo.Path, absParent)
if err != nil { if err != nil {
@@ -2799,12 +2839,13 @@ func (api *API) RepoRPMCreateSubdir(w http.ResponseWriter, r *http.Request, para
RepoID: repo.ID, RepoID: repo.ID,
Path: fullRel, Path: fullRel,
Mode: mode, Mode: mode,
AllowDelete: allowDelete,
RemoteURL: strings.TrimSpace(req.RemoteURL), RemoteURL: strings.TrimSpace(req.RemoteURL),
ConnectHost: strings.TrimSpace(req.ConnectHost), ConnectHost: strings.TrimSpace(req.ConnectHost),
HostHeader: strings.TrimSpace(req.HostHeader), HostHeader: strings.TrimSpace(req.HostHeader),
TLSServerName: strings.TrimSpace(req.TLSServerName), TLSServerName: strings.TrimSpace(req.TLSServerName),
TLSInsecureSkipVerify: req.TLSInsecureSkipVerify, TLSInsecureSkipVerify: tlsInsecureSkipVerify,
SyncIntervalSec: normalizeRPMMirrorIntervalSec(req.SyncIntervalSec), SyncIntervalSec: syncIntervalSec,
SyncEnabled: true, SyncEnabled: true,
} }
err = api.Store.UpsertRPMRepoDir(dirConfig) err = api.Store.UpsertRPMRepoDir(dirConfig)
@@ -2917,11 +2958,118 @@ func (api *API) RepoRPMResumeSubdir(w http.ResponseWriter, r *http.Request, para
api.repoRPMSetSyncEnabled(w, r, params, true) api.repoRPMSetSyncEnabled(w, r, params, true)
} }
func (api *API) RepoRPMRebuildSubdirMetadata(w http.ResponseWriter, r *http.Request, params map[string]string) {
var repo models.Repo
var relPath string
var normalizedPath string
var fullPath string
var repodataPath string
var err error
repo, err = api.Store.GetRepo(params["id"])
if err != nil {
WriteJSON(w, http.StatusNotFound, map[string]string{"error": "repo not found"})
return
}
if !api.requireRepoRole(w, r, repo.ID, "writer") {
return
}
if repo.Type != "rpm" {
WriteJSON(w, http.StatusBadRequest, map[string]string{"error": "repo is not rpm"})
return
}
relPath = strings.TrimSpace(r.URL.Query().Get("path"))
if relPath == "" {
WriteJSON(w, http.StatusBadRequest, map[string]string{"error": "path required"})
return
}
if !isSafeSubdirPath(relPath) {
WriteJSON(w, http.StatusBadRequest, map[string]string{"error": "invalid path"})
return
}
normalizedPath = normalizeRPMPath(relPath)
fullPath = filepath.Join(repo.Path, filepath.FromSlash(normalizedPath))
repodataPath = filepath.Join(fullPath, "repodata")
_, err = os.Stat(repodataPath)
if err != nil {
if os.IsNotExist(err) {
WriteJSON(w, http.StatusBadRequest, map[string]string{"error": "path is not a repository directory"})
return
}
WriteJSON(w, http.StatusInternalServerError, map[string]string{"error": err.Error()})
return
}
if api.RpmMeta == nil {
WriteJSON(w, http.StatusServiceUnavailable, map[string]string{"error": "metadata manager unavailable"})
return
}
api.RpmMeta.Schedule(fullPath)
WriteJSON(w, http.StatusOK, map[string]string{"status": "scheduled"})
}
func (api *API) RepoRPMCancelSubdirSync(w http.ResponseWriter, r *http.Request, params map[string]string) {
var repo models.Repo
var relPath string
var normalizedPath string
var config models.RPMRepoDir
var canceled bool
var err error
repo, err = api.Store.GetRepo(params["id"])
if err != nil {
WriteJSON(w, http.StatusNotFound, map[string]string{"error": "repo not found"})
return
}
if !api.requireRepoRole(w, r, repo.ID, "writer") {
return
}
if repo.Type != "rpm" {
WriteJSON(w, http.StatusBadRequest, map[string]string{"error": "repo is not rpm"})
return
}
relPath = strings.TrimSpace(r.URL.Query().Get("path"))
if relPath == "" {
WriteJSON(w, http.StatusBadRequest, map[string]string{"error": "path required"})
return
}
if !isSafeSubdirPath(relPath) {
WriteJSON(w, http.StatusBadRequest, map[string]string{"error": "invalid path"})
return
}
normalizedPath = normalizeRPMPath(relPath)
config, err = api.Store.GetRPMRepoDir(repo.ID, normalizedPath)
if err != nil {
if errors.Is(err, sql.ErrNoRows) {
WriteJSON(w, http.StatusNotFound, map[string]string{"error": "repo directory config not found"})
return
}
WriteJSON(w, http.StatusInternalServerError, map[string]string{"error": err.Error()})
return
}
if normalizeRPMRepoDirMode(config.Mode) != "mirror" {
WriteJSON(w, http.StatusBadRequest, map[string]string{"error": "sync control is only supported for mirror mode"})
return
}
if !config.SyncRunning {
WriteJSON(w, http.StatusConflict, map[string]string{"error": "sync is not running"})
return
}
if api.RpmMirror == nil {
WriteJSON(w, http.StatusServiceUnavailable, map[string]string{"error": "mirror manager unavailable"})
return
}
canceled = api.RpmMirror.CancelTask(repo.ID, normalizedPath)
if !canceled {
WriteJSON(w, http.StatusConflict, map[string]string{"error": "sync is not running"})
return
}
WriteJSON(w, http.StatusOK, map[string]string{"status": "cancel_requested"})
}
func (api *API) repoRPMSetSyncEnabled(w http.ResponseWriter, r *http.Request, params map[string]string, enabled bool) { func (api *API) repoRPMSetSyncEnabled(w http.ResponseWriter, r *http.Request, params map[string]string, enabled bool) {
var repo models.Repo var repo models.Repo
var relPath string var relPath string
var normalizedPath string var normalizedPath string
var config models.RPMRepoDir var config models.RPMRepoDir
var cancelRequested bool
var err error var err error
repo, err = api.Store.GetRepo(params["id"]) repo, err = api.Store.GetRepo(params["id"])
if err != nil { if err != nil {
@@ -2963,7 +3111,11 @@ func (api *API) repoRPMSetSyncEnabled(w http.ResponseWriter, r *http.Request, pa
WriteJSON(w, http.StatusInternalServerError, map[string]string{"error": err.Error()}) WriteJSON(w, http.StatusInternalServerError, map[string]string{"error": err.Error()})
return return
} }
WriteJSON(w, http.StatusOK, map[string]any{"status": "ok", "sync_enabled": enabled}) cancelRequested = false
if !enabled && api.RpmMirror != nil {
cancelRequested = api.RpmMirror.CancelTask(repo.ID, normalizedPath)
}
WriteJSON(w, http.StatusOK, map[string]any{"status": "ok", "sync_enabled": enabled, "cancel_requested": cancelRequested})
} }
func (api *API) RepoRPMMirrorRuns(w http.ResponseWriter, r *http.Request, params map[string]string) { func (api *API) RepoRPMMirrorRuns(w http.ResponseWriter, r *http.Request, params map[string]string) {
@@ -3050,6 +3202,14 @@ func (api *API) RepoRPMDeleteSubdir(w http.ResponseWriter, r *http.Request, para
var repo models.Repo var repo models.Repo
var writeBlocked bool var writeBlocked bool
var writeBlockedPath string var writeBlockedPath string
var targetConfig models.RPMRepoDir
var targetHasConfig bool
var mirrorRoot string
var allowMirrorRootDelete bool
var allowMirrorDelete bool
var busy bool
var busyPath string
var busyReason string
var err error var err error
var relPath string var relPath string
var fullPath string var fullPath string
@@ -3084,16 +3244,54 @@ func (api *API) RepoRPMDeleteSubdir(w http.ResponseWriter, r *http.Request, para
} }
relPathClean = filepath.ToSlash(filepath.Clean(filepath.FromSlash(relPath))) relPathClean = filepath.ToSlash(filepath.Clean(filepath.FromSlash(relPath)))
relPathClean = strings.TrimPrefix(relPathClean, "./") relPathClean = strings.TrimPrefix(relPathClean, "./")
busy, busyPath, busyReason, err = api.hasBusyMirrorRootUnder(repo, relPathClean)
if err != nil {
WriteJSON(w, http.StatusInternalServerError, map[string]string{"error": err.Error()})
return
}
if busy {
WriteJSON(w, http.StatusConflict, map[string]string{"error": "cannot delete directory while mirror activity is running", "mirror_root": busyPath, "reason": busyReason})
return
}
targetConfig, err = api.Store.GetRPMRepoDir(repo.ID, relPathClean)
if err == nil {
targetHasConfig = true
} else if err != nil && !errors.Is(err, sql.ErrNoRows) {
WriteJSON(w, http.StatusInternalServerError, map[string]string{"error": err.Error()})
return
}
writeBlocked, writeBlockedPath, err = api.isRPMWriteBlocked(repo, relPathClean) writeBlocked, writeBlockedPath, err = api.isRPMWriteBlocked(repo, relPathClean)
if err != nil { if err != nil {
WriteJSON(w, http.StatusInternalServerError, map[string]string{"error": err.Error()}) WriteJSON(w, http.StatusInternalServerError, map[string]string{"error": err.Error()})
return return
} }
if writeBlocked { if writeBlocked {
WriteJSON(w, http.StatusForbidden, map[string]string{"error": "writes are disabled for mirror repo subtree", "mirror_root": writeBlockedPath}) mirrorRoot = normalizeRPMPath(writeBlockedPath)
return allowMirrorRootDelete = targetHasConfig &&
normalizeRPMRepoDirMode(targetConfig.Mode) == "mirror" &&
normalizeRPMPath(targetConfig.Path) == normalizeRPMPath(relPathClean) &&
normalizeRPMPath(relPathClean) == mirrorRoot
if !allowMirrorRootDelete {
allowMirrorDelete, err = api.allowRPMMirrorDelete(repo, relPathClean, true)
if err != nil {
WriteJSON(w, http.StatusInternalServerError, map[string]string{"error": err.Error()})
return
}
if !allowMirrorDelete {
WriteJSON(w, http.StatusForbidden, map[string]string{"error": "writes are disabled for mirror repo subtree", "mirror_root": writeBlockedPath})
return
}
}
if targetConfig.SyncRunning {
WriteJSON(w, http.StatusConflict, map[string]string{"error": "cannot delete mirror repo directory while sync is running", "mirror_root": writeBlockedPath})
return
}
} }
fullPath = filepath.Join(repo.Path, filepath.FromSlash(relPathClean)) fullPath = filepath.Join(repo.Path, filepath.FromSlash(relPathClean))
if writeBlocked && allowMirrorRootDelete && api.RpmMeta != nil && api.RpmMeta.IsRunning(fullPath) {
WriteJSON(w, http.StatusConflict, map[string]string{"error": "cannot delete mirror repo directory while metadata update is running", "mirror_root": writeBlockedPath})
return
}
info, err = os.Stat(fullPath) info, err = os.Stat(fullPath)
if err != nil { if err != nil {
if os.IsNotExist(err) { if os.IsNotExist(err) {
@@ -3107,6 +3305,18 @@ func (api *API) RepoRPMDeleteSubdir(w http.ResponseWriter, r *http.Request, para
WriteJSON(w, http.StatusBadRequest, map[string]string{"error": "path is not a directory"}) WriteJSON(w, http.StatusBadRequest, map[string]string{"error": "path is not a directory"})
return return
} }
if writeBlocked && !allowMirrorRootDelete && allowMirrorDelete {
repodataPath = filepath.Join(fullPath, "repodata")
_, err = os.Stat(repodataPath)
if err == nil {
WriteJSON(w, http.StatusForbidden, map[string]string{"error": "only container directories can be deleted in mirror subtree", "mirror_root": writeBlockedPath})
return
}
if err != nil && !os.IsNotExist(err) {
WriteJSON(w, http.StatusInternalServerError, map[string]string{"error": err.Error()})
return
}
}
err = os.RemoveAll(fullPath) err = os.RemoveAll(fullPath)
if err != nil { if err != nil {
WriteJSON(w, http.StatusInternalServerError, map[string]string{"error": err.Error()}) WriteJSON(w, http.StatusInternalServerError, map[string]string{"error": err.Error()})
@@ -3136,7 +3346,7 @@ func (api *API) RepoRPMRenameSubdir(w http.ResponseWriter, r *http.Request, para
var renamed bool var renamed bool
var isRepoDir bool var isRepoDir bool
var err error var err error
var req repoRPMRenameRequest var req repoRPMUpdateRequest
var relPath string var relPath string
var relPathClean string var relPathClean string
var newName string var newName string
@@ -3149,6 +3359,19 @@ func (api *API) RepoRPMRenameSubdir(w http.ResponseWriter, r *http.Request, para
var repodataPath string var repodataPath string
var hasAncestor bool var hasAncestor bool
var absParent string var absParent string
var existingConfigLoaded bool
var targetConfigExists bool
var allowDelete bool
var tlsInsecureSkipVerify bool
var syncIntervalSec int64
var modeValue string
var remoteURL string
var connectHost string
var hostHeader string
var tlsServerName string
var busy bool
var busyPath string
var busyReason string
repo, err = api.Store.GetRepo(params["id"]) repo, err = api.Store.GetRepo(params["id"])
if err != nil { if err != nil {
WriteJSON(w, http.StatusNotFound, map[string]string{"error": "repo not found"}) WriteJSON(w, http.StatusNotFound, map[string]string{"error": "repo not found"})
@@ -3166,30 +3389,49 @@ func (api *API) RepoRPMRenameSubdir(w http.ResponseWriter, r *http.Request, para
WriteJSON(w, http.StatusBadRequest, map[string]string{"error": "invalid json"}) WriteJSON(w, http.StatusBadRequest, map[string]string{"error": "invalid json"})
return return
} }
relPath = strings.TrimSpace(req.Path) if req.Path == nil {
newName = strings.TrimSpace(req.Name) WriteJSON(w, http.StatusBadRequest, map[string]string{"error": "path required"})
if relPath == "" || newName == "" {
WriteJSON(w, http.StatusBadRequest, map[string]string{"error": "path and name required"})
return return
} }
if strings.EqualFold(newName, "repodata") { relPath = strings.TrimSpace(*req.Path)
WriteJSON(w, http.StatusBadRequest, map[string]string{"error": "repodata is reserved"}) newName = ""
if req.Name != nil {
newName = strings.TrimSpace(*req.Name)
}
if relPath == "" {
WriteJSON(w, http.StatusBadRequest, map[string]string{"error": "path required"})
return return
} }
if !isSafeSubdirPath(relPath) { if !isSafeSubdirPath(relPath) {
WriteJSON(w, http.StatusBadRequest, map[string]string{"error": "invalid path"}) WriteJSON(w, http.StatusBadRequest, map[string]string{"error": "invalid path"})
return return
} }
if !isSafeSubdirName(newName) {
WriteJSON(w, http.StatusBadRequest, map[string]string{"error": "invalid name"})
return
}
if isRepodataPath(relPath) { if isRepodataPath(relPath) {
WriteJSON(w, http.StatusBadRequest, map[string]string{"error": "repodata cannot be renamed"}) WriteJSON(w, http.StatusBadRequest, map[string]string{"error": "repodata cannot be renamed"})
return return
} }
relPathClean = filepath.ToSlash(filepath.Clean(filepath.FromSlash(relPath))) relPathClean = filepath.ToSlash(filepath.Clean(filepath.FromSlash(relPath)))
relPathClean = strings.TrimPrefix(relPathClean, "./") relPathClean = strings.TrimPrefix(relPathClean, "./")
busy, busyPath, busyReason, err = api.hasBusyMirrorRootUnder(repo, relPathClean)
if err != nil {
WriteJSON(w, http.StatusInternalServerError, map[string]string{"error": err.Error()})
return
}
if busy {
WriteJSON(w, http.StatusConflict, map[string]string{"error": "cannot rename/update directory while mirror activity is running", "mirror_root": busyPath, "reason": busyReason})
return
}
if newName == "" {
newName = filepath.Base(filepath.FromSlash(relPathClean))
}
if strings.EqualFold(newName, "repodata") {
WriteJSON(w, http.StatusBadRequest, map[string]string{"error": "repodata is reserved"})
return
}
if !isSafeSubdirName(newName) {
WriteJSON(w, http.StatusBadRequest, map[string]string{"error": "invalid name"})
return
}
writeBlocked, writeBlockedPath, err = api.isRPMWriteBlocked(repo, relPathClean) writeBlocked, writeBlockedPath, err = api.isRPMWriteBlocked(repo, relPathClean)
if err != nil { if err != nil {
WriteJSON(w, http.StatusInternalServerError, map[string]string{"error": err.Error()}) WriteJSON(w, http.StatusInternalServerError, map[string]string{"error": err.Error()})
@@ -3220,15 +3462,95 @@ func (api *API) RepoRPMRenameSubdir(w http.ResponseWriter, r *http.Request, para
parentPath = filepath.FromSlash(parentRel) parentPath = filepath.FromSlash(parentRel)
newPath = filepath.Join(repo.Path, parentPath, newName) newPath = filepath.Join(repo.Path, parentPath, newName)
newRelPath = filepath.ToSlash(filepath.Join(parentRel, newName)) newRelPath = filepath.ToSlash(filepath.Join(parentRel, newName))
_, err = api.Store.GetRPMRepoDir(repo.ID, newRelPath)
if err == nil {
targetConfigExists = true
} else if err != nil && !errors.Is(err, sql.ErrNoRows) {
WriteJSON(w, http.StatusInternalServerError, map[string]string{"error": err.Error()})
return
}
repodataPath = filepath.Join(fullPath, "repodata") repodataPath = filepath.Join(fullPath, "repodata")
_, err = os.Stat(repodataPath) _, err = os.Stat(repodataPath)
if err == nil { if err == nil {
isRepoDir = true isRepoDir = true
newMode = normalizeRPMRepoDirMode(req.Mode) existingConfig, err = api.Store.GetRPMRepoDir(repo.ID, relPathClean)
if newMode == "" { if err == nil {
newMode = "local" existingConfigLoaded = true
if normalizeRPMRepoDirMode(existingConfig.Mode) == "mirror" && existingConfig.SyncRunning {
WriteJSON(w, http.StatusConflict, map[string]string{"error": "cannot rename mirror repo directory while sync is running"})
return
}
if normalizeRPMRepoDirMode(existingConfig.Mode) == "mirror" && api.RpmMeta != nil && api.RpmMeta.IsRunning(fullPath) {
WriteJSON(w, http.StatusConflict, map[string]string{"error": "cannot update mirror repo directory while metadata update is running"})
return
}
} else if err != nil && !errors.Is(err, sql.ErrNoRows) {
WriteJSON(w, http.StatusInternalServerError, map[string]string{"error": err.Error()})
return
} }
err = validateRPMMirrorConfig(newMode, strings.TrimSpace(req.RemoteURL), strings.TrimSpace(req.ConnectHost), strings.TrimSpace(req.HostHeader), strings.TrimSpace(req.TLSServerName)) modeValue = ""
if req.Mode != nil {
modeValue = *req.Mode
}
newMode = normalizeRPMRepoDirMode(modeValue)
if newMode == "" {
if existingConfigLoaded {
newMode = normalizeRPMRepoDirMode(existingConfig.Mode)
} else {
newMode = "local"
}
}
if req.AllowDelete != nil {
allowDelete = *req.AllowDelete
} else if existingConfigLoaded {
allowDelete = existingConfig.AllowDelete
} else {
allowDelete = false
}
if req.TLSInsecureSkipVerify != nil {
tlsInsecureSkipVerify = *req.TLSInsecureSkipVerify
} else if existingConfigLoaded {
tlsInsecureSkipVerify = existingConfig.TLSInsecureSkipVerify
} else {
tlsInsecureSkipVerify = false
}
if req.SyncIntervalSec != nil {
syncIntervalSec = *req.SyncIntervalSec
} else if existingConfigLoaded {
syncIntervalSec = existingConfig.SyncIntervalSec
} else {
syncIntervalSec = 300
}
syncIntervalSec = normalizeRPMMirrorIntervalSec(syncIntervalSec)
if req.RemoteURL != nil {
remoteURL = strings.TrimSpace(*req.RemoteURL)
} else if existingConfigLoaded {
remoteURL = existingConfig.RemoteURL
} else {
remoteURL = ""
}
if req.ConnectHost != nil {
connectHost = strings.TrimSpace(*req.ConnectHost)
} else if existingConfigLoaded {
connectHost = existingConfig.ConnectHost
} else {
connectHost = ""
}
if req.HostHeader != nil {
hostHeader = strings.TrimSpace(*req.HostHeader)
} else if existingConfigLoaded {
hostHeader = existingConfig.HostHeader
} else {
hostHeader = ""
}
if req.TLSServerName != nil {
tlsServerName = strings.TrimSpace(*req.TLSServerName)
} else if existingConfigLoaded {
tlsServerName = existingConfig.TLSServerName
} else {
tlsServerName = ""
}
err = validateRPMMirrorConfig(newMode, remoteURL, connectHost, hostHeader, tlsServerName)
if err != nil { if err != nil {
WriteJSON(w, http.StatusBadRequest, map[string]string{"error": err.Error()}) WriteJSON(w, http.StatusBadRequest, map[string]string{"error": err.Error()})
return return
@@ -3246,6 +3568,10 @@ func (api *API) RepoRPMRenameSubdir(w http.ResponseWriter, r *http.Request, para
} }
renamed = newPath != fullPath renamed = newPath != fullPath
if renamed { if renamed {
if targetConfigExists {
WriteJSON(w, http.StatusConflict, map[string]string{"error": "target repo directory config already exists"})
return
}
_, err = os.Stat(newPath) _, err = os.Stat(newPath)
if err == nil { if err == nil {
WriteJSON(w, http.StatusBadRequest, map[string]string{"error": "target already exists"}) WriteJSON(w, http.StatusBadRequest, map[string]string{"error": "target already exists"})
@@ -3262,6 +3588,7 @@ func (api *API) RepoRPMRenameSubdir(w http.ResponseWriter, r *http.Request, para
} }
err = api.Store.MoveRPMRepoDir(repo.ID, relPathClean, newRelPath) err = api.Store.MoveRPMRepoDir(repo.ID, relPathClean, newRelPath)
if err != nil { if err != nil {
_ = os.Rename(newPath, fullPath)
WriteJSON(w, http.StatusInternalServerError, map[string]string{"error": err.Error()}) WriteJSON(w, http.StatusInternalServerError, map[string]string{"error": err.Error()})
return return
} }
@@ -3271,21 +3598,24 @@ func (api *API) RepoRPMRenameSubdir(w http.ResponseWriter, r *http.Request, para
RepoID: repo.ID, RepoID: repo.ID,
Path: newRelPath, Path: newRelPath,
Mode: newMode, Mode: newMode,
RemoteURL: strings.TrimSpace(req.RemoteURL), AllowDelete: allowDelete,
ConnectHost: strings.TrimSpace(req.ConnectHost), RemoteURL: remoteURL,
HostHeader: strings.TrimSpace(req.HostHeader), ConnectHost: connectHost,
TLSServerName: strings.TrimSpace(req.TLSServerName), HostHeader: hostHeader,
TLSInsecureSkipVerify: req.TLSInsecureSkipVerify, TLSServerName: tlsServerName,
SyncIntervalSec: normalizeRPMMirrorIntervalSec(req.SyncIntervalSec), TLSInsecureSkipVerify: tlsInsecureSkipVerify,
SyncIntervalSec: syncIntervalSec,
} }
existingConfig, err = api.Store.GetRPMRepoDir(repo.ID, relPathClean) if existingConfigLoaded {
if err == nil {
dirConfig.SyncEnabled = existingConfig.SyncEnabled dirConfig.SyncEnabled = existingConfig.SyncEnabled
} else if errors.Is(err, sql.ErrNoRows) { } else if errors.Is(err, sql.ErrNoRows) {
dirConfig.SyncEnabled = true dirConfig.SyncEnabled = true
} else { } else {
WriteJSON(w, http.StatusInternalServerError, map[string]string{"error": err.Error()}) if err != nil {
return WriteJSON(w, http.StatusInternalServerError, map[string]string{"error": err.Error()})
return
}
dirConfig.SyncEnabled = true
} }
err = api.Store.UpsertRPMRepoDir(dirConfig) err = api.Store.UpsertRPMRepoDir(dirConfig)
if err != nil { if err != nil {
@@ -3305,6 +3635,7 @@ func (api *API) RepoRPMDeleteFile(w http.ResponseWriter, r *http.Request, params
var repo models.Repo var repo models.Repo
var writeBlocked bool var writeBlocked bool
var writeBlockedPath string var writeBlockedPath string
var allowMirrorDelete bool
var err error var err error
var relPath string var relPath string
var relPathClean string var relPathClean string
@@ -3346,8 +3677,15 @@ func (api *API) RepoRPMDeleteFile(w http.ResponseWriter, r *http.Request, params
return return
} }
if writeBlocked { if writeBlocked {
WriteJSON(w, http.StatusForbidden, map[string]string{"error": "writes are disabled for mirror repo subtree", "mirror_root": writeBlockedPath}) allowMirrorDelete, err = api.allowRPMMirrorDelete(repo, relPathClean, false)
return if err != nil {
WriteJSON(w, http.StatusInternalServerError, map[string]string{"error": err.Error()})
return
}
if !allowMirrorDelete {
WriteJSON(w, http.StatusForbidden, map[string]string{"error": "writes are disabled for mirror repo subtree", "mirror_root": writeBlockedPath})
return
}
} }
lower = strings.ToLower(relPathClean) lower = strings.ToLower(relPathClean)
if !strings.HasSuffix(lower, ".rpm") { if !strings.HasSuffix(lower, ".rpm") {
@@ -5063,6 +5401,78 @@ func (api *API) isRPMWriteBlocked(repo models.Repo, relPath string) (bool, strin
return true, root, nil return true, root, nil
} }
// allowRPMMirrorDelete reports whether deleting relPath inside repo is
// permitted even though relPath lies under a mirror-managed subtree.
//
// Rules, as implemented below:
//   - relPath not under any mirror root: allowed.
//   - relPath equal to the mirror root itself: denied (root deletion is
//     handled by a dedicated path, not this check).
//   - mirror root has no stored config row, or AllowDelete is unset: denied.
//   - files (isDir == false): allowed once AllowDelete is set.
//   - directories: allowed only when the directory carries no repo-dir
//     config row of its own (i.e. it is a plain container directory).
func (api *API) allowRPMMirrorDelete(repo models.Repo, relPath string, isDir bool) (bool, error) {
	var root string
	var cfg models.RPMRepoDir
	var normalizedPath string
	var err error
	root, err = api.findRPMMirrorRoot(repo, relPath)
	if err != nil {
		return false, err
	}
	if root == "" {
		// Not inside any mirror subtree; nothing restricts the delete here.
		return true, nil
	}
	normalizedPath = normalizeRPMPath(relPath)
	if normalizedPath == normalizeRPMPath(root) {
		// The mirror root itself is never deletable through this check.
		return false, nil
	}
	cfg, err = api.Store.GetRPMRepoDir(repo.ID, normalizeRPMPath(root))
	if err != nil {
		if errors.Is(err, sql.ErrNoRows) {
			// Mirror root without a stored config: be conservative, deny.
			return false, nil
		}
		return false, err
	}
	if !cfg.AllowDelete {
		return false, nil
	}
	if !isDir {
		return true, nil
	}
	// Directories are deletable only when they have no repo-dir config of
	// their own; a config row marks a managed repo directory.
	_, err = api.Store.GetRPMRepoDir(repo.ID, normalizedPath)
	if err == nil {
		return false, nil
	}
	// err is non-nil here, so the original `err != nil &&` guard was
	// redundant; only the ErrNoRows distinction matters.
	if !errors.Is(err, sql.ErrNoRows) {
		return false, err
	}
	return true, nil
}
// hasBusyMirrorRootUnder reports whether any mirror-mode directory config of
// repo whose root contains relPath currently has activity in progress.
// It returns (busy, root, reason, err); reason is "sync_running" when the
// stored config marks a sync in flight, or "metadata_running" when the
// metadata manager reports a running job for the root's on-disk directory.
func (api *API) hasBusyMirrorRootUnder(repo models.Repo, relPath string) (bool, string, string, error) {
	var dirs []models.RPMRepoDir
	var target string
	var root string
	var fullDir string
	var idx int
	var err error
	dirs, err = api.Store.ListRPMRepoDirs(repo.ID)
	if err != nil {
		return false, "", "", err
	}
	target = normalizeRPMPath(relPath)
	for idx = range dirs {
		if normalizeRPMRepoDirMode(dirs[idx].Mode) != "mirror" {
			continue
		}
		root = normalizeRPMPath(dirs[idx].Path)
		if !pathUnderRoot(root, target) {
			continue
		}
		if dirs[idx].SyncRunning {
			return true, root, "sync_running", nil
		}
		if api.RpmMeta == nil {
			continue
		}
		fullDir = filepath.Join(repo.Path, filepath.FromSlash(root))
		if api.RpmMeta.IsRunning(fullDir) {
			return true, root, "metadata_running", nil
		}
	}
	return false, "", "", nil
}
func nameHasWhitespace(name string) bool { func nameHasWhitespace(name string) bool {
return strings.IndexFunc(name, unicode.IsSpace) >= 0 return strings.IndexFunc(name, unicode.IsSpace) >= 0
} }

View File

@@ -48,6 +48,7 @@ type RPMRepoDir struct {
RepoID string `json:"repo_id"` RepoID string `json:"repo_id"`
Path string `json:"path"` Path string `json:"path"`
Mode string `json:"mode"` Mode string `json:"mode"`
AllowDelete bool `json:"allow_delete"`
RemoteURL string `json:"remote_url"` RemoteURL string `json:"remote_url"`
ConnectHost string `json:"connect_host"` ConnectHost string `json:"connect_host"`
HostHeader string `json:"host_header"` HostHeader string `json:"host_header"`

View File

@@ -1,8 +1,11 @@
package rpm package rpm
import "log" import "log"
import "os"
import "path/filepath"
import "strings" import "strings"
import "sync" import "sync"
import "time"
import repokit "repokit" import repokit "repokit"
@@ -24,6 +27,18 @@ func NewMetaManager() *MetaManager {
return mgr return mgr
} }
// IsRunning reports whether a metadata build job is currently in progress
// for dir. It takes the manager mutex for the duration of the lookup.
func (m *MetaManager) IsRunning(dir string) bool {
	var state *metaState
	m.mutex.Lock()
	defer m.mutex.Unlock()
	// A missing key yields a nil *metaState, so a single nil check covers
	// both "no entry" and "nil entry".
	state = m.states[dir]
	if state == nil {
		return false
	}
	return state.inProgress
}
func (m *MetaManager) Schedule(dir string) { func (m *MetaManager) Schedule(dir string) {
var state *metaState var state *metaState
var ok bool var ok bool
@@ -46,7 +61,15 @@ func (m *MetaManager) Schedule(dir string) {
func (m *MetaManager) run(dir string) { func (m *MetaManager) run(dir string) {
var err error var err error
var opts repokit.RpmRepoOptions var opts repokit.RpmRepoOptions
var state *metaState
var ok bool
var repodataDir string
var repomdPath string
var entries []os.DirEntry
var repomdInfo os.FileInfo
var statErr error
for { for {
log.Printf("rpm metadata: job begin dir=%s", dir)
opts = repokit.RpmDefaultRepoOptions() opts = repokit.RpmDefaultRepoOptions()
opts.LockMode = repokit.RpmLockFail opts.LockMode = repokit.RpmLockFail
opts.AllowMissingRepomd = true opts.AllowMissingRepomd = true
@@ -57,24 +80,55 @@ func (m *MetaManager) run(dir string) {
if err != nil { if err != nil {
if isLockError(err) { if isLockError(err) {
log.Printf("rpm metadata: lock busy dir=%s err=%v", dir, err) log.Printf("rpm metadata: lock busy dir=%s err=%v", dir, err)
m.states[dir].pending = true log.Printf("rpm metadata: job end dir=%s result=lock_busy", dir)
m.states[dir].inProgress = false state, ok = m.states[dir]
if ok {
state.pending = true
state.inProgress = false
}
m.mutex.Unlock() m.mutex.Unlock()
time.AfterFunc(2*time.Second, func() {
m.Schedule(dir)
})
return return
} }
log.Printf("rpm metadata: build failed dir=%s err=%v", dir, err) log.Printf("rpm metadata: build failed dir=%s err=%v", dir, err)
m.states[dir].inProgress = false log.Printf("rpm metadata: job end dir=%s result=failed err=%v", dir, err)
state, ok = m.states[dir]
if ok {
state.inProgress = false
}
m.mutex.Unlock() m.mutex.Unlock()
return return
} }
repodataDir = filepath.Join(dir, "repodata")
repomdPath = filepath.Join(repodataDir, "repomd.xml")
entries, err = os.ReadDir(repodataDir)
if err != nil {
log.Printf("rpm metadata: post-check dir=%s repodata_dir=%s read_err=%v", dir, repodataDir, err)
} else {
statErr = nil
repomdInfo = nil
repomdInfo, statErr = os.Stat(repomdPath)
if statErr != nil {
log.Printf("rpm metadata: post-check dir=%s repodata_entries=%d repomd_path=%s repomd_err=%v", dir, len(entries), repomdPath, statErr)
} else {
log.Printf("rpm metadata: post-check dir=%s repodata_entries=%d repomd_path=%s repomd_size=%d", dir, len(entries), repomdPath, repomdInfo.Size())
}
}
log.Printf("rpm metadata: build done dir=%s", dir) log.Printf("rpm metadata: build done dir=%s", dir)
if m.states[dir].pending { state, ok = m.states[dir]
m.states[dir].pending = false if ok && state.pending {
log.Printf("rpm metadata: job end dir=%s result=pending_rerun", dir)
state.pending = false
m.mutex.Unlock() m.mutex.Unlock()
continue continue
} }
m.states[dir].inProgress = false if ok {
state.inProgress = false
}
m.mutex.Unlock() m.mutex.Unlock()
log.Printf("rpm metadata: job end dir=%s result=success", dir)
return return
} }
} }

View File

@@ -2,7 +2,10 @@ package rpm
import "compress/gzip" import "compress/gzip"
import "context" import "context"
import "crypto/md5"
import "crypto/sha1"
import "crypto/sha256" import "crypto/sha256"
import "crypto/sha512"
import "crypto/tls" import "crypto/tls"
import "bytes" import "bytes"
import "encoding/hex" import "encoding/hex"
@@ -16,7 +19,9 @@ import "net/http"
import "net/url" import "net/url"
import "os" import "os"
import "path/filepath" import "path/filepath"
import "strconv"
import "strings" import "strings"
import "sync"
import "time" import "time"
import "codit/internal/db" import "codit/internal/db"
@@ -28,6 +33,8 @@ type MirrorManager struct {
logger *util.Logger logger *util.Logger
meta *MetaManager meta *MetaManager
stopCh chan struct{} stopCh chan struct{}
cancelMu sync.Mutex
cancelByKey map[string]context.CancelFunc
} }
type repomdDoc struct { type repomdDoc struct {
@@ -50,6 +57,7 @@ type primaryDoc struct {
type primaryPackage struct { type primaryPackage struct {
Location primaryLocation `xml:"location"` Location primaryLocation `xml:"location"`
Checksum primaryChecksum `xml:"checksum"` Checksum primaryChecksum `xml:"checksum"`
Time primaryTime `xml:"time"`
} }
type primaryLocation struct { type primaryLocation struct {
@@ -61,6 +69,18 @@ type primaryChecksum struct {
Value string `xml:",chardata"` Value string `xml:",chardata"`
} }
// mirrorChecksum carries the expected checksum (and associated timestamps)
// for one package entry parsed from a remote primary.xml.
type mirrorChecksum struct {
	Algo string // checksum algorithm name as given by primary.xml (e.g. "sha256") — used to pick the hash for local verification
	Value string // hex checksum value; empty means no checksum is known for the entry
	BuildTime int64 // presumably the package <time build=...> value — TODO confirm where this is populated
	FileTime int64 // presumably the package <time file=...> value — TODO confirm where this is populated
}
// primaryTime mirrors the <time> element of a package entry in primary.xml.
type primaryTime struct {
	File string `xml:"file,attr"` // raw "file" timestamp attribute, kept as a string
	Build string `xml:"build,attr"` // raw "build" timestamp attribute, kept as a string
}
type mirrorHTTPConfig struct { type mirrorHTTPConfig struct {
BaseURL string BaseURL string
ConnectHost string ConnectHost string
@@ -78,10 +98,28 @@ func NewMirrorManager(store *db.Store, logger *util.Logger, meta *MetaManager) *
logger: logger, logger: logger,
meta: meta, meta: meta,
stopCh: make(chan struct{}), stopCh: make(chan struct{}),
cancelByKey: make(map[string]context.CancelFunc),
} }
return m return m
} }
// CancelTask cancels the in-flight mirror sync identified by repoID and
// path, if one is registered. It returns true when a cancel function was
// found and invoked, false otherwise (including on a nil manager).
func (m *MirrorManager) CancelTask(repoID string, path string) bool {
	var cancel context.CancelFunc
	if m == nil {
		return false
	}
	m.cancelMu.Lock()
	cancel = m.cancelByKey[mirrorTaskKey(repoID, path)]
	m.cancelMu.Unlock()
	if cancel == nil {
		return false
	}
	cancel()
	return true
}
func (m *MirrorManager) Start() { func (m *MirrorManager) Start() {
var err error var err error
var tasks []models.RPMMirrorTask var tasks []models.RPMMirrorTask
@@ -161,16 +199,33 @@ func (m *MirrorManager) syncOne(task models.RPMMirrorTask) {
var revision string var revision string
var primaryHref string var primaryHref string
var primaryData []byte var primaryData []byte
var expected map[string]string var expected map[string]mirrorChecksum
var duplicateCount int
var runID string var runID string
var startedAt int64 var startedAt int64
var total int64 var total int64
var done int64 var done int64
var failed int64 var failed int64
var deleted int64 var deleted int64
var changed int64
var err error var err error
var syncCtx context.Context
var syncCancel context.CancelFunc
var canceled bool
var key string
localRoot = filepath.Join(task.RepoPath, filepath.FromSlash(task.MirrorPath)) localRoot = filepath.Join(task.RepoPath, filepath.FromSlash(task.MirrorPath))
startedAt = time.Now().UTC().Unix() startedAt = time.Now().UTC().Unix()
syncCtx, syncCancel = context.WithCancel(context.Background())
key = mirrorTaskKey(task.RepoID, task.MirrorPath)
m.cancelMu.Lock()
m.cancelByKey[key] = syncCancel
m.cancelMu.Unlock()
defer func() {
m.cancelMu.Lock()
delete(m.cancelByKey, key)
m.cancelMu.Unlock()
syncCancel()
}()
runID, err = m.store.CreateRPMMirrorRun(task.RepoID, task.MirrorPath, startedAt) runID, err = m.store.CreateRPMMirrorRun(task.RepoID, task.MirrorPath, startedAt)
if err != nil { if err != nil {
_ = m.store.FinishRPMMirrorTask(task.RepoID, task.MirrorPath, false, "", err.Error()) _ = m.store.FinishRPMMirrorTask(task.RepoID, task.MirrorPath, false, "", err.Error())
@@ -190,7 +245,7 @@ func (m *MirrorManager) syncOne(task models.RPMMirrorTask) {
m.logger.Write("rpm-mirror", util.LOG_INFO, "sync start repo=%s path=%s remote=%s", task.RepoID, task.MirrorPath, task.RemoteURL) m.logger.Write("rpm-mirror", util.LOG_INFO, "sync start repo=%s path=%s remote=%s", task.RepoID, task.MirrorPath, task.RemoteURL)
} }
_ = m.store.UpdateRPMMirrorTaskProgress(task.RepoID, task.MirrorPath, "fetch_repodata", 0, 0, 0, 0) _ = m.store.UpdateRPMMirrorTaskProgress(task.RepoID, task.MirrorPath, "fetch_repodata", 0, 0, 0, 0)
repomdData, err = mirrorFetch(client, cfg, "repodata/repomd.xml") repomdData, err = mirrorFetch(syncCtx, client, cfg, "repodata/repomd.xml")
if err != nil { if err != nil {
if m.logger != nil { if m.logger != nil {
m.logger.Write("rpm-mirror", util.LOG_ERROR, "sync failed repo=%s path=%s step=fetch_repodata err=%v", task.RepoID, task.MirrorPath, err) m.logger.Write("rpm-mirror", util.LOG_ERROR, "sync failed repo=%s path=%s step=fetch_repodata err=%v", task.RepoID, task.MirrorPath, err)
@@ -201,6 +256,9 @@ func (m *MirrorManager) syncOne(task models.RPMMirrorTask) {
} }
revision = sha256HexBytes(repomdData) revision = sha256HexBytes(repomdData)
if !task.Dirty && task.LastSyncedRevision != "" && task.LastSyncedRevision == revision { if !task.Dirty && task.LastSyncedRevision != "" && task.LastSyncedRevision == revision {
if m.meta != nil {
ensureRepodata(task, localRoot, m.meta, m.logger)
}
if m.logger != nil { if m.logger != nil {
m.logger.Write("rpm-mirror", util.LOG_INFO, "sync done repo=%s path=%s status=no_change revision=%s", task.RepoID, task.MirrorPath, revision) m.logger.Write("rpm-mirror", util.LOG_INFO, "sync done repo=%s path=%s status=no_change revision=%s", task.RepoID, task.MirrorPath, revision)
} }
@@ -218,7 +276,7 @@ func (m *MirrorManager) syncOne(task models.RPMMirrorTask) {
return return
} }
_ = m.store.UpdateRPMMirrorTaskProgress(task.RepoID, task.MirrorPath, "fetch_primary", 0, 0, 0, 0) _ = m.store.UpdateRPMMirrorTaskProgress(task.RepoID, task.MirrorPath, "fetch_primary", 0, 0, 0, 0)
primaryData, err = mirrorFetch(client, cfg, primaryHref) primaryData, err = mirrorFetch(syncCtx, client, cfg, primaryHref)
if err != nil { if err != nil {
if m.logger != nil { if m.logger != nil {
m.logger.Write("rpm-mirror", util.LOG_ERROR, "sync failed repo=%s path=%s step=fetch_primary err=%v", task.RepoID, task.MirrorPath, err) m.logger.Write("rpm-mirror", util.LOG_ERROR, "sync failed repo=%s path=%s step=fetch_primary err=%v", task.RepoID, task.MirrorPath, err)
@@ -238,7 +296,7 @@ func (m *MirrorManager) syncOne(task models.RPMMirrorTask) {
return return
} }
} }
expected, err = parsePrimaryPackages(primaryData) expected, duplicateCount, err = parsePrimaryPackages(primaryData)
if err != nil { if err != nil {
if m.logger != nil { if m.logger != nil {
m.logger.Write("rpm-mirror", util.LOG_ERROR, "sync failed repo=%s path=%s step=parse_primary err=%v", task.RepoID, task.MirrorPath, err) m.logger.Write("rpm-mirror", util.LOG_ERROR, "sync failed repo=%s path=%s step=parse_primary err=%v", task.RepoID, task.MirrorPath, err)
@@ -247,16 +305,35 @@ func (m *MirrorManager) syncOne(task models.RPMMirrorTask) {
_ = m.store.FinishRPMMirrorRun(runID, time.Now().UTC().Unix(), "failed", "fetch_primary", 0, 0, 0, 0, "", err.Error()) _ = m.store.FinishRPMMirrorRun(runID, time.Now().UTC().Unix(), "failed", "fetch_primary", 0, 0, 0, 0, "", err.Error())
return return
} }
total, done, failed, deleted, err = m.applyMirror(task, localRoot, client, cfg, expected) if m.logger != nil {
if err != nil { m.logger.Write("rpm-mirror", util.LOG_INFO, "primary parsed repo=%s path=%s primary_href=%s packages=%d", task.RepoID, task.MirrorPath, primaryHref, len(expected))
if m.logger != nil { if duplicateCount > 0 {
m.logger.Write("rpm-mirror", util.LOG_ERROR, "sync failed repo=%s path=%s step=apply total=%d done=%d failed=%d deleted=%d err=%v", task.RepoID, task.MirrorPath, total, done, failed, deleted, err) m.logger.Write("rpm-mirror", util.LOG_WARN, "primary has duplicate package paths repo=%s path=%s primary_href=%s duplicates=%d", task.RepoID, task.MirrorPath, primaryHref, duplicateCount)
}
}
total, done, failed, deleted, changed, err = m.applyMirror(syncCtx, task, localRoot, client, cfg, expected)
if err != nil {
canceled = errors.Is(err, context.Canceled)
if m.logger != nil {
if canceled {
m.logger.Write("rpm-mirror", util.LOG_WARN, "sync canceled repo=%s path=%s step=apply total=%d done=%d failed=%d deleted=%d err=%v", task.RepoID, task.MirrorPath, total, done, failed, deleted, err)
} else {
m.logger.Write("rpm-mirror", util.LOG_ERROR, "sync failed repo=%s path=%s step=apply total=%d done=%d failed=%d deleted=%d err=%v", task.RepoID, task.MirrorPath, total, done, failed, deleted, err)
}
}
if canceled {
_ = m.store.FinishRPMMirrorTask(task.RepoID, task.MirrorPath, false, "", "sync canceled by user")
_ = m.store.FinishRPMMirrorRun(runID, time.Now().UTC().Unix(), "failed", "canceled", total, done, failed, deleted, "", "sync canceled by user")
} else {
_ = m.store.FinishRPMMirrorTask(task.RepoID, task.MirrorPath, false, "", err.Error())
_ = m.store.FinishRPMMirrorRun(runID, time.Now().UTC().Unix(), "failed", "apply", total, done, failed, deleted, "", err.Error())
} }
_ = m.store.FinishRPMMirrorTask(task.RepoID, task.MirrorPath, false, "", err.Error())
_ = m.store.FinishRPMMirrorRun(runID, time.Now().UTC().Unix(), "failed", "apply", total, done, failed, deleted, "", err.Error())
return return
} }
if m.meta != nil { if m.meta != nil && changed > 0 {
if m.logger != nil {
m.logger.Write("rpm-mirror", util.LOG_INFO, "repodata schedule repo=%s path=%s reason=sync_changed changed=%d", task.RepoID, task.MirrorPath, changed)
}
m.meta.Schedule(localRoot) m.meta.Schedule(localRoot)
} }
_ = m.store.FinishRPMMirrorTask(task.RepoID, task.MirrorPath, true, revision, "") _ = m.store.FinishRPMMirrorTask(task.RepoID, task.MirrorPath, true, revision, "")
@@ -267,58 +344,91 @@ func (m *MirrorManager) syncOne(task models.RPMMirrorTask) {
} }
} }
func (m *MirrorManager) applyMirror(task models.RPMMirrorTask, localRoot string, client *http.Client, cfg mirrorHTTPConfig, expected map[string]string) (int64, int64, int64, int64, error) { func (m *MirrorManager) applyMirror(ctx context.Context, task models.RPMMirrorTask, localRoot string, client *http.Client, cfg mirrorHTTPConfig, expected map[string]mirrorChecksum) (int64, int64, int64, int64, int64, error) {
var local map[string]bool var local map[string]bool
var total int64 var total int64
var done int64 var done int64
var failed int64 var failed int64
var deleted int64 var deleted int64
var changed int64
var path string var path string
var checksum string var checksum mirrorChecksum
var fullPath string var fullPath string
var localSum string var localSum string
var needDownload bool var needDownload bool
var err error var err error
local, err = listLocalRPMs(localRoot) local, err = listLocalRPMs(localRoot)
if err != nil { if err != nil {
return 0, 0, 0, 0, err return 0, 0, 0, 0, 0, err
} }
total = int64(len(expected)) total = int64(len(expected))
_ = m.store.UpdateRPMMirrorTaskProgress(task.RepoID, task.MirrorPath, "apply", total, 0, 0, 0) _ = m.store.UpdateRPMMirrorTaskProgress(task.RepoID, task.MirrorPath, "apply", total, 0, 0, 0)
for path = range local { for path = range local {
if expected[path] != "" { select {
case <-ctx.Done():
return total, done, failed, deleted, changed, ctx.Err()
default:
}
if expected[path].Value != "" {
continue continue
} }
if m.logger != nil {
m.logger.Write("rpm-mirror", util.LOG_DEBUG, "delete local stale repo=%s path=%s file=%s", task.RepoID, task.MirrorPath, path)
}
err = os.Remove(filepath.Join(localRoot, filepath.FromSlash(path))) err = os.Remove(filepath.Join(localRoot, filepath.FromSlash(path)))
if err == nil || os.IsNotExist(err) { if err == nil || os.IsNotExist(err) {
deleted = deleted + 1 deleted = deleted + 1
changed = changed + 1
} else {
if m.logger != nil {
m.logger.Write("rpm-mirror", util.LOG_WARN, "delete local stale failed repo=%s path=%s file=%s err=%v", task.RepoID, task.MirrorPath, path, err)
}
} }
} }
for path, checksum = range expected { for path, checksum = range expected {
select {
case <-ctx.Done():
return total, done, failed, deleted, changed, ctx.Err()
default:
}
fullPath = filepath.Join(localRoot, filepath.FromSlash(path)) fullPath = filepath.Join(localRoot, filepath.FromSlash(path))
needDownload = true needDownload = true
_, err = os.Stat(fullPath) _, err = os.Stat(fullPath)
if err == nil { if err == nil {
localSum, err = sha256HexFile(fullPath) localSum, err = fileHexByAlgo(fullPath, checksum.Algo)
if err == nil && (checksum == "" || strings.EqualFold(localSum, checksum)) { if err == nil && (checksum.Value == "" || strings.EqualFold(localSum, checksum.Value)) {
needDownload = false needDownload = false
} }
} }
if needDownload { if needDownload {
err = mirrorDownload(client, cfg, path, fullPath, checksum) if m.logger != nil {
m.logger.Write("rpm-mirror", util.LOG_DEBUG, "download start repo=%s path=%s file=%s checksum_type=%s checksum=%s", task.RepoID, task.MirrorPath, path, checksum.Algo, checksum.Value)
}
err = mirrorDownload(ctx, client, cfg, path, fullPath, checksum.Algo, checksum.Value)
if err != nil { if err != nil {
failed = failed + 1 failed = failed + 1
if m.logger != nil {
m.logger.Write("rpm-mirror", util.LOG_WARN, "download failed repo=%s path=%s file=%s err=%v", task.RepoID, task.MirrorPath, path, err)
}
_ = m.store.UpdateRPMMirrorTaskProgress(task.RepoID, task.MirrorPath, "apply", total, done, failed, deleted) _ = m.store.UpdateRPMMirrorTaskProgress(task.RepoID, task.MirrorPath, "apply", total, done, failed, deleted)
continue continue
} }
changed = changed + 1
if m.logger != nil {
m.logger.Write("rpm-mirror", util.LOG_DEBUG, "download done repo=%s path=%s file=%s", task.RepoID, task.MirrorPath, path)
}
} else {
if m.logger != nil {
m.logger.Write("rpm-mirror", util.LOG_DEBUG, "download skip repo=%s path=%s file=%s reason=up-to-date", task.RepoID, task.MirrorPath, path)
}
} }
done = done + 1 done = done + 1
_ = m.store.UpdateRPMMirrorTaskProgress(task.RepoID, task.MirrorPath, "apply", total, done, failed, deleted) _ = m.store.UpdateRPMMirrorTaskProgress(task.RepoID, task.MirrorPath, "apply", total, done, failed, deleted)
} }
if failed > 0 { if failed > 0 {
return total, done, failed, deleted, errors.New("some mirror files failed to sync") return total, done, failed, deleted, changed, errors.New("some mirror files failed to sync")
} }
return total, done, failed, deleted, nil return total, done, failed, deleted, changed, nil
} }
func buildMirrorHTTPConfig(task models.RPMMirrorTask) (mirrorHTTPConfig, error) { func buildMirrorHTTPConfig(task models.RPMMirrorTask) (mirrorHTTPConfig, error) {
@@ -391,14 +501,14 @@ func effectiveServerName(cfg mirrorHTTPConfig) string {
return cfg.DefaultServer return cfg.DefaultServer
} }
func mirrorFetch(client *http.Client, cfg mirrorHTTPConfig, rel string) ([]byte, error) { func mirrorFetch(ctx context.Context, client *http.Client, cfg mirrorHTTPConfig, rel string) ([]byte, error) {
var fullURL string var fullURL string
var req *http.Request var req *http.Request
var res *http.Response var res *http.Response
var body []byte var body []byte
var err error var err error
fullURL = joinRemoteURL(cfg.BaseURL, rel) fullURL = joinRemoteURL(cfg.BaseURL, rel)
req, err = http.NewRequest(http.MethodGet, fullURL, nil) req, err = http.NewRequestWithContext(ctx, http.MethodGet, fullURL, nil)
if err != nil { if err != nil {
return nil, err return nil, err
} }
@@ -418,7 +528,7 @@ func mirrorFetch(client *http.Client, cfg mirrorHTTPConfig, rel string) ([]byte,
return body, nil return body, nil
} }
func mirrorDownload(client *http.Client, cfg mirrorHTTPConfig, rel string, dstPath string, checksum string) error { func mirrorDownload(ctx context.Context, client *http.Client, cfg mirrorHTTPConfig, rel string, dstPath string, checksumType string, checksum string) error {
var fullURL string var fullURL string
var req *http.Request var req *http.Request
var res *http.Response var res *http.Response
@@ -426,13 +536,16 @@ func mirrorDownload(client *http.Client, cfg mirrorHTTPConfig, rel string, dstPa
var out *os.File var out *os.File
var hash hashWriter var hash hashWriter
var copied int64 var copied int64
var actualSum string
var contentType string
var finalURL string
var err error var err error
err = os.MkdirAll(filepath.Dir(dstPath), 0o755) err = os.MkdirAll(filepath.Dir(dstPath), 0o755)
if err != nil { if err != nil {
return err return err
} }
fullURL = joinRemoteURL(cfg.BaseURL, rel) fullURL = joinRemoteURL(cfg.BaseURL, rel)
req, err = http.NewRequest(http.MethodGet, fullURL, nil) req, err = http.NewRequestWithContext(ctx, http.MethodGet, fullURL, nil)
if err != nil { if err != nil {
return err return err
} }
@@ -445,13 +558,22 @@ func mirrorDownload(client *http.Client, cfg mirrorHTTPConfig, rel string, dstPa
if res.StatusCode < 200 || res.StatusCode >= 300 { if res.StatusCode < 200 || res.StatusCode >= 300 {
return errors.New("upstream request failed: " + res.Status) return errors.New("upstream request failed: " + res.Status)
} }
contentType = strings.TrimSpace(res.Header.Get("Content-Type"))
finalURL = ""
if res.Request != nil && res.Request.URL != nil {
finalURL = res.Request.URL.String()
}
tempPath = dstPath + ".mirror.tmp" tempPath = dstPath + ".mirror.tmp"
out, err = os.Create(tempPath) out, err = os.Create(tempPath)
if err != nil { if err != nil {
return err return err
} }
defer out.Close() defer out.Close()
hash = newHashWriter() hash, err = newHashWriter(checksumType)
if err != nil {
_ = os.Remove(tempPath)
return err
}
copied, err = io.Copy(io.MultiWriter(out, hash), res.Body) copied, err = io.Copy(io.MultiWriter(out, hash), res.Body)
_ = copied _ = copied
if err != nil { if err != nil {
@@ -464,9 +586,17 @@ func mirrorDownload(client *http.Client, cfg mirrorHTTPConfig, rel string, dstPa
return err return err
} }
if strings.TrimSpace(checksum) != "" { if strings.TrimSpace(checksum) != "" {
if !strings.EqualFold(hash.Sum(), strings.TrimSpace(checksum)) { actualSum = hash.Sum()
if !strings.EqualFold(actualSum, strings.TrimSpace(checksum)) {
_ = os.Remove(tempPath) _ = os.Remove(tempPath)
return errors.New("download checksum mismatch for " + rel) return errors.New(
"download checksum mismatch for " + rel +
" type=" + normalizeChecksumAlgo(checksumType) +
" expected=" + strings.TrimSpace(checksum) +
" actual=" + actualSum +
" bytes=" + int64ToString(copied) +
" content_type=" + contentType +
" url=" + finalURL)
} }
} }
err = os.Rename(tempPath, dstPath) err = os.Rename(tempPath, dstPath)
@@ -478,9 +608,26 @@ func mirrorDownload(client *http.Client, cfg mirrorHTTPConfig, rel string, dstPa
} }
func joinRemoteURL(base string, rel string) string { func joinRemoteURL(base string, rel string) string {
var baseURL *url.URL
var relURL *url.URL
var cleanRel string var cleanRel string
cleanRel = strings.TrimLeft(strings.ReplaceAll(rel, "\\", "/"), "/") var err error
return strings.TrimRight(base, "/") + "/" + cleanRel cleanRel = strings.ReplaceAll(rel, "\\", "/")
baseURL, err = url.Parse(strings.TrimSpace(base))
if err != nil || baseURL == nil {
cleanRel = strings.TrimLeft(cleanRel, "/")
return strings.TrimRight(base, "/") + "/" + cleanRel
}
// Treat base as a directory root for repository-relative href resolution.
if !strings.HasSuffix(baseURL.Path, "/") {
baseURL.Path = baseURL.Path + "/"
}
relURL, err = url.Parse(strings.TrimSpace(cleanRel))
if err != nil || relURL == nil {
cleanRel = strings.TrimLeft(cleanRel, "/")
return strings.TrimRight(base, "/") + "/" + cleanRel
}
return baseURL.ResolveReference(relURL).String()
} }
func parseRepomdPrimaryHref(data []byte) (string, error) { func parseRepomdPrimaryHref(data []byte) (string, error) {
@@ -503,18 +650,24 @@ func parseRepomdPrimaryHref(data []byte) (string, error) {
return "", errors.New("primary metadata not found in repomd") return "", errors.New("primary metadata not found in repomd")
} }
func parsePrimaryPackages(data []byte) (map[string]string, error) { func parsePrimaryPackages(data []byte) (map[string]mirrorChecksum, int, error) {
var doc primaryDoc var doc primaryDoc
var out map[string]string var out map[string]mirrorChecksum
var i int var i int
var path string var path string
var checksum string var checksum string
var checksumType string
var fileTime int64
var buildTime int64
var existing mirrorChecksum
var ok bool
var duplicates int
var err error var err error
err = xml.Unmarshal(data, &doc) err = xml.Unmarshal(data, &doc)
if err != nil { if err != nil {
return nil, err return nil, 0, err
} }
out = make(map[string]string) out = make(map[string]mirrorChecksum)
for i = 0; i < len(doc.Packages); i++ { for i = 0; i < len(doc.Packages); i++ {
path = strings.TrimSpace(doc.Packages[i].Location.Href) path = strings.TrimSpace(doc.Packages[i].Location.Href)
if path == "" { if path == "" {
@@ -524,9 +677,23 @@ func parsePrimaryPackages(data []byte) (map[string]string, error) {
continue continue
} }
checksum = strings.TrimSpace(doc.Packages[i].Checksum.Value) checksum = strings.TrimSpace(doc.Packages[i].Checksum.Value)
out[path] = strings.ToLower(checksum) checksumType = strings.TrimSpace(doc.Packages[i].Checksum.Type)
fileTime = parseTimeAttr(doc.Packages[i].Time.File)
buildTime = parseTimeAttr(doc.Packages[i].Time.Build)
if existing, ok = out[path]; ok {
duplicates = duplicates + 1
if !shouldReplaceDuplicate(existing, buildTime, fileTime, checksum) {
continue
}
}
out[path] = mirrorChecksum{
Algo: normalizeChecksumAlgo(checksumType),
Value: strings.ToLower(checksum),
BuildTime: buildTime,
FileTime: fileTime,
}
} }
return out, nil return out, duplicates, nil
} }
func listLocalRPMs(root string) (map[string]bool, error) { func listLocalRPMs(root string) (map[string]bool, error) {
@@ -569,7 +736,7 @@ func sha256HexBytes(data []byte) string {
return hex.EncodeToString(sum[:]) return hex.EncodeToString(sum[:])
} }
func sha256HexFile(path string) (string, error) { func fileHexByAlgo(path string, algo string) (string, error) {
var file *os.File var file *os.File
var hash hashWriter var hash hashWriter
var copied int64 var copied int64
@@ -579,7 +746,10 @@ func sha256HexFile(path string) (string, error) {
return "", err return "", err
} }
defer file.Close() defer file.Close()
hash = newHashWriter() hash, err = newHashWriter(algo)
if err != nil {
return "", err
}
copied, err = io.Copy(hash, file) copied, err = io.Copy(hash, file)
_ = copied _ = copied
if err != nil { if err != nil {
@@ -615,10 +785,29 @@ type shaWriter struct {
h hash.Hash h hash.Hash
} }
func newHashWriter() hashWriter { func newHashWriter(algo string) (hashWriter, error) {
var w *shaWriter var w *shaWriter
w = &shaWriter{h: sha256.New()} var normalized string
return w var h hash.Hash
normalized = normalizeChecksumAlgo(algo)
switch normalized {
case "", "sha256":
h = sha256.New()
case "sha", "sha1":
h = sha1.New()
case "sha224":
h = sha256.New224()
case "sha384":
h = sha512.New384()
case "sha512":
h = sha512.New()
case "md5":
h = md5.New()
default:
return nil, errors.New("unsupported checksum type: " + normalized)
}
w = &shaWriter{h: h}
return w, nil
} }
func (w *shaWriter) Write(p []byte) (int, error) { func (w *shaWriter) Write(p []byte) (int, error) {
@@ -630,3 +819,90 @@ func (w *shaWriter) Sum() string {
raw = w.h.Sum(nil) raw = w.h.Sum(nil)
return hex.EncodeToString(raw) return hex.EncodeToString(raw)
} }
func normalizeChecksumAlgo(algo string) string {
var out string
out = strings.ToLower(strings.TrimSpace(algo))
out = strings.ReplaceAll(out, "-", "")
out = strings.ReplaceAll(out, "_", "")
if out == "sha1" {
return "sha1"
}
if out == "sha" {
return "sha"
}
if out == "sha224" {
return "sha224"
}
if out == "sha256" {
return "sha256"
}
if out == "sha384" {
return "sha384"
}
if out == "sha512" {
return "sha512"
}
if out == "md5" {
return "md5"
}
return out
}
func int64ToString(v int64) string {
return strconv.FormatInt(v, 10)
}
func mirrorTaskKey(repoID string, path string) string {
return repoID + "\x00" + path
}
func ensureRepodata(task models.RPMMirrorTask, localRoot string, meta *MetaManager, logger *util.Logger) {
var repomdPath string
var statErr error
repomdPath = filepath.Join(localRoot, "repodata", "repomd.xml")
_, statErr = os.Stat(repomdPath)
if statErr == nil {
return
}
if logger != nil {
logger.Write("rpm-mirror", util.LOG_INFO, "repodata schedule repo=%s path=%s reason=missing repomd=%s", task.RepoID, task.MirrorPath, repomdPath)
}
meta.Schedule(localRoot)
}
func parseTimeAttr(value string) int64 {
var trimmed string
var parsed int64
var err error
trimmed = strings.TrimSpace(value)
if trimmed == "" {
return 0
}
parsed, err = strconv.ParseInt(trimmed, 10, 64)
if err != nil {
return 0
}
return parsed
}
func shouldReplaceDuplicate(existing mirrorChecksum, newBuildTime int64, newFileTime int64, newChecksum string) bool {
var existingChecksum string
if newBuildTime > existing.BuildTime {
return true
}
if newBuildTime < existing.BuildTime {
return false
}
if newFileTime > existing.FileTime {
return true
}
if newFileTime < existing.FileTime {
return false
}
existingChecksum = strings.TrimSpace(existing.Value)
if existingChecksum == "" && strings.TrimSpace(newChecksum) != "" {
return true
}
return false
}

View File

@@ -1,15 +1,11 @@
package rpm package rpm
import "bufio"
import "bytes"
import "errors"
import "io/fs" import "io/fs"
import "os/exec"
import "path/filepath" import "path/filepath"
import "sort" import "sort"
import "strconv"
import "strings" import "strings"
import "sync"
import repokit "repokit"
type PackageSummary struct { type PackageSummary struct {
Filename string `json:"filename"` Filename string `json:"filename"`
@@ -30,35 +26,23 @@ type PackageDetail struct {
Files []string `json:"files"` Files []string `json:"files"`
Requires []string `json:"requires"` Requires []string `json:"requires"`
Provides []string `json:"provides"` Provides []string `json:"provides"`
Changelogs []PackageChangeLog `json:"changelogs"`
} }
var rpmPath string type PackageChangeLog struct {
var rpmOnce sync.Once Author string `json:"author"`
var rpmErr error Date int64 `json:"date"`
Text string `json:"text"`
func ensureRPM() error {
rpmOnce.Do(func() {
var path string
path, rpmErr = exec.LookPath("rpm")
if rpmErr != nil {
return
}
rpmPath = path
})
return rpmErr
} }
func ListPackages(repoPath string) ([]PackageSummary, error) { func ListPackages(repoPath string) ([]PackageSummary, error) {
var err error
var packages []PackageSummary var packages []PackageSummary
var walkErr error var walkErr error
err = ensureRPM() var err error
if err != nil {
return nil, err
}
walkErr = filepath.WalkDir(repoPath, func(path string, entry fs.DirEntry, entryErr error) error { walkErr = filepath.WalkDir(repoPath, func(path string, entry fs.DirEntry, entryErr error) error {
var lower string var lower string
var rel string var rel string
var pkg *repokit.RpmPackage
var summary PackageSummary var summary PackageSummary
if entryErr != nil { if entryErr != nil {
return entryErr return entryErr
@@ -74,11 +58,11 @@ func ListPackages(repoPath string) ([]PackageSummary, error) {
if err != nil { if err != nil {
return err return err
} }
summary, err = querySummary(path) pkg, err = repokit.RpmPackageFromRpmBase(path, 0)
if err != nil { if err != nil {
return nil return nil
} }
summary.Filename = filepath.ToSlash(rel) summary = packageSummaryFromRepokit(pkg, filepath.ToSlash(rel))
packages = append(packages, summary) packages = append(packages, summary)
return nil return nil
}) })
@@ -86,137 +70,135 @@ func ListPackages(repoPath string) ([]PackageSummary, error) {
return nil, walkErr return nil, walkErr
} }
sort.Slice(packages, func(i int, j int) bool { sort.Slice(packages, func(i int, j int) bool {
if packages[i].Name == packages[j].Name {
return packages[i].Filename < packages[j].Filename
}
return packages[i].Name < packages[j].Name return packages[i].Name < packages[j].Name
}) })
return packages, nil return packages, nil
} }
func GetPackageDetail(repoPath string, filename string) (PackageDetail, error) { func GetPackageDetail(repoPath string, filename string) (PackageDetail, error) {
var err error
var detail PackageDetail var detail PackageDetail
var fullPath string var fullPath string
var data []string var pkg *repokit.RpmPackage
var fileList []string var err error
var requires []string
var provides []string
var buildTime int64
var size int64
err = ensureRPM()
if err != nil {
return detail, err
}
fullPath = filepath.Join(repoPath, filepath.FromSlash(filename)) fullPath = filepath.Join(repoPath, filepath.FromSlash(filename))
data, err = queryFields(fullPath, "%{NAME}\n%{VERSION}\n%{RELEASE}\n%{ARCH}\n%{SUMMARY}\n%{DESCRIPTION}\n%{LICENSE}\n%{URL}\n%{BUILDTIME}\n%{SIZE}\n") pkg, err = repokit.RpmPackageFromRpmBase(fullPath, 256)
if err != nil { if err != nil {
return detail, err return detail, err
} }
if len(data) < 10 { detail = packageDetailFromRepokit(pkg, filename)
return detail, errors.New("rpm query returned incomplete metadata")
}
buildTime, _ = strconv.ParseInt(strings.TrimSpace(data[8]), 10, 64)
size, _ = strconv.ParseInt(strings.TrimSpace(data[9]), 10, 64)
fileList, _ = queryList(fullPath)
requires, _ = queryLines(fullPath, "--requires")
provides, _ = queryLines(fullPath, "--provides")
detail = PackageDetail{
PackageSummary: PackageSummary{
Filename: filename,
Name: strings.TrimSpace(data[0]),
Version: strings.TrimSpace(data[1]),
Release: strings.TrimSpace(data[2]),
Arch: strings.TrimSpace(data[3]),
Summary: strings.TrimSpace(data[4]),
},
Description: strings.TrimSpace(data[5]),
License: strings.TrimSpace(data[6]),
URL: strings.TrimSpace(data[7]),
BuildTime: buildTime,
Size: size,
Files: fileList,
Requires: requires,
Provides: provides,
}
return detail, nil return detail, nil
} }
func querySummary(path string) (PackageSummary, error) { func packageSummaryFromRepokit(pkg *repokit.RpmPackage, filename string) PackageSummary {
var fields []string
var err error
var summary PackageSummary var summary PackageSummary
fields, err = queryFields(path, "%{NAME}\n%{VERSION}\n%{RELEASE}\n%{ARCH}\n%{SUMMARY}\n") summary = PackageSummary{
if err != nil { Filename: filename,
return summary, err Name: strings.TrimSpace(pkg.Name),
Version: strings.TrimSpace(pkg.Version),
Release: strings.TrimSpace(pkg.Release),
Arch: strings.TrimSpace(pkg.Arch),
Summary: strings.TrimSpace(pkg.Summary),
} }
if len(fields) < 5 { return summary
return summary, errors.New("rpm query returned incomplete metadata")
}
summary.Name = strings.TrimSpace(fields[0])
summary.Version = strings.TrimSpace(fields[1])
summary.Release = strings.TrimSpace(fields[2])
summary.Arch = strings.TrimSpace(fields[3])
summary.Summary = strings.TrimSpace(fields[4])
return summary, nil
} }
func queryFields(path string, format string) ([]string, error) { func packageDetailFromRepokit(pkg *repokit.RpmPackage, filename string) PackageDetail {
var output []byte
var err error
var list []string
output, err = runRPM(path, "-qp", "--qf", format)
if err != nil {
return nil, err
}
list = strings.Split(strings.TrimSuffix(string(output), "\n"), "\n")
return list, nil
}
func queryList(path string) ([]string, error) {
var output []byte
var err error
var scanner *bufio.Scanner
var files []string var files []string
output, err = runRPM(path, "-qlp") var file repokit.RpmPackageFile
if err != nil { var changelogs []PackageChangeLog
return nil, err var changelog repokit.RpmChangelogEntry
var detail PackageDetail
files = make([]string, 0, len(pkg.Files))
for _, file = range pkg.Files {
if file.FullPath == "" {
continue
}
files = append(files, file.FullPath)
} }
scanner = bufio.NewScanner(bytes.NewReader(output)) changelogs = make([]PackageChangeLog, 0, len(pkg.Changelogs))
for scanner.Scan() { for _, changelog = range pkg.Changelogs {
files = append(files, scanner.Text()) changelogs = append(changelogs, PackageChangeLog{
Author: strings.TrimSpace(changelog.Author),
Date: changelog.Date,
Text: strings.TrimSpace(changelog.Changelog),
})
} }
return files, nil sort.SliceStable(changelogs, func(i int, j int) bool {
return changelogs[i].Date > changelogs[j].Date
})
sort.Strings(files)
detail = PackageDetail{
PackageSummary: packageSummaryFromRepokit(pkg, filename),
Description: strings.TrimSpace(pkg.Description),
License: strings.TrimSpace(pkg.RpmLicense),
URL: strings.TrimSpace(pkg.Url),
BuildTime: pkg.TimeBuild,
Size: pkg.SizePackage,
Files: files,
Requires: dependencyListToStrings(pkg.Requires),
Provides: dependencyListToStrings(pkg.Provides),
Changelogs: changelogs,
}
return detail
} }
func queryLines(path string, flag string) ([]string, error) { func dependencyListToStrings(deps []repokit.RpmDependency) []string {
var output []byte
var err error
var scanner *bufio.Scanner
var lines []string var lines []string
output, err = runRPM(path, "-qp", flag) var dep repokit.RpmDependency
if err != nil { lines = make([]string, 0, len(deps))
return nil, err for _, dep = range deps {
lines = append(lines, dependencyToString(dep))
} }
scanner = bufio.NewScanner(bytes.NewReader(output)) return lines
for scanner.Scan() {
lines = append(lines, scanner.Text())
}
return lines, nil
} }
func runRPM(path string, args ...string) ([]byte, error) { func dependencyToString(dep repokit.RpmDependency) string {
var err error var op string
var cmd *exec.Cmd var evr string
var output []byte var line string
var full []string op = normalizeDependencyOp(dep.Flags)
err = ensureRPM() evr = dependencyEVR(dep)
if err != nil { line = dep.Name
return nil, err if op == "" || evr == "" {
return line
} }
full = append([]string{}, args...) line = line + " " + op + " " + evr
full = append(full, path) return line
cmd = exec.Command(rpmPath, full...) }
output, err = cmd.Output()
if err != nil { func normalizeDependencyOp(flag string) string {
return nil, err switch strings.ToUpper(strings.TrimSpace(flag)) {
} case "LT":
return output, nil return "<"
case "GT":
return ">"
case "EQ":
return "="
case "LE":
return "<="
case "GE":
return ">="
default:
return strings.TrimSpace(flag)
}
}
func dependencyEVR(dep repokit.RpmDependency) string {
var value string
var version string
version = strings.TrimSpace(dep.Version)
if version == "" {
return ""
}
value = version
if strings.TrimSpace(dep.Release) != "" {
value = value + "-" + strings.TrimSpace(dep.Release)
}
if strings.TrimSpace(dep.Epoch) != "" && strings.TrimSpace(dep.Epoch) != "0" {
value = strings.TrimSpace(dep.Epoch) + ":" + value
}
return value
} }

View File

@@ -1,19 +1,96 @@
ALTER TABLE rpm_repo_dirs ADD COLUMN sync_interval_sec INTEGER NOT NULL DEFAULT 300; PRAGMA foreign_keys = OFF;
ALTER TABLE rpm_repo_dirs ADD COLUMN dirty INTEGER NOT NULL DEFAULT 1;
ALTER TABLE rpm_repo_dirs ADD COLUMN next_sync_at INTEGER NOT NULL DEFAULT 0;
ALTER TABLE rpm_repo_dirs ADD COLUMN sync_running INTEGER NOT NULL DEFAULT 0;
ALTER TABLE rpm_repo_dirs ADD COLUMN sync_status TEXT NOT NULL DEFAULT 'idle';
ALTER TABLE rpm_repo_dirs ADD COLUMN sync_error TEXT NOT NULL DEFAULT '';
ALTER TABLE rpm_repo_dirs ADD COLUMN sync_step TEXT NOT NULL DEFAULT '';
ALTER TABLE rpm_repo_dirs ADD COLUMN sync_total INTEGER NOT NULL DEFAULT 0;
ALTER TABLE rpm_repo_dirs ADD COLUMN sync_done INTEGER NOT NULL DEFAULT 0;
ALTER TABLE rpm_repo_dirs ADD COLUMN sync_failed INTEGER NOT NULL DEFAULT 0;
ALTER TABLE rpm_repo_dirs ADD COLUMN sync_deleted INTEGER NOT NULL DEFAULT 0;
ALTER TABLE rpm_repo_dirs ADD COLUMN last_sync_started_at INTEGER NOT NULL DEFAULT 0;
ALTER TABLE rpm_repo_dirs ADD COLUMN last_sync_finished_at INTEGER NOT NULL DEFAULT 0;
ALTER TABLE rpm_repo_dirs ADD COLUMN last_sync_success_at INTEGER NOT NULL DEFAULT 0;
ALTER TABLE rpm_repo_dirs ADD COLUMN last_synced_revision TEXT NOT NULL DEFAULT '';
UPDATE rpm_repo_dirs BEGIN TRANSACTION;
SET dirty = 1, next_sync_at = 0
WHERE mode = 'mirror'; ALTER TABLE rpm_repo_dirs RENAME TO rpm_repo_dirs_old;
CREATE TABLE rpm_repo_dirs (
repo_id TEXT NOT NULL,
path TEXT NOT NULL,
mode TEXT NOT NULL DEFAULT 'local',
remote_url TEXT NOT NULL DEFAULT '',
connect_host TEXT NOT NULL DEFAULT '',
host_header TEXT NOT NULL DEFAULT '',
tls_server_name TEXT NOT NULL DEFAULT '',
tls_insecure_skip_verify INTEGER NOT NULL DEFAULT 0,
sync_interval_sec INTEGER NOT NULL DEFAULT 300,
dirty INTEGER NOT NULL DEFAULT 1,
next_sync_at INTEGER NOT NULL DEFAULT 0,
sync_running INTEGER NOT NULL DEFAULT 0,
sync_status TEXT NOT NULL DEFAULT 'idle',
sync_error TEXT NOT NULL DEFAULT '',
sync_step TEXT NOT NULL DEFAULT '',
sync_total INTEGER NOT NULL DEFAULT 0,
sync_done INTEGER NOT NULL DEFAULT 0,
sync_failed INTEGER NOT NULL DEFAULT 0,
sync_deleted INTEGER NOT NULL DEFAULT 0,
last_sync_started_at INTEGER NOT NULL DEFAULT 0,
last_sync_finished_at INTEGER NOT NULL DEFAULT 0,
last_sync_success_at INTEGER NOT NULL DEFAULT 0,
last_synced_revision TEXT NOT NULL DEFAULT '',
created_at INTEGER NOT NULL,
updated_at INTEGER NOT NULL,
PRIMARY KEY (repo_id, path),
FOREIGN KEY (repo_id) REFERENCES repos(id) ON DELETE CASCADE
);
INSERT INTO rpm_repo_dirs (
repo_id,
path,
mode,
remote_url,
connect_host,
host_header,
tls_server_name,
tls_insecure_skip_verify,
sync_interval_sec,
dirty,
next_sync_at,
sync_running,
sync_status,
sync_error,
sync_step,
sync_total,
sync_done,
sync_failed,
sync_deleted,
last_sync_started_at,
last_sync_finished_at,
last_sync_success_at,
last_synced_revision,
created_at,
updated_at
)
SELECT
repo_id,
path,
mode,
remote_url,
connect_host,
host_header,
tls_server_name,
tls_insecure_skip_verify,
300,
CASE WHEN mode = 'mirror' THEN 1 ELSE 0 END,
CASE WHEN mode = 'mirror' THEN 0 ELSE 0 END,
0,
'idle',
'',
'',
0,
0,
0,
0,
0,
0,
0,
'',
created_at,
updated_at
FROM rpm_repo_dirs_old;
DROP TABLE rpm_repo_dirs_old;
COMMIT;
PRAGMA foreign_keys = ON;

View File

@@ -1,5 +1,99 @@
ALTER TABLE rpm_repo_dirs ADD COLUMN sync_enabled INTEGER NOT NULL DEFAULT 1; PRAGMA foreign_keys = OFF;
UPDATE rpm_repo_dirs BEGIN TRANSACTION;
SET sync_enabled = 1
WHERE mode = 'mirror'; CREATE TABLE rpm_repo_dirs_new (
repo_id TEXT NOT NULL,
path TEXT NOT NULL,
mode TEXT NOT NULL DEFAULT 'local',
remote_url TEXT NOT NULL DEFAULT '',
connect_host TEXT NOT NULL DEFAULT '',
host_header TEXT NOT NULL DEFAULT '',
tls_server_name TEXT NOT NULL DEFAULT '',
tls_insecure_skip_verify INTEGER NOT NULL DEFAULT 0,
sync_interval_sec INTEGER NOT NULL DEFAULT 300,
sync_enabled INTEGER NOT NULL DEFAULT 1,
dirty INTEGER NOT NULL DEFAULT 1,
next_sync_at INTEGER NOT NULL DEFAULT 0,
sync_running INTEGER NOT NULL DEFAULT 0,
sync_status TEXT NOT NULL DEFAULT 'idle',
sync_error TEXT NOT NULL DEFAULT '',
sync_step TEXT NOT NULL DEFAULT '',
sync_total INTEGER NOT NULL DEFAULT 0,
sync_done INTEGER NOT NULL DEFAULT 0,
sync_failed INTEGER NOT NULL DEFAULT 0,
sync_deleted INTEGER NOT NULL DEFAULT 0,
last_sync_started_at INTEGER NOT NULL DEFAULT 0,
last_sync_finished_at INTEGER NOT NULL DEFAULT 0,
last_sync_success_at INTEGER NOT NULL DEFAULT 0,
last_synced_revision TEXT NOT NULL DEFAULT '',
created_at INTEGER NOT NULL,
updated_at INTEGER NOT NULL,
PRIMARY KEY (repo_id, path),
FOREIGN KEY (repo_id) REFERENCES repos(id) ON DELETE CASCADE
);
INSERT INTO rpm_repo_dirs_new (
repo_id,
path,
mode,
remote_url,
connect_host,
host_header,
tls_server_name,
tls_insecure_skip_verify,
sync_interval_sec,
sync_enabled,
dirty,
next_sync_at,
sync_running,
sync_status,
sync_error,
sync_step,
sync_total,
sync_done,
sync_failed,
sync_deleted,
last_sync_started_at,
last_sync_finished_at,
last_sync_success_at,
last_synced_revision,
created_at,
updated_at
)
SELECT
repo_id,
path,
mode,
remote_url,
connect_host,
host_header,
tls_server_name,
tls_insecure_skip_verify,
sync_interval_sec,
1,
dirty,
next_sync_at,
sync_running,
sync_status,
sync_error,
sync_step,
sync_total,
sync_done,
sync_failed,
sync_deleted,
last_sync_started_at,
last_sync_finished_at,
last_sync_success_at,
last_synced_revision,
created_at,
updated_at
FROM rpm_repo_dirs;
DROP TABLE rpm_repo_dirs;
ALTER TABLE rpm_repo_dirs_new RENAME TO rpm_repo_dirs;
COMMIT;
PRAGMA foreign_keys = ON;

View File

@@ -0,0 +1,102 @@
PRAGMA foreign_keys = OFF;
BEGIN TRANSACTION;
CREATE TABLE rpm_repo_dirs_new (
repo_id TEXT NOT NULL,
path TEXT NOT NULL,
mode TEXT NOT NULL DEFAULT 'local',
allow_delete INTEGER NOT NULL DEFAULT 0,
remote_url TEXT NOT NULL DEFAULT '',
connect_host TEXT NOT NULL DEFAULT '',
host_header TEXT NOT NULL DEFAULT '',
tls_server_name TEXT NOT NULL DEFAULT '',
tls_insecure_skip_verify INTEGER NOT NULL DEFAULT 0,
sync_interval_sec INTEGER NOT NULL DEFAULT 300,
sync_enabled INTEGER NOT NULL DEFAULT 1,
dirty INTEGER NOT NULL DEFAULT 1,
next_sync_at INTEGER NOT NULL DEFAULT 0,
sync_running INTEGER NOT NULL DEFAULT 0,
sync_status TEXT NOT NULL DEFAULT 'idle',
sync_error TEXT NOT NULL DEFAULT '',
sync_step TEXT NOT NULL DEFAULT '',
sync_total INTEGER NOT NULL DEFAULT 0,
sync_done INTEGER NOT NULL DEFAULT 0,
sync_failed INTEGER NOT NULL DEFAULT 0,
sync_deleted INTEGER NOT NULL DEFAULT 0,
last_sync_started_at INTEGER NOT NULL DEFAULT 0,
last_sync_finished_at INTEGER NOT NULL DEFAULT 0,
last_sync_success_at INTEGER NOT NULL DEFAULT 0,
last_synced_revision TEXT NOT NULL DEFAULT '',
created_at INTEGER NOT NULL,
updated_at INTEGER NOT NULL,
PRIMARY KEY (repo_id, path),
FOREIGN KEY (repo_id) REFERENCES repos(id) ON DELETE CASCADE
);
INSERT INTO rpm_repo_dirs_new (
repo_id,
path,
mode,
allow_delete,
remote_url,
connect_host,
host_header,
tls_server_name,
tls_insecure_skip_verify,
sync_interval_sec,
sync_enabled,
dirty,
next_sync_at,
sync_running,
sync_status,
sync_error,
sync_step,
sync_total,
sync_done,
sync_failed,
sync_deleted,
last_sync_started_at,
last_sync_finished_at,
last_sync_success_at,
last_synced_revision,
created_at,
updated_at
)
SELECT
repo_id,
path,
mode,
0,
remote_url,
connect_host,
host_header,
tls_server_name,
tls_insecure_skip_verify,
sync_interval_sec,
sync_enabled,
dirty,
next_sync_at,
sync_running,
sync_status,
sync_error,
sync_step,
sync_total,
sync_done,
sync_failed,
sync_deleted,
last_sync_started_at,
last_sync_finished_at,
last_sync_success_at,
last_synced_revision,
created_at,
updated_at
FROM rpm_repo_dirs;
DROP TABLE rpm_repo_dirs;
ALTER TABLE rpm_repo_dirs_new RENAME TO rpm_repo_dirs;
COMMIT;
PRAGMA foreign_keys = ON;

View File

@@ -65,6 +65,7 @@ export interface RpmPackageDetail extends RpmPackageSummary {
files: string[] files: string[]
requires: string[] requires: string[]
provides: string[] provides: string[]
changelogs: { author: string; date: number; text: string }[]
} }
export interface DockerTagInfo { export interface DockerTagInfo {
@@ -104,6 +105,7 @@ export interface RpmRepoDirConfig {
repo_id: string repo_id: string
path: string path: string
mode: 'local' | 'mirror' | '' mode: 'local' | 'mirror' | ''
allow_delete: boolean
remote_url: string remote_url: string
connect_host: string connect_host: string
host_header: string host_header: string
@@ -665,6 +667,7 @@ export const api = {
type: string, type: string,
parent?: string, parent?: string,
mode?: 'local' | 'mirror', mode?: 'local' | 'mirror',
allow_delete?: boolean,
remote_url?: string, remote_url?: string,
connect_host?: string, connect_host?: string,
host_header?: string, host_header?: string,
@@ -674,18 +677,19 @@ export const api = {
) => ) =>
request<{ status: string }>(`/api/repos/${repoId}/rpm/subdirs`, { request<{ status: string }>(`/api/repos/${repoId}/rpm/subdirs`, {
method: 'POST', method: 'POST',
body: JSON.stringify({ name, type, parent, mode, remote_url, connect_host, host_header, tls_server_name, tls_insecure_skip_verify, sync_interval_sec }) body: JSON.stringify({ name, type, parent, mode, allow_delete, remote_url, connect_host, host_header, tls_server_name, tls_insecure_skip_verify, sync_interval_sec })
}), }),
getRpmSubdir: (repoId: string, path: string) => { getRpmSubdir: (repoId: string, path: string) => {
const params = new URLSearchParams() const params = new URLSearchParams()
params.set('path', path) params.set('path', path)
return request<RpmRepoDirConfig>(`/api/repos/${repoId}/rpm/subdir?${params.toString()}`) return request<RpmRepoDirConfig>(`/api/repos/${repoId}/rpm/subdir?${params.toString()}`)
}, },
renameRpmSubdir: ( updateRpmSubdir: (
repoId: string, repoId: string,
path: string, path: string,
name: string, name?: string,
mode?: 'local' | 'mirror', mode?: 'local' | 'mirror',
allow_delete?: boolean,
remote_url?: string, remote_url?: string,
connect_host?: string, connect_host?: string,
host_header?: string, host_header?: string,
@@ -693,9 +697,9 @@ export const api = {
tls_insecure_skip_verify?: boolean, tls_insecure_skip_verify?: boolean,
sync_interval_sec?: number sync_interval_sec?: number
) => ) =>
request<{ status: string }>(`/api/repos/${repoId}/rpm/subdir/rename`, { request<{ status: string }>(`/api/repos/${repoId}/rpm/subdir/update`, {
method: 'POST', method: 'POST',
body: JSON.stringify({ path, name, mode, remote_url, connect_host, host_header, tls_server_name, tls_insecure_skip_verify, sync_interval_sec }) body: JSON.stringify({ path, name, mode, allow_delete, remote_url, connect_host, host_header, tls_server_name, tls_insecure_skip_verify, sync_interval_sec })
}), }),
syncRpmSubdir: (repoId: string, path: string) => { syncRpmSubdir: (repoId: string, path: string) => {
const params = new URLSearchParams() const params = new URLSearchParams()
@@ -718,6 +722,13 @@ export const api = {
method: 'POST' method: 'POST'
}) })
}, },
rebuildRpmSubdirMetadata: (repoId: string, path: string) => {
const params = new URLSearchParams()
params.set('path', path)
return request<{ status: string }>(`/api/repos/${repoId}/rpm/subdir/rebuild-metadata?${params.toString()}`, {
method: 'POST'
})
},
listRpmMirrorRuns: (repoId: string, path: string, limit?: number) => { listRpmMirrorRuns: (repoId: string, path: string, limit?: number) => {
const params = new URLSearchParams() const params = new URLSearchParams()
params.set('path', path) params.set('path', path)

View File

@@ -33,6 +33,7 @@ export default function RepoGitDetailPage(props: RepoGitDetailPageProps) {
const [defaultBranch, setDefaultBranch] = useState<string>('') const [defaultBranch, setDefaultBranch] = useState<string>('')
const [tree, setTree] = useState<RepoTreeEntry[]>([]) const [tree, setTree] = useState<RepoTreeEntry[]>([])
const [treeError, setTreeError] = useState<string | null>(null) const [treeError, setTreeError] = useState<string | null>(null)
const [treeReloadTick, setTreeReloadTick] = useState(0)
const [fileQuery, setFileQuery] = useState('') const [fileQuery, setFileQuery] = useState('')
const [path, setPath] = useState('') const [path, setPath] = useState('')
const [pathSegments, setPathSegments] = useState<string[]>([]) const [pathSegments, setPathSegments] = useState<string[]>([])
@@ -142,7 +143,7 @@ export default function RepoGitDetailPage(props: RepoGitDetailPageProps) {
if (!ref && branches.length === 0) return if (!ref && branches.length === 0) return
if (!repo) return if (!repo) return
if (repo && repo.type && repo.type !== 'git') return if (repo && repo.type && repo.type !== 'git') return
const key = `${repoId}:${ref}:${path}` const key = `${repoId}:${ref}:${path}:${treeReloadTick}`
if (lastTreeKey.current === key) return if (lastTreeKey.current === key) return
lastTreeKey.current = key lastTreeKey.current = key
api.listRepoTree(repoId, ref || undefined, path) api.listRepoTree(repoId, ref || undefined, path)
@@ -163,7 +164,7 @@ export default function RepoGitDetailPage(props: RepoGitDetailPageProps) {
setSelectedCommit(null) setSelectedCommit(null)
} }
}) })
}, [repoId, ref, path, branches]) }, [repoId, ref, path, branches, treeReloadTick])
useEffect(() => { useEffect(() => {
if (!repoId || !ref) { if (!repoId || !ref) {
@@ -270,6 +271,10 @@ export default function RepoGitDetailPage(props: RepoGitDetailPageProps) {
} }
const handleBreadcrumb = (nextPath: string) => { const handleBreadcrumb = (nextPath: string) => {
if (nextPath === path) {
setTreeReloadTick((prev) => prev + 1)
return
}
setPath(nextPath) setPath(nextPath)
if (nextPath === '') { if (nextPath === '') {
setPathSegments([]) setPathSegments([])

View File

@@ -54,12 +54,13 @@ export default function RepoRpmDetailPage(props: RepoRpmDetailPageProps) {
const [rpmMetaContent, setRpmMetaContent] = useState('') const [rpmMetaContent, setRpmMetaContent] = useState('')
const [rpmMetaError, setRpmMetaError] = useState<string | null>(null) const [rpmMetaError, setRpmMetaError] = useState<string | null>(null)
const [rpmMetaLoading, setRpmMetaLoading] = useState(false) const [rpmMetaLoading, setRpmMetaLoading] = useState(false)
const [rpmTab, setRpmTab] = useState<'meta' | 'files'>('meta') const [rpmTab, setRpmTab] = useState<'meta' | 'files' | 'changelog'>('meta')
const [sidebarOpen, setSidebarOpen] = useState(true) const [sidebarOpen, setSidebarOpen] = useState(true)
const [subdirOpen, setSubdirOpen] = useState(false) const [subdirOpen, setSubdirOpen] = useState(false)
const [subdirName, setSubdirName] = useState('') const [subdirName, setSubdirName] = useState('')
const [subdirType, setSubdirType] = useState<'container' | 'repo'>('container') const [subdirType, setSubdirType] = useState<'container' | 'repo'>('container')
const [subdirMode, setSubdirMode] = useState<'local' | 'mirror'>('local') const [subdirMode, setSubdirMode] = useState<'local' | 'mirror'>('local')
const [subdirAllowDelete, setSubdirAllowDelete] = useState(false)
const [subdirSyncIntervalSec, setSubdirSyncIntervalSec] = useState('300') const [subdirSyncIntervalSec, setSubdirSyncIntervalSec] = useState('300')
const [subdirRemoteURL, setSubdirRemoteURL] = useState('') const [subdirRemoteURL, setSubdirRemoteURL] = useState('')
const [subdirConnectHost, setSubdirConnectHost] = useState('') const [subdirConnectHost, setSubdirConnectHost] = useState('')
@@ -86,6 +87,7 @@ export default function RepoRpmDetailPage(props: RepoRpmDetailPageProps) {
const [renameNewName, setRenameNewName] = useState('') const [renameNewName, setRenameNewName] = useState('')
const [renameIsRepoDir, setRenameIsRepoDir] = useState(false) const [renameIsRepoDir, setRenameIsRepoDir] = useState(false)
const [renameMode, setRenameMode] = useState<'local' | 'mirror'>('local') const [renameMode, setRenameMode] = useState<'local' | 'mirror'>('local')
const [renameAllowDelete, setRenameAllowDelete] = useState(false)
const [renameSyncIntervalSec, setRenameSyncIntervalSec] = useState('300') const [renameSyncIntervalSec, setRenameSyncIntervalSec] = useState('300')
const [renameRemoteURL, setRenameRemoteURL] = useState('') const [renameRemoteURL, setRenameRemoteURL] = useState('')
const [renameConnectHost, setRenameConnectHost] = useState('') const [renameConnectHost, setRenameConnectHost] = useState('')
@@ -112,8 +114,10 @@ export default function RepoRpmDetailPage(props: RepoRpmDetailPageProps) {
const [clearRunsConfirmOpen, setClearRunsConfirmOpen] = useState(false) const [clearRunsConfirmOpen, setClearRunsConfirmOpen] = useState(false)
const [rpmPath, setRpmPath] = useState('') const [rpmPath, setRpmPath] = useState('')
const [rpmPathSegments, setRpmPathSegments] = useState<string[]>([]) const [rpmPathSegments, setRpmPathSegments] = useState<string[]>([])
const [rpmFileQuery, setRpmFileQuery] = useState('')
const [rpmTree, setRpmTree] = useState<RpmTreeEntry[]>([]) const [rpmTree, setRpmTree] = useState<RpmTreeEntry[]>([])
const [rpmTreeError, setRpmTreeError] = useState<string | null>(null) const [rpmTreeError, setRpmTreeError] = useState<string | null>(null)
const [rpmTreeReloadTick, setRpmTreeReloadTick] = useState(0)
const [rpmSelectedEntry, setRpmSelectedEntry] = useState<RpmTreeEntry | null>(null) const [rpmSelectedEntry, setRpmSelectedEntry] = useState<RpmTreeEntry | null>(null)
const [canWrite, setCanWrite] = useState(false) const [canWrite, setCanWrite] = useState(false)
const initRepoRef = useRef<string | null>(null) const initRepoRef = useRef<string | null>(null)
@@ -185,7 +189,7 @@ export default function RepoRpmDetailPage(props: RepoRpmDetailPageProps) {
setRpmTreeError(message) setRpmTreeError(message)
setRpmTree([]) setRpmTree([])
}) })
}, [repoId, repo, rpmPath]) }, [repoId, repo, rpmPath, rpmTreeReloadTick])
const handleSelectRpm = async (pkg: RpmPackageSummary) => { const handleSelectRpm = async (pkg: RpmPackageSummary) => {
if (!repoId) return if (!repoId) return
@@ -193,7 +197,6 @@ export default function RepoRpmDetailPage(props: RepoRpmDetailPageProps) {
setRpmDetail(null) setRpmDetail(null)
setRpmDetailLoading(true) setRpmDetailLoading(true)
setRpmError(null) setRpmError(null)
setRpmTab('meta')
try { try {
const detail = await api.getRpmPackage(repoId, pkg.filename) const detail = await api.getRpmPackage(repoId, pkg.filename)
setRpmSelected({ setRpmSelected({
@@ -264,9 +267,12 @@ export default function RepoRpmDetailPage(props: RepoRpmDetailPageProps) {
setSubdirError(null) setSubdirError(null)
setSubdirSaving(true) setSubdirSaving(true)
try { try {
syncIntervalSec = Number(subdirSyncIntervalSec) syncIntervalSec = 0
if (!Number.isFinite(syncIntervalSec) || syncIntervalSec <= 0) { if (subdirType === 'repo' && subdirMode === 'mirror') {
syncIntervalSec = 300 syncIntervalSec = Number(subdirSyncIntervalSec)
if (!Number.isFinite(syncIntervalSec) || syncIntervalSec <= 0) {
syncIntervalSec = 300
}
} }
const parent = rpmPath const parent = rpmPath
await api.createRpmSubdir( await api.createRpmSubdir(
@@ -275,6 +281,7 @@ export default function RepoRpmDetailPage(props: RepoRpmDetailPageProps) {
subdirType, subdirType,
parent, parent,
subdirMode, subdirMode,
subdirAllowDelete,
subdirRemoteURL.trim(), subdirRemoteURL.trim(),
subdirConnectHost.trim(), subdirConnectHost.trim(),
subdirHostHeader.trim(), subdirHostHeader.trim(),
@@ -295,6 +302,7 @@ export default function RepoRpmDetailPage(props: RepoRpmDetailPageProps) {
setSubdirName('') setSubdirName('')
setSubdirType('container') setSubdirType('container')
setSubdirMode('local') setSubdirMode('local')
setSubdirAllowDelete(false)
setSubdirSyncIntervalSec('300') setSubdirSyncIntervalSec('300')
setSubdirRemoteURL('') setSubdirRemoteURL('')
setSubdirConnectHost('') setSubdirConnectHost('')
@@ -359,21 +367,25 @@ export default function RepoRpmDetailPage(props: RepoRpmDetailPageProps) {
setRenameError(null) setRenameError(null)
setRenaming(true) setRenaming(true)
try { try {
syncIntervalSec = Number(renameSyncIntervalSec) syncIntervalSec = 0
if (!Number.isFinite(syncIntervalSec) || syncIntervalSec <= 0) { if (renameIsRepoDir && renameMode === 'mirror') {
syncIntervalSec = 300 syncIntervalSec = Number(renameSyncIntervalSec)
if (!Number.isFinite(syncIntervalSec) || syncIntervalSec <= 0) {
syncIntervalSec = 300
}
} }
await api.renameRpmSubdir( await api.updateRpmSubdir(
repoId, repoId,
renamePath, renamePath,
renameNewName.trim(), renameNewName.trim(),
renameIsRepoDir ? renameMode : undefined, renameIsRepoDir ? renameMode : undefined,
renameIsRepoDir ? renameAllowDelete : undefined,
renameIsRepoDir ? renameRemoteURL.trim() : undefined, renameIsRepoDir ? renameRemoteURL.trim() : undefined,
renameIsRepoDir ? renameConnectHost.trim() : undefined, renameIsRepoDir ? renameConnectHost.trim() : undefined,
renameIsRepoDir ? renameHostHeader.trim() : undefined, renameIsRepoDir ? renameHostHeader.trim() : undefined,
renameIsRepoDir ? renameTLSServerName.trim() : undefined, renameIsRepoDir ? renameTLSServerName.trim() : undefined,
renameIsRepoDir ? renameTLSInsecureSkipVerify : undefined, renameIsRepoDir ? renameTLSInsecureSkipVerify : undefined,
renameIsRepoDir ? syncIntervalSec : undefined renameIsRepoDir && renameMode === 'mirror' ? syncIntervalSec : undefined
) )
setRenameOpen(false) setRenameOpen(false)
setRenamePath('') setRenamePath('')
@@ -381,6 +393,7 @@ export default function RepoRpmDetailPage(props: RepoRpmDetailPageProps) {
setRenameNewName('') setRenameNewName('')
setRenameIsRepoDir(false) setRenameIsRepoDir(false)
setRenameMode('local') setRenameMode('local')
setRenameAllowDelete(false)
setRenameSyncIntervalSec('300') setRenameSyncIntervalSec('300')
setRenameRemoteURL('') setRenameRemoteURL('')
setRenameConnectHost('') setRenameConnectHost('')
@@ -450,26 +463,6 @@ export default function RepoRpmDetailPage(props: RepoRpmDetailPageProps) {
} }
} }
// Schedules an immediate mirror sync for the subdir currently shown in the status panel,
// then optimistically marks the sync as scheduled and refreshes the recent run list.
const handleStatusSyncNow = async () => {
  // Only applicable when a mirror-mode subdir is selected.
  if (!repoId || !statusPath || statusMode !== 'mirror') return
  setStatusSyncBusy(true)
  setStatusError(null)
  try {
    await api.syncRpmSubdir(repoId, statusPath)
    // Optimistic UI update: the request was accepted, so reflect a queued/scheduled state
    // without waiting for the next status poll.
    setStatusSyncEnabled(true)
    setStatusSyncStatus('scheduled')
    setStatusSyncStep('queued')
    setStatusSyncError('')
    // Pull the latest 10 runs so the newly scheduled run shows up in the history list.
    const runs = await api.listRpmMirrorRuns(repoId, statusPath, 10)
    setStatusRuns(Array.isArray(runs) ? runs : [])
  } catch (err) {
    const message = err instanceof Error ? err.message : 'Failed to schedule sync'
    setStatusError(message)
  } finally {
    // Always release the busy flag so the buttons re-enable.
    setStatusSyncBusy(false)
  }
}
const handleStatusToggleSyncEnabled = async () => { const handleStatusToggleSyncEnabled = async () => {
if (!repoId || !statusPath || statusMode !== 'mirror') return if (!repoId || !statusPath || statusMode !== 'mirror') return
setStatusSyncBusy(true) setStatusSyncBusy(true)
@@ -477,13 +470,10 @@ export default function RepoRpmDetailPage(props: RepoRpmDetailPageProps) {
try { try {
if (statusSyncEnabled) { if (statusSyncEnabled) {
await api.suspendRpmSubdir(repoId, statusPath) await api.suspendRpmSubdir(repoId, statusPath)
setStatusSyncEnabled(false)
setStatusSyncStatus('suspended')
} else { } else {
await api.resumeRpmSubdir(repoId, statusPath) await api.resumeRpmSubdir(repoId, statusPath)
setStatusSyncEnabled(true)
setStatusSyncStatus('scheduled')
} }
await loadStatus(statusPath)
} catch (err) { } catch (err) {
const message = err instanceof Error ? err.message : 'Failed to change mirror sync state' const message = err instanceof Error ? err.message : 'Failed to change mirror sync state'
setStatusError(message) setStatusError(message)
@@ -513,21 +503,41 @@ export default function RepoRpmDetailPage(props: RepoRpmDetailPageProps) {
} }
} }
// Requests a metadata rebuild for the mirror subdir currently shown in the status panel.
// Errors are surfaced via the panel's error state; the busy flag gates the action buttons.
const handleStatusRebuildMetadata = async () => {
  // Only meaningful for a selected mirror-mode subdir.
  const ready = Boolean(repoId) && Boolean(statusPath) && statusMode === 'mirror'
  if (!ready) return
  setStatusSyncBusy(true)
  setStatusError(null)
  try {
    await api.rebuildRpmSubdirMetadata(repoId, statusPath)
  } catch (err) {
    setStatusError(err instanceof Error ? err.message : 'Failed to schedule metadata rebuild')
  } finally {
    setStatusSyncBusy(false)
  }
}
const handleRpmBack = () => { const handleRpmBack = () => {
if (!rpmPath) return if (!rpmPath) return
const nextSegments = rpmPathSegments.slice(0, -1) const nextSegments = rpmPathSegments.slice(0, -1)
setRpmPath(nextSegments.join('/')) setRpmPath(nextSegments.join('/'))
setRpmPathSegments(nextSegments) setRpmPathSegments(nextSegments)
setRpmFileQuery('')
setRpmSelectedEntry(null) setRpmSelectedEntry(null)
} }
const handleRpmBreadcrumb = (nextPath: string) => { const handleRpmBreadcrumb = (nextPath: string) => {
if (nextPath === rpmPath) {
setRpmTreeReloadTick((prev) => prev + 1)
return
}
setRpmPath(nextPath) setRpmPath(nextPath)
if (nextPath === '') { if (nextPath === '') {
setRpmPathSegments([]) setRpmPathSegments([])
} else { } else {
setRpmPathSegments(nextPath.split('/').filter(Boolean)) setRpmPathSegments(nextPath.split('/').filter(Boolean))
} }
setRpmFileQuery('')
setRpmSelectedEntry(null) setRpmSelectedEntry(null)
} }
@@ -597,6 +607,7 @@ export default function RepoRpmDetailPage(props: RepoRpmDetailPageProps) {
if (entry.type === 'dir') { if (entry.type === 'dir') {
setRpmPath(entry.path) setRpmPath(entry.path)
setRpmPathSegments(entry.path.split('/').filter(Boolean)) setRpmPathSegments(entry.path.split('/').filter(Boolean))
setRpmFileQuery('')
setRpmSelectedEntry(null) setRpmSelectedEntry(null)
return return
} }
@@ -639,6 +650,84 @@ export default function RepoRpmDetailPage(props: RepoRpmDetailPageProps) {
} }
const rpmPathParts = rpmPathSegments const rpmPathParts = rpmPathSegments
const normalizedQuery = rpmFileQuery.trim().toLowerCase()
// Translates a shell-style glob (supporting `*`, `?` and `[...]`/`[!...]` character
// classes) into an anchored regular-expression source string. An unterminated `[`
// is treated as a literal bracket, and the characters following it are re-scanned.
const globToRegex = (query: string) => {
  const SPECIALS = '.+^$(){}|\\'
  let pattern = '^'
  let pos = 0
  while (pos < query.length) {
    const token = query[pos]
    if (token === '*') {
      // `*` matches any run of characters (including empty).
      pattern += '.*'
      pos += 1
    } else if (token === '?') {
      // `?` matches exactly one character.
      pattern += '.'
      pos += 1
    } else if (token === '[') {
      const openAt = pos
      pos += 1
      if (pos >= query.length) {
        // Trailing `[` with nothing after it: emit a literal bracket and stop.
        pattern += '\\['
        break
      }
      // `!` or `^` directly after `[` negates the class.
      let negated = false
      if (query[pos] === '!' || query[pos] === '^') {
        negated = true
        pos += 1
      }
      let body = ''
      let closed = false
      while (pos < query.length) {
        const inner = query[pos]
        pos += 1
        if (inner === ']') {
          closed = true
          break
        }
        // A backslash inside the class is kept as a literal backslash.
        body += inner === '\\' ? '\\\\' : inner
      }
      if (closed) {
        pattern += negated ? `[^${body}]` : `[${body}]`
      } else {
        // Never saw a closing `]`: treat the `[` as literal and re-scan what followed it.
        pattern += '\\['
        pos = openAt + 1
      }
    } else {
      // Escape regex metacharacters; everything else passes through verbatim.
      pattern += SPECIALS.includes(token) ? `\\${token}` : token
      pos += 1
    }
  }
  return pattern + '$'
}
// Case-insensitive file-name match. Queries containing glob metacharacters are
// matched as globs (falling back to substring search if the derived regex is
// invalid); plain queries are matched as lowercase substrings. `query` is
// expected to be pre-lowercased by the caller. An empty query matches everything.
const matchesFileQuery = (name: string, query: string) => {
  if (!query) return true
  const looksLikeGlob = ['*', '?', '['].some((meta) => query.includes(meta))
  if (!looksLikeGlob) {
    return name.toLowerCase().includes(query)
  }
  try {
    return new RegExp(globToRegex(query), 'i').test(name)
  } catch {
    return name.toLowerCase().includes(query)
  }
}
const filteredTree = normalizedQuery
? rpmTree.filter((entry) => matchesFileQuery(entry.name, normalizedQuery))
: rpmTree
return ( return (
<Box> <Box>
@@ -694,6 +783,7 @@ export default function RepoRpmDetailPage(props: RepoRpmDetailPageProps) {
setSubdirName('') setSubdirName('')
setSubdirType('container') setSubdirType('container')
setSubdirMode('local') setSubdirMode('local')
setSubdirAllowDelete(false)
setSubdirSyncIntervalSec('300') setSubdirSyncIntervalSec('300')
setSubdirRemoteURL('') setSubdirRemoteURL('')
setSubdirConnectHost('') setSubdirConnectHost('')
@@ -745,6 +835,14 @@ export default function RepoRpmDetailPage(props: RepoRpmDetailPageProps) {
) )
})} })}
</Box> </Box>
<TextField
size="small"
placeholder="Search files"
value={rpmFileQuery}
onChange={(event) => setRpmFileQuery(event.target.value)}
fullWidth
sx={{ mb: 1, px: 0.5 }}
/>
{rpmTreeError ? ( {rpmTreeError ? (
<Alert severity="warning" sx={{ mb: 1 }}> <Alert severity="warning" sx={{ mb: 1 }}>
{rpmTreeError} {rpmTreeError}
@@ -765,7 +863,7 @@ export default function RepoRpmDetailPage(props: RepoRpmDetailPageProps) {
</ListItemButton> </ListItemButton>
</ListItem> </ListItem>
) : null} ) : null}
{rpmTree.map((entry) => ( {filteredTree.map((entry) => (
<ListItem key={entry.path} disablePadding> <ListItem key={entry.path} disablePadding>
<ListItemButton onClick={() => handleRpmEntry(entry)}> <ListItemButton onClick={() => handleRpmEntry(entry)}>
<ListItemText <ListItemText
@@ -814,6 +912,7 @@ export default function RepoRpmDetailPage(props: RepoRpmDetailPageProps) {
setRenameNewName(entry.name) setRenameNewName(entry.name)
setRenameIsRepoDir(Boolean(entry.is_repo_dir)) setRenameIsRepoDir(Boolean(entry.is_repo_dir))
setRenameMode(entry.repo_mode === 'mirror' ? 'mirror' : 'local') setRenameMode(entry.repo_mode === 'mirror' ? 'mirror' : 'local')
setRenameAllowDelete(false)
setRenameSyncIntervalSec('300') setRenameSyncIntervalSec('300')
setRenameRemoteURL('') setRenameRemoteURL('')
setRenameConnectHost('') setRenameConnectHost('')
@@ -824,6 +923,7 @@ export default function RepoRpmDetailPage(props: RepoRpmDetailPageProps) {
try { try {
const cfg = await api.getRpmSubdir(repoId, entry.path) const cfg = await api.getRpmSubdir(repoId, entry.path)
setRenameMode(cfg.mode === 'mirror' ? 'mirror' : 'local') setRenameMode(cfg.mode === 'mirror' ? 'mirror' : 'local')
setRenameAllowDelete(Boolean(cfg.allow_delete))
setRenameSyncIntervalSec(String(cfg.sync_interval_sec || 300)) setRenameSyncIntervalSec(String(cfg.sync_interval_sec || 300))
setRenameRemoteURL(cfg.remote_url || '') setRenameRemoteURL(cfg.remote_url || '')
setRenameConnectHost(cfg.connect_host || '') setRenameConnectHost(cfg.connect_host || '')
@@ -880,9 +980,9 @@ export default function RepoRpmDetailPage(props: RepoRpmDetailPageProps) {
) : null} ) : null}
</ListItem> </ListItem>
))} ))}
{!rpmTree.length && !rpmTreeError ? ( {!filteredTree.length && !rpmTreeError ? (
<Typography variant="body2" color="text.secondary" sx={{ px: 1, py: 1 }}> <Typography variant="body2" color="text.secondary" sx={{ px: 1, py: 1 }}>
No files found. {normalizedQuery ? 'No matching files.' : 'No files found.'}
</Typography> </Typography>
) : null} ) : null}
</List> </List>
@@ -910,6 +1010,7 @@ export default function RepoRpmDetailPage(props: RepoRpmDetailPageProps) {
<Tabs value={rpmTab} onChange={(_, value) => setRpmTab(value)}> <Tabs value={rpmTab} onChange={(_, value) => setRpmTab(value)}>
<Tab label="Metadata" value="meta" /> <Tab label="Metadata" value="meta" />
<Tab label="Files" value="files" /> <Tab label="Files" value="files" />
<Tab label="Change Log" value="changelog" />
</Tabs> </Tabs>
{rpmDetailLoading ? ( {rpmDetailLoading ? (
<Typography variant="body2" color="text.secondary" sx={{ mt: 1 }}> <Typography variant="body2" color="text.secondary" sx={{ mt: 1 }}>
@@ -952,10 +1053,10 @@ export default function RepoRpmDetailPage(props: RepoRpmDetailPageProps) {
Build Time: {rpmDetail.build_time ? new Date(rpmDetail.build_time * 1000).toLocaleString() : 'n/a'} Build Time: {rpmDetail.build_time ? new Date(rpmDetail.build_time * 1000).toLocaleString() : 'n/a'}
</Typography> </Typography>
<Typography variant="body2">Size: {rpmDetail.size ? `${rpmDetail.size} bytes` : 'n/a'}</Typography> <Typography variant="body2">Size: {rpmDetail.size ? `${rpmDetail.size} bytes` : 'n/a'}</Typography>
{rpmDetail.requires.length ? ( {Array.isArray(rpmDetail.requires) && rpmDetail.requires.length ? (
<Typography variant="body2">Requires: {rpmDetail.requires.join(', ')}</Typography> <Typography variant="body2">Requires: {rpmDetail.requires.join(', ')}</Typography>
) : null} ) : null}
{rpmDetail.provides.length ? ( {Array.isArray(rpmDetail.provides) && rpmDetail.provides.length ? (
<Typography variant="body2">Provides: {rpmDetail.provides.join(', ')}</Typography> <Typography variant="body2">Provides: {rpmDetail.provides.join(', ')}</Typography>
) : null} ) : null}
{rpmDetail.description ? ( {rpmDetail.description ? (
@@ -968,13 +1069,36 @@ export default function RepoRpmDetailPage(props: RepoRpmDetailPageProps) {
{rpmDetail && rpmTab === 'files' ? ( {rpmDetail && rpmTab === 'files' ? (
<Box sx={{ mt: 1, maxHeight: '60vh', overflow: 'auto' }}> <Box sx={{ mt: 1, maxHeight: '60vh', overflow: 'auto' }}>
<List dense> <List dense>
{rpmDetail.files.map((file) => ( {(Array.isArray(rpmDetail.files) ? rpmDetail.files : []).map((file) => (
<ListItem key={file}> <ListItem key={file}>
<ListItemText primary={file} /> <ListItemText primary={file} />
</ListItem> </ListItem>
))} ))}
</List> </List>
</Box> </Box>
) : null}
{rpmDetail && rpmTab === 'changelog' ? (
<Box sx={{ mt: 1, maxHeight: '60vh', overflow: 'auto' }}>
{Array.isArray(rpmDetail.changelogs) && rpmDetail.changelogs.length ? (
<List dense>
{rpmDetail.changelogs.map((item, index) => (
<ListItem key={`${item.date}-${index}`} sx={{ display: 'block' }}>
<Typography variant="body2" color="text.secondary">
{item.date ? new Date(item.date * 1000).toLocaleString() : 'n/a'}
{item.author ? ` · ${item.author}` : ''}
</Typography>
<Typography variant="body2" sx={{ whiteSpace: 'pre-wrap' }}>
{item.text || ''}
</Typography>
</ListItem>
))}
</List>
) : (
<Typography variant="body2" color="text.secondary">
No change log entries.
</Typography>
)}
</Box>
) : null} ) : null}
{!rpmDetail && !rpmDetailLoading && rpmError ? ( {!rpmDetail && !rpmDetailLoading && rpmError ? (
<Alert severity="warning" sx={{ mt: 1 }}> <Alert severity="warning" sx={{ mt: 1 }}>
@@ -1058,7 +1182,7 @@ export default function RepoRpmDetailPage(props: RepoRpmDetailPageProps) {
<MenuItem value="mirror">mirror</MenuItem> <MenuItem value="mirror">mirror</MenuItem>
</TextField> </TextField>
) : null} ) : null}
{subdirType === 'repo' ? ( {subdirType === 'repo' && subdirMode === 'mirror' ? (
<TextField <TextField
label="Sync Interval (seconds)" label="Sync Interval (seconds)"
value={subdirSyncIntervalSec} value={subdirSyncIntervalSec}
@@ -1097,6 +1221,10 @@ export default function RepoRpmDetailPage(props: RepoRpmDetailPageProps) {
helperText="Optional SNI/verify server name override" helperText="Optional SNI/verify server name override"
fullWidth fullWidth
/> />
<FormControlLabel
control={<Checkbox checked={subdirAllowDelete} onChange={(event) => setSubdirAllowDelete(event.target.checked)} />}
label="Allow delete (files/container dirs) in mirror subtree"
/>
<FormControlLabel <FormControlLabel
control={ control={
<Checkbox <Checkbox
@@ -1204,10 +1332,7 @@ export default function RepoRpmDetailPage(props: RepoRpmDetailPageProps) {
</Typography> </Typography>
) : null} ) : null}
<Box sx={{ pt: 1 }}> <Box sx={{ pt: 1 }}>
<Button size="small" variant="outlined" onClick={handleStatusSyncNow} disabled={statusSyncBusy}> <Button size="small" variant="outlined" onClick={handleStatusToggleSyncEnabled} disabled={statusSyncBusy}>
{statusSyncBusy ? 'Scheduling...' : 'Sync now'}
</Button>
<Button size="small" variant="outlined" onClick={handleStatusToggleSyncEnabled} disabled={statusSyncBusy} sx={{ ml: 1 }}>
{statusSyncEnabled ? 'Suspend' : 'Resume'} {statusSyncEnabled ? 'Suspend' : 'Resume'}
</Button> </Button>
<Button <Button
@@ -1220,6 +1345,15 @@ export default function RepoRpmDetailPage(props: RepoRpmDetailPageProps) {
> >
Clear runs Clear runs
</Button> </Button>
<Button
size="small"
variant="outlined"
onClick={handleStatusRebuildMetadata}
disabled={statusSyncBusy}
sx={{ ml: 1 }}
>
Rebuild metadata
</Button>
<Button <Button
size="small" size="small"
variant="outlined" variant="outlined"
@@ -1306,7 +1440,7 @@ export default function RepoRpmDetailPage(props: RepoRpmDetailPageProps) {
<MenuItem value="mirror">mirror</MenuItem> <MenuItem value="mirror">mirror</MenuItem>
</TextField> </TextField>
) : null} ) : null}
{renameIsRepoDir ? ( {renameIsRepoDir && renameMode === 'mirror' ? (
<TextField <TextField
label="Sync Interval (seconds)" label="Sync Interval (seconds)"
value={renameSyncIntervalSec} value={renameSyncIntervalSec}
@@ -1343,6 +1477,10 @@ export default function RepoRpmDetailPage(props: RepoRpmDetailPageProps) {
onChange={(event) => setRenameTLSServerName(event.target.value)} onChange={(event) => setRenameTLSServerName(event.target.value)}
fullWidth fullWidth
/> />
<FormControlLabel
control={<Checkbox checked={renameAllowDelete} onChange={(event) => setRenameAllowDelete(event.target.checked)} />}
label="Allow delete (files/container dirs) in mirror subtree"
/>
<FormControlLabel <FormControlLabel
control={<Checkbox checked={renameTLSInsecureSkipVerify} onChange={(event) => setRenameTLSInsecureSkipVerify(event.target.checked)} />} control={<Checkbox checked={renameTLSInsecureSkipVerify} onChange={(event) => setRenameTLSInsecureSkipVerify(event.target.checked)} />}
label="Skip TLS certificate verification" label="Skip TLS certificate verification"