diff --git a/action.yaml b/action.yaml index bf572d6..7afc838 100644 --- a/action.yaml +++ b/action.yaml @@ -19,6 +19,16 @@ action: description: Allow override committed version by current build value type: boolean default: false + - name: playbook-resources + title: Use playbook resources + description: Parse playbooks and propagate used resources only + type: boolean + default: true + - name: commits-after + title: Commits after + description: Use commits only after specific date + type: string + default: "" - name: vault-pass title: Vault password description: Password for Ansible Vault diff --git a/actionSync.go b/actionSync.go index 0057379..a3f939f 100644 --- a/actionSync.go +++ b/actionSync.go @@ -1,11 +1,12 @@ package plasmactlbump import ( - "context" "errors" "fmt" - "io" + "github.com/vbauerster/mpb/v8" + "github.com/vbauerster/mpb/v8/decor" "log/slog" + "math" "os" "path/filepath" "runtime" @@ -15,16 +16,9 @@ import ( async "sync" "time" - "github.com/cespare/xxhash/v2" - "github.com/go-git/go-git/v5" - "github.com/go-git/go-git/v5/plumbing/object" - "github.com/go-git/go-git/v5/plumbing/storer" "github.com/launchrctl/compose/compose" "github.com/launchrctl/keyring" "github.com/launchrctl/launchr" - "github.com/pterm/pterm" - - "github.com/skilld-labs/plasmactl-bump/v2/pkg/repository" "github.com/skilld-labs/plasmactl-bump/v2/pkg/sync" ) @@ -53,10 +47,12 @@ type SyncAction struct { timeline []sync.TimelineItem // options. 
- dryRun bool - allowOverride bool - showProgress bool - vaultPass string + dryRun bool + allowOverride bool + filterByResourceUsage bool + commitsAfter string + vaultPass string + showProgress bool } type hashStruct struct { @@ -88,45 +84,36 @@ func (s *SyncAction) Execute() error { } func (s *SyncAction) ensureVaultpassExists() error { - keyValueItem, errGet := s.getVaultPass(s.vaultPass) - if errGet != nil { - return errGet - } - - s.vaultPass = keyValueItem.Value - - return nil -} - -func (s *SyncAction) getVaultPass(vaultpass string) (keyring.KeyValueItem, error) { keyValueItem, errGet := s.keyring.GetForKey(vaultpassKey) if errGet != nil { if errors.Is(errGet, keyring.ErrEmptyPass) { - return keyValueItem, errGet + return errGet } else if !errors.Is(errGet, keyring.ErrNotFound) { launchr.Log().Debug("keyring error", "error", errGet) - return keyValueItem, errMalformedKeyring + return errMalformedKeyring } keyValueItem.Key = vaultpassKey - keyValueItem.Value = vaultpass + keyValueItem.Value = s.vaultPass if keyValueItem.Value == "" { launchr.Term().Printf("- Ansible vault password\n") err := keyring.RequestKeyValueFromTty(&keyValueItem) if err != nil { - return keyValueItem, err + return err } } err := s.keyring.AddItem(keyValueItem) if err != nil { - return keyValueItem, err + return err } s.saveKeyring = true } - return keyValueItem, nil + s.vaultPass = keyValueItem.Value + + return nil } func (s *SyncAction) propagate() error { @@ -138,10 +125,18 @@ func (s *SyncAction) propagate() error { return err } + if s.filterByResourceUsage { + launchr.Log().Info("Calculating resources usage") + err = inv.CalculateResourcesUsage() + if err != nil { + return fmt.Errorf("calculate resources usage > %w", err) + } + } + launchr.Log().Info("Calculating variables usage") err = inv.CalculateVariablesUsage(s.vaultPass) if err != nil { - return err + return fmt.Errorf("calculate variables usage > %w", err) } err = s.buildTimeline(inv) @@ -168,7 +163,7 @@ func (s *SyncAction) 
propagate() error { } func (s *SyncAction) buildTimeline(buildInv *sync.Inventory) error { - launchr.Log().Info("Gathering domain and package resources") + launchr.Log().Info("Gathering domain and packages resources") resourcesMap, packagePathMap, err := s.getResourcesMaps(buildInv) if err != nil { return fmt.Errorf("build resource map > %w", err) @@ -181,7 +176,7 @@ func (s *SyncAction) buildTimeline(buildInv *sync.Inventory) error { } launchr.Log().Info("Populate timeline with variables") - err = s.populateTimelineVars() + err = s.populateTimelineVars(buildInv) if err != nil { return fmt.Errorf("iteraring variables > %w", err) } @@ -251,10 +246,41 @@ func (s *SyncAction) getResourcesMaps(buildInv *sync.Inventory) (map[string]*syn } } - buildResources := buildInv.GetResourcesMap() - if err != nil { - return nil, nil, err + // Remove unused resources from packages maps. + if s.filterByResourceUsage { + usedResources := buildInv.GetUsedResources() + if len(usedResources) == 0 { + // Empty maps and return, as no resources are used in build. 
+ resourcesMap = make(map[string]*sync.OrderedMap[*sync.Resource]) + packagePathMap = make(map[string]string) + + return resourcesMap, packagePathMap, nil + } + + launchr.Log().Info("List of used resources:") + var ur []string + for r := range usedResources { + ur = append(ur, r) + } + + sort.Strings(ur) + for _, r := range ur { + launchr.Log().Info(fmt.Sprintf("- %s", r)) + } + + launchr.Log().Info("List of unused resources:") + for p, resources := range resourcesMap { + launchr.Log().Info(fmt.Sprintf("- Package - %s -", p)) + for _, k := range resources.Keys() { + if _, ok := usedResources[k]; !ok { + launchr.Log().Info(fmt.Sprintf("- %s", k)) + resources.Unset(k) + } + } + } } + + buildResources := buildInv.GetResourcesMap() for _, resourceName := range buildResources.Keys() { conflicts := make(map[string]string) for name, resources := range resourcesMap { @@ -314,543 +340,69 @@ func (s *SyncAction) getResourcesMaps(buildInv *sync.Inventory) (map[string]*syn return resourcesMap, packagePathMap, nil } -func (s *SyncAction) populateTimelineResources(resources map[string]*sync.OrderedMap[*sync.Resource], packagePathMap map[string]string) error { - var wg async.WaitGroup - var mx async.Mutex - - ctx, cancel := context.WithCancel(context.Background()) - defer cancel() - - errorChan := make(chan error, 1) - maxWorkers := min(runtime.NumCPU(), len(packagePathMap)) - workChan := make(chan map[string]any, len(packagePathMap)) - - multi := pterm.DefaultMultiPrinter - - for i := 0; i < maxWorkers; i++ { - go func(workerID int) { - for { - select { - case <-ctx.Done(): - return - case domain, ok := <-workChan: - if !ok { - return - } - - name := domain["name"].(string) - path := domain["path"].(string) - pb := domain["pb"].(*pterm.ProgressbarPrinter) - - if err := s.findResourcesChangeTime(resources[name], path, &mx, pb); err != nil { - select { - case errorChan <- fmt.Errorf("worker %d error processing %s: %w", workerID, name, err): - cancel() - default: - } - return - } - 
wg.Done() - } - } - }(i) - } - - for name, path := range packagePathMap { - if resources[name].Len() == 0 { - // Skipping packages with 0 composed resources. - continue - } - - wg.Add(1) - - var p *pterm.ProgressbarPrinter - var err error - if s.showProgress { - p, err = pterm.DefaultProgressbar.WithTotal(resources[name].Len()).WithWriter(multi.NewWriter()).Start(fmt.Sprintf("Collecting resources from %s", name)) - if err != nil { - return err - } - } - - workChan <- map[string]any{"name": name, "path": path, "pb": p} - } - close(workChan) - go func() { - if s.showProgress { - _, err := multi.Start() - if err != nil { - errorChan <- fmt.Errorf("error starting multi progress bar: %w", err) - } - } - - wg.Wait() - close(errorChan) - }() - - for err := range errorChan { - if err != nil { - return err - } - } - - // Sleep to re-render progress bar. Needed to achieve latest state. - if s.showProgress { - time.Sleep(multi.UpdateDelay) - multi.Stop() //nolint - } - - return nil -} - -func (s *SyncAction) collectCommits(r *git.Repository) (map[string]bool, error) { - // @todo get commits per files meta/vars to iterate only commits where files were changed. 
- result := make(map[string]bool) - ref, err := r.Head() - if err != nil { - return result, fmt.Errorf("error getting HEAD commit > %w", err) - } - - // start from the latest commit and iterate to the past - cIter, err := r.Log(&git.LogOptions{From: ref.Hash()}) - if err != nil { - return result, fmt.Errorf("git log error > %w", err) - } - - _ = cIter.ForEach(func(c *object.Commit) error { - hash := c.Hash.String() - hash = hash[:13] - result[hash] = true - return nil - }) - - return result, nil -} - -func (s *SyncAction) findResourcesChangeTime(namespaceResources *sync.OrderedMap[*sync.Resource], gitPath string, mx *async.Mutex, p *pterm.ProgressbarPrinter) error { - repo, err := git.PlainOpen(gitPath) - if err != nil { - return fmt.Errorf("%s - %w", gitPath, err) - } - - commitsMap, err := s.collectCommits(repo) - if err != nil { - return err - } - - ref, err := repo.Head() - if err != nil { - return fmt.Errorf("error getting HEAD commit > %w", err) - } - - // start from the latest commit and iterate to the past - cIter, err := repo.Log(&git.LogOptions{From: ref.Hash()}) - if err != nil { - return fmt.Errorf("git log error > %w", err) - } - - hashesMap := make(map[string]*hashStruct) - toIterate := namespaceResources.ToDict() - currentVersions := map[string]string{} - - for k, resource := range toIterate { - if _, ok := hashesMap[k]; !ok { - hashesMap[k] = &hashStruct{} - } - - hashesMap[k].hash = buildHackAuthor - hashesMap[k].hashTime = time.Now() - hashesMap[k].author = buildHackAuthor - - buildResource := sync.NewResource(resource.GetName(), s.buildDir) - version, err := buildResource.GetVersion() - if err != nil { - return fmt.Errorf("can't get build version of %s > %w", resource.GetName(), err) - } - - split := strings.Split(version, "-") - if len(split) > 1 { - return fmt.Errorf("dual version %s is detected in build for %s, seems like resource was propagated before. 
Please re-compose and run sync again", version, k) - } - - currentVersions[k] = version - } - - remainingDebug := len(toIterate) - err = cIter.ForEach(func(c *object.Commit) error { - if len(toIterate) == 0 { - return storer.ErrStop - } - - if len(toIterate) != remainingDebug { - remainingDebug = len(toIterate) - launchr.Log().Debug(fmt.Sprintf("Remaining unidentified resources %d", remainingDebug), slog.String("source", gitPath)) - } - - for k, resource := range toIterate { - if _, ok := hashesMap[k]; !ok { - hashesMap[k] = &hashStruct{} - } - - resourceMetaPath := resource.BuildMetaPath() - resourceVersion, ok := currentVersions[k] - if !ok { - resourceVersion, err = resource.GetVersion() - if err != nil { - return err - } - currentVersions[k] = resourceVersion - } - - file, errIt := c.File(resourceMetaPath) - if errIt != nil { - if !errors.Is(errIt, object.ErrFileNotFound) { - return fmt.Errorf("open file %s in commit %s > %w", resourceMetaPath, c.Hash, errIt) - } - - if hashesMap[k].hash == "" { - hashesMap[k].hash = c.Hash.String() - hashesMap[k].hashTime = c.Author.When - hashesMap[k].author = c.Author.Name - } - - // File didn't exist before, take current hash as version, - delete(toIterate, k) - - if p != nil { - p.Increment() - } - - continue - } - - metaFile, errIt := s.loadYamlFileFromBytes(file, resourceMetaPath) - if errIt != nil { - return fmt.Errorf("commit %s > %w", c.Hash, errIt) - } - - prevVer := sync.GetMetaVersion(metaFile) - if resourceVersion != prevVer { - delete(toIterate, k) - if p != nil { - p.Increment() - } - continue - } - - hashesMap[k].hash = c.Hash.String() - hashesMap[k].hashTime = c.Author.When - hashesMap[k].author = c.Author.Name - } - - return nil - }) - - if err != nil { - return fmt.Errorf("error during git log iteration > %w", err) - } - - // Ensure progress bar showing correct progress. 
- if p != nil && p.Total != p.Current { - p.Add(p.Total - p.Current) - } - - mx.Lock() - defer mx.Unlock() - - for n, hm := range hashesMap { - r, _ := namespaceResources.Get(n) - resourceVersion := currentVersions[n] - - launchr.Log().Debug("add resource to timeline", - slog.String("mrn", r.GetName()), - slog.String("commit", hm.hash), - slog.String("version", resourceVersion), - slog.Time("date", hm.hashTime), - ) - - if hm.author == buildHackAuthor { - msg := fmt.Sprintf("Version of `%s` doesn't match HEAD commit", n) - if !s.allowOverride { - return errors.New(msg) - } - - launchr.Log().Warn(msg) - } else if hm.author != repository.Author { - launchr.Log().Warn(fmt.Sprintf("Latest commit of %s is not a bump commit", r.GetName())) - } - - if _, ok := commitsMap[resourceVersion]; !ok { - launchr.Log().Warn(fmt.Sprintf("Latest version of `%s` doesn't match any existing commit", r.GetName())) - } - - tri := sync.NewTimelineResourcesItem(resourceVersion, hm.hash, hm.hashTime) - tri.AddResource(r) - - s.timeline = sync.AddToTimeline(s.timeline, tri) - } - - return nil -} - -func (s *SyncAction) populateTimelineVars() error { - filesCrawler := sync.NewFilesCrawler(s.domainDir) - groupedFiles, err := filesCrawler.FindVarsFiles("") - if err != nil { - return err - } - - var varsFiles []string - for _, paths := range groupedFiles { - varsFiles = append(varsFiles, paths...) 
- } - - ctx, cancel := context.WithCancel(context.Background()) - defer cancel() - - var wg async.WaitGroup - var mx async.Mutex - - maxWorkers := min(runtime.NumCPU(), len(varsFiles)) - workChan := make(chan string, len(varsFiles)) - errorChan := make(chan error, 1) - - var p *pterm.ProgressbarPrinter - if s.showProgress { - p, _ = pterm.DefaultProgressbar.WithTotal(len(varsFiles)).WithTitle("Processing variables files").Start() - } - - for i := 0; i < maxWorkers; i++ { - go func(workerID int) { - for { - select { - case <-ctx.Done(): - return - case varsFile, ok := <-workChan: - if !ok { - return - } - if err = s.findVariableUpdateTime(varsFile, s.domainDir, &mx, p); err != nil { - select { - case errorChan <- fmt.Errorf("worker %d error processing %s: %w", workerID, varsFile, err): - cancel() - default: - } - return - } - wg.Done() - } - } - }(i) - } - - for _, f := range varsFiles { - wg.Add(1) - workChan <- f - } - close(workChan) - - go func() { - wg.Wait() - close(errorChan) - }() - - for err = range errorChan { - if err != nil { - return err - } - } - - return nil -} - -func (s *SyncAction) findVariableUpdateTime(varsFile string, gitPath string, mx *async.Mutex, p *pterm.ProgressbarPrinter) error { - repo, err := git.PlainOpen(gitPath) - if err != nil { - return fmt.Errorf("%s - %w", gitPath, err) - } - - ref, err := repo.Head() - if err != nil { - return err - } - - var varsYaml map[string]any - hashesMap := make(map[string]*hashStruct) - variablesMap := sync.NewOrderedMap[*sync.Variable]() - isVault := sync.IsVaultFile(varsFile) - - varsYaml, err = sync.LoadVariablesFile(filepath.Join(s.buildDir, varsFile), s.vaultPass, isVault) - if err != nil { - return err - } - - for k, value := range varsYaml { - v := sync.NewVariable(varsFile, k, HashString(fmt.Sprint(value)), isVault) - variablesMap.Set(k, v) - - if _, ok := hashesMap[k]; !ok { - hashesMap[k] = &hashStruct{} - } - - hashesMap[k].hash = fmt.Sprint(v.GetHash()) - hashesMap[k].hashTime = time.Now() - 
hashesMap[k].author = buildHackAuthor - } - - toIterate := variablesMap.ToDict() - - cIter, err := repo.Log(&git.LogOptions{From: ref.Hash()}) - if err != nil { - return err - } - - remainingDebug := len(toIterate) - err = cIter.ForEach(func(c *object.Commit) error { - if len(toIterate) == 0 { - return storer.ErrStop - } - - if len(toIterate) != remainingDebug { - remainingDebug = len(toIterate) - launchr.Log().Debug(fmt.Sprintf("Remaining unidentified variables, %s - %d", varsFile, remainingDebug)) - } - - file, errIt := c.File(varsFile) - if errIt != nil { - if !errors.Is(errIt, object.ErrFileNotFound) { - return fmt.Errorf("open file %s in commit %s > %w", varsFile, c.Hash, errIt) - } - - return storer.ErrStop - } - - varFile, errIt := s.loadVariablesFileFromBytes(file, varsFile, isVault) - if errIt != nil { - if strings.Contains(errIt.Error(), "did not find expected key") || strings.Contains(errIt.Error(), "could not find expected") { - launchr.Log().Warn("Bad YAML structured detected", - slog.String("file", varsFile), - slog.String("commit", c.Hash.String()), - slog.String("error", errIt.Error()), - ) - - return nil - } - - if strings.Contains(errIt.Error(), "invalid password for vault") { - launchr.Log().Warn("Invalid password for vault", - slog.String("file", varsFile), - slog.String("commit", c.Hash.String()), - ) - - return storer.ErrStop - } - - if strings.Contains(errIt.Error(), "invalid secret format") { - launchr.Log().Warn("invalid secret format for vault", - slog.String("file", varsFile), - slog.String("commit", c.Hash.String()), - ) - return nil - } - - return fmt.Errorf("commit %s > %w", c.Hash, errIt) - } - - for k, hh := range toIterate { - prevVar, exists := varFile[k] - if !exists { - // Variable didn't exist before, take current hash as version - delete(toIterate, k) - continue - } - - prevVarHash := HashString(fmt.Sprint(prevVar)) - if hh.GetHash() != prevVarHash { - // Variable exists, hashes don't match, stop iterating - delete(toIterate, 
k) - continue - } - - hashesMap[k].hash = c.Hash.String() - hashesMap[k].hashTime = c.Author.When - hashesMap[k].author = c.Author.Name - } - - return nil - }) - - if err != nil { - return err - } - - mx.Lock() - defer mx.Unlock() - - for n, hm := range hashesMap { - v, _ := variablesMap.Get(n) - version := hm.hash[:13] - launchr.Log().Debug("add variable to timeline", - slog.String("variable", v.GetName()), - slog.String("version", version), - slog.Time("date", hm.hashTime), - slog.String("path", v.GetPath()), - ) - - if hm.author == buildHackAuthor { - msg := fmt.Sprintf("Value of `%s` doesn't match HEAD commit", n) - if !s.allowOverride { - if p != nil { - p.Stop() //nolint - } - - return errors.New(msg) - } - - launchr.Log().Warn(msg) - } - - tri := sync.NewTimelineVariablesItem(version, hm.hash, hm.hashTime) - tri.AddVariable(v) - - s.timeline = sync.AddToTimeline(s.timeline, tri) - } - - if p != nil { - p.Increment() - } - - return err -} - func (s *SyncAction) buildPropagationMap(buildInv *sync.Inventory, timeline []sync.TimelineItem) (*sync.OrderedMap[*sync.Resource], map[string]string, error) { resourceVersionMap := make(map[string]string) toPropagate := sync.NewOrderedMap[*sync.Resource]() resourcesMap := buildInv.GetResourcesMap() + processed := make(map[string]bool) + sync.SortTimeline(timeline, sync.SortDesc) + + usedResources := make(map[string]bool) + if s.filterByResourceUsage { + usedResources = buildInv.GetUsedResources() + } - sync.SortTimeline(timeline) launchr.Log().Info("Iterating timeline") for _, item := range timeline { + dependenciesLog := sync.NewOrderedMap[bool]() + switch i := item.(type) { case *sync.TimelineResourcesItem: resources := i.GetResources() resources.SortKeysAlphabetically() - dependenciesLog := sync.NewOrderedMap[bool]() - + var toProcess []string for _, key := range resources.Keys() { + if processed[key] { + continue + } + toProcess = append(toProcess, key) + } + + if len(toProcess) == 0 { + continue + } + + for _, key := 
range toProcess { r, ok := resources.Get(key) if !ok { return nil, nil, fmt.Errorf("unknown key %s detected during timeline iteration", key) } + processed[key] = true + dependentResources := buildInv.GetRequiredByResources(r.GetName(), -1) + if s.filterByResourceUsage { + for dr := range dependentResources { + if _, okU := usedResources[dr]; !okU { + delete(dependentResources, dr) + } + } + } + for dep := range dependentResources { depResource, okR := resourcesMap.Get(dep) if !okR { continue } + // Skip resource if it was processed by previous timeline item or previous resource (via deps). + if processed[dep] { + continue + } + + processed[dep] = true + toPropagate.Set(dep, depResource) resourceVersionMap[dep] = i.GetVersion() @@ -860,18 +412,20 @@ func (s *SyncAction) buildPropagationMap(buildInv *sync.Inventory, timeline []sy } } - for _, key := range resources.Keys() { + for _, key := range toProcess { // Ensure new version removes previous propagation for that resource. toPropagate.Unset(key) delete(resourceVersionMap, key) } - launchr.Log().Debug("timeline item (resources)", - slog.String("version", item.GetVersion()), - slog.Time("date", item.GetDate()), - slog.String("resources", fmt.Sprintf("%v", resources.Keys())), - slog.String("dependencies", fmt.Sprintf("%v", dependenciesLog.Keys())), - ) + if dependenciesLog.Len() > 0 { + launchr.Log().Debug("timeline item (resources)", + slog.String("version", item.GetVersion()), + slog.Time("date", item.GetDate()), + slog.String("resources", fmt.Sprintf("%v", toProcess)), + slog.String("dependencies", fmt.Sprintf("%v", dependenciesLog.Keys())), + ) + } case *sync.TimelineVariablesItem: variables := i.GetVariables() @@ -880,28 +434,47 @@ func (s *SyncAction) buildPropagationMap(buildInv *sync.Inventory, timeline []sy var resources []string for _, v := range variables.Keys() { variable, _ := variables.Get(v) - vr, err := buildInv.GetVariableResources(variable.GetName(), variable.GetPlatform()) - if err != nil { - 
return nil, nil, err + vr := buildInv.GetVariableResources(variable.GetName(), variable.GetPlatform()) + + if len(usedResources) == 0 { + resources = append(resources, vr...) + } else { + var usedVr []string + for _, r := range vr { + if _, ok := usedResources[r]; ok { + usedVr = append(usedVr, r) + } + } + resources = append(resources, usedVr...) } - resources = append(resources, vr...) } slices.Sort(resources) resources = slices.Compact(resources) - dependenciesLog := sync.NewOrderedMap[bool]() + var toProcess []string + for _, key := range resources { + if processed[key] { + continue + } + toProcess = append(toProcess, key) + } + + if len(toProcess) == 0 { + continue + } - for _, r := range resources { + for _, r := range toProcess { // First set version for main resource. mainResource, okM := resourcesMap.Get(r) if !okM { launchr.Log().Warn(fmt.Sprintf("skipping not valid resource %s (direct vars dependency)", r)) continue } + + processed[r] = true toPropagate.Set(r, mainResource) resourceVersionMap[r] = i.GetVersion() - dependenciesLog.Set(r, true) // Set versions for dependent resources. @@ -913,6 +486,13 @@ func (s *SyncAction) buildPropagationMap(buildInv *sync.Inventory, timeline []sy continue } + // Skip resource if it was processed by previous timeline item or previous resource (via deps). 
+ if processed[dep] { + continue + } + + processed[dep] = true + toPropagate.Set(dep, depResource) resourceVersionMap[dep] = i.GetVersion() @@ -920,12 +500,14 @@ func (s *SyncAction) buildPropagationMap(buildInv *sync.Inventory, timeline []sy } } - launchr.Log().Debug("timeline item (variables)", - slog.String("version", item.GetVersion()), - slog.Time("date", item.GetDate()), - slog.String("variables", fmt.Sprintf("%v", variables.Keys())), - slog.String("resources", fmt.Sprintf("%v", dependenciesLog.Keys())), - ) + if dependenciesLog.Len() > 0 { + launchr.Log().Debug("timeline item (variables)", + slog.String("version", item.GetVersion()), + slog.Time("date", item.GetDate()), + slog.String("variables", fmt.Sprintf("%v", variables.Keys())), + slog.String("resources", fmt.Sprintf("%v", dependenciesLog.Keys())), + ) + } } } @@ -979,13 +561,29 @@ func (s *SyncAction) updateResources(resourceVersionMap map[string]string, toPro sort.Strings(sortList) launchr.Log().Info("Propagating versions") - var p *pterm.ProgressbarPrinter + //var p *pterm.ProgressbarPrinter + var p *mpb.Progress + var b *mpb.Bar if s.showProgress { - p, _ = pterm.DefaultProgressbar.WithTotal(len(sortList)).WithTitle("Updating resources").Start() + //p, _ = pterm.DefaultProgressbar.WithTotal(len(sortList)).WithTitle("Updating resources").Start() + p = mpb.New(mpb.WithWidth(64)) + b = p.New(int64(len(sortList)), + //mpb.BarStyle().Lbound("╢").Filler("▌").Tip("▌").Padding("░").Rbound("╟"), + mpb.BarStyle(), + mpb.PrependDecorators( + decor.Name("Updating resources:", decor.WC{C: decor.DindentRight | decor.DextraSpace}), + //decor.OnComplete(decor.AverageETA(decor.ET_STYLE_GO), "done in "), + decor.OnComplete(decor.Name("processing... 
"), "done in "), + mpbTimeDecorator(time.Now()), + decor.CountersNoUnit(" [%d/%d] "), + ), + mpb.AppendDecorators(decor.Percentage()), + ) } for _, key := range sortList { - if p != nil { - p.Increment() + if b != nil { + //p.Increment() + b.Increment() } val := updateMap[key] @@ -998,6 +596,8 @@ func (s *SyncAction) updateResources(resourceVersionMap map[string]string, toPro } launchr.Log().Info(fmt.Sprintf("%s from %s to %s", r.GetName(), currentVersion, newVersion)) + + //time.Sleep(100 * time.Millisecond) if s.dryRun { continue } @@ -1007,46 +607,11 @@ func (s *SyncAction) updateResources(resourceVersionMap map[string]string, toPro return err } } - - return nil -} - -func (s *SyncAction) loadYamlFileFromBytes(file *object.File, path string) (map[string]any, error) { - reader, errIt := file.Blob.Reader() - if errIt != nil { - return nil, fmt.Errorf("can't read %s > %w", path, errIt) - } - - contents, errIt := io.ReadAll(reader) - if errIt != nil { - return nil, fmt.Errorf("can't read %s > %w", path, errIt) - } - - yamlFile, errIt := sync.LoadYamlFileFromBytes(contents) - if errIt != nil { - return nil, fmt.Errorf("YAML load %s > %w", path, errIt) - } - - return yamlFile, nil -} - -func (s *SyncAction) loadVariablesFileFromBytes(file *object.File, path string, isVault bool) (map[string]any, error) { - reader, errIt := file.Blob.Reader() - if errIt != nil { - return nil, fmt.Errorf("can't read %s > %w", path, errIt) - } - - contents, errIt := io.ReadAll(reader) - if errIt != nil { - return nil, fmt.Errorf("can't read %s > %w", path, errIt) - } - - varFile, errIt := sync.LoadVariablesFileFromBytes(contents, s.vaultPass, isVault) - if errIt != nil { - return nil, fmt.Errorf("YAML load %s > %w", path, errIt) + if p != nil { + p.Wait() } - return varFile, nil + return nil } func composeVersion(oldVersion string, newVersion string) string { @@ -1078,120 +643,6 @@ func getResourcesMapFrom(dir string) (*sync.OrderedMap[*sync.Resource], error) { return rm, nil } -// 
HashString is wrapper for hashing string. -func HashString(item string) uint64 { - return xxhash.Sum64String(item) +func mpbTimeDecorator(t time.Time, wcc ...decor.WC) decor.Decorator { + return decor.Any(func(decor.Statistics) string { return fmt.Sprintf("%.2fs", math.RoundToEven(time.Since(t).Seconds())) }, wcc...) } - -//func (s *SyncAction) ensureResourceIsVersioned(resourceVersion, resourceMetaPath string, repo *git.Repository) (*plumbing.Reference, error) { -// ref, err := repo.Head() -// if err != nil { -// return nil, err -// } -// -// headCommit, err := repo.CommitObject(ref.Hash()) -// if err != nil { -// return nil, err -// } -// headMeta, err := headCommit.File(resourceMetaPath) -// if err != nil { -// return nil, fmt.Errorf("meta %s doesn't exist in HEAD commit", resourceMetaPath) -// } -// -// metaFile, err := s.loadYamlFileFromBytes(headMeta, resourceMetaPath) -// if err != nil { -// return nil, fmt.Errorf("%w", err) -// } -// -// headVersion := sync.GetMetaVersion(metaFile) -// if resourceVersion != headVersion { -// return nil, fmt.Errorf("version from %s doesn't match any existing commit", resourceMetaPath) -// } -// -// return ref, nil -//} - -//func (s *SyncAction) ensureResourceNonVersioned(mrn string, repo *git.Repository) error { -// resourcePath, err := sync.ConvertMRNtoPath(mrn) -// if err != nil { -// return err -// } -// -// buildPath := filepath.Join(s.buildDir, resourcePath) -// resourceFiles, err := sync.GetFiles(buildPath, []string{}) -// if err != nil { -// return err -// } -// -// ref, err := repo.Head() -// if err != nil { -// return err -// } -// -// headCommit, err := repo.CommitObject(ref.Hash()) -// if err != nil { -// return err -// } -// -// for f := range resourceFiles { -// buildHash, err := sync.HashFileByPath(filepath.Join(buildPath, f)) -// if err != nil { -// return err -// } -// -// launchr.Term().Warning().Printfln(filepath.Join(resourcePath, f)) -// headFile, err := headCommit.File(filepath.Join(resourcePath, f)) -// 
if err != nil { -// return err -// } -// -// reader, err := headFile.Blob.Reader() -// if err != nil { -// return err -// } -// -// headHash, err := sync.HashFileFromReader(reader) -// if err != nil { -// return err -// } -// -// if buildHash != headHash { -// return fmt.Errorf("resource %s has unversioned changes. You need to commit these changes", mrn) -// } -// } -// -// return nil -//} - -//func (s *SyncAction) ensureVariableIsVersioned(variable *sync.Variable, repo *git.Repository) (*plumbing.Reference, error) { -// ref, err := repo.Head() -// if err != nil { -// return nil, err -// } -// -// headCommit, err := repo.CommitObject(ref.Hash()) -// if err != nil { -// return nil, err -// } -// headVarsFile, err := headCommit.File(variable.GetPath()) -// if err != nil { -// return nil, fmt.Errorf("file %s doesn't exist in HEAD", variable.GetPath()) -// } -// -// varFile, errIt := s.loadVariablesFileFromBytes(headVarsFile, variable.GetPath(), variable.IsVault()) -// if errIt != nil { -// return nil, fmt.Errorf("%w", errIt) -// } -// -// headVar, exists := varFile[variable.GetName()] -// if !exists { -// return nil, fmt.Errorf("variable from %s doesn't exist in HEAD", variable.GetPath()) -// } -// -// headVarHash := sync.HashString(fmt.Sprint(headVar)) -// if variable.GetHash() != headVarHash { -// return nil, fmt.Errorf("variable from %s is an unversioned change. 
You need to commit variable change", variable.GetPath()) -// } -// -// return ref, nil -//} diff --git a/actionSync.resources.go b/actionSync.resources.go new file mode 100644 index 0000000..657c9f1 --- /dev/null +++ b/actionSync.resources.go @@ -0,0 +1,704 @@ +package plasmactlbump + +import ( + "context" + "errors" + "fmt" + "github.com/vbauerster/mpb/v8" + "github.com/vbauerster/mpb/v8/decor" + "io" + "log/slog" + "runtime" + async "sync" + "time" + + "github.com/go-git/go-git/v5" + "github.com/go-git/go-git/v5/plumbing" + "github.com/go-git/go-git/v5/plumbing/object" + "github.com/go-git/go-git/v5/plumbing/storer" + "github.com/launchrctl/launchr" + "github.com/skilld-labs/plasmactl-bump/v2/pkg/repository" + "github.com/skilld-labs/plasmactl-bump/v2/pkg/sync" +) + +var errRunBruteProcess = fmt.Errorf("run brute") + +const ( + headGroupName = "head" +) + +// CommitsGroup is simple struct that contains list of commits under some group. Group has name, date and parent commit. +type CommitsGroup struct { + name string + commit string + items []string + date time.Time +} + +func (s *SyncAction) populateTimelineResources(resources map[string]*sync.OrderedMap[*sync.Resource], packagePathMap map[string]string) error { + var wg async.WaitGroup + var mx async.Mutex + + ctx, cancel := context.WithCancel(context.Background()) + defer cancel() + + errorChan := make(chan error, 1) + maxWorkers := min(runtime.NumCPU(), len(packagePathMap)) + workChan := make(chan map[string]any, len(packagePathMap)) + + //multi := pterm.DefaultMultiPrinter + p := mpb.New( + mpb.WithWaitGroup(&wg), + mpb.WithWidth(64), + ) + + for i := 0; i < maxWorkers; i++ { + go func(workerID int) { + for { + select { + case <-ctx.Done(): + return + case domain, ok := <-workChan: + if !ok { + return + } + + name := domain["name"].(string) + path := domain["path"].(string) + //pb := domain["pb"].(*pterm.ProgressbarPrinter) + pb := domain["pb"].(*mpb.Bar) + + if err := s.findResourcesChangeTime(ctx, 
resources[name], path, &mx, pb); err != nil { + select { + case errorChan <- fmt.Errorf("worker %d error processing %s: %w", workerID, name, err): + cancel() + default: + } + return + } + wg.Done() + } + } + }(i) + } + + for name, path := range packagePathMap { + if resources[name].Len() == 0 { + // Skipping packages with 0 composed resources. + continue + } + + wg.Add(1) + + //var p *pterm.ProgressbarPrinter + var b *mpb.Bar + //var err error + if s.showProgress { + //p, err = pterm.DefaultProgressbar.WithTotal(resources[name].Len()).WithWriter(multi.NewWriter()).Start(fmt.Sprintf("Collecting resources from %s", name)) + //if err != nil { + // return err + //} + + b = p.AddBar(int64(resources[name].Len()), + mpb.PrependDecorators( + // simple name decorator + decor.Name(fmt.Sprintf("Collecting resources from %s", name)), + // decor.DSyncWidth bit enables column width synchronization + decor.Percentage(decor.WCSyncSpace), + ), + mpb.AppendDecorators( + // replace ETA decorator with "done" message, OnComplete event + decor.OnComplete( + // ETA decorator with ewma age of 30 + decor.EwmaETA(decor.ET_STYLE_GO, 30, decor.WCSyncWidth), "done", + ), + ), + ) + } + + workChan <- map[string]any{"name": name, "path": path, "pb": b} + } + close(workChan) + go func() { + if s.showProgress { + //_, err := multi.Start() + //if err != nil { + // errorChan <- fmt.Errorf("error starting multi progress bar: %w", err) + //} + p.Wait() + } else { + wg.Wait() + } + + //wg.Wait() + close(errorChan) + }() + + for err := range errorChan { + if err != nil { + return err + } + } + + // Sleep to re-render progress bar. Needed to achieve latest state. 
+ //if s.showProgress { + // time.Sleep(multi.UpdateDelay) + // _, _ = multi.Stop() + //} + + return nil +} + +func collectResourcesCommits(r *git.Repository, beforeDate string) (*sync.OrderedMap[*CommitsGroup], map[string]map[string]string, error) { + ref, err := r.Head() + if err != nil { + return nil, nil, fmt.Errorf("can't get HEAD ref > %w", err) + } + + hashes := make(map[string]map[string]string) + var commits []string + var section string + var sectionName string + var sectionDate time.Time + + // start from the latest commit and iterate to the past + cIter, err := r.Log(&git.LogOptions{From: ref.Hash()}) + if err != nil { + return nil, nil, fmt.Errorf("git log error > %w", err) + } + + var before time.Time + + if beforeDate != "" { + before, err = time.Parse(time.DateOnly, beforeDate) + if err != nil { + return nil, nil, fmt.Errorf("can't parse date %s, format should be %s > %w", beforeDate, time.DateOnly, err) + } + } + + groups := sync.NewOrderedMap[*CommitsGroup]() + + _ = cIter.ForEach(func(c *object.Commit) error { + if c.Author.When.Before(before) { + return storer.ErrStop + } + + hash := c.Hash.String() + hash = hash[:13] + if _, ok := hashes[hash]; !ok { + hashes[hash] = make(map[string]string) + hashes[hash]["original"] = c.Hash.String() + hashes[hash]["section"] = "" + } else { + panic(fmt.Sprintf("dupicate version hash has been met %s during commits iteration", hash)) + } + + if ref.Hash() == c.Hash { + commits = []string{} + sectionDate = c.Author.When + if c.Author.Name == repository.Author { + section = c.Hash.String() + sectionName = section + hashes[hash]["section"] = sectionName + } else { + section = ref.Hash().String() + sectionName = headGroupName + hashes[hash]["section"] = sectionName + commits = append(commits, c.Hash.String()) + } + + return nil + } + + // create new group when bump commits appears and store previous one. 
+ if c.Author.Name == repository.Author { + group := &CommitsGroup{ + name: sectionName, + commit: section, + date: sectionDate, + items: commits, + } + + groups.Set(section, group) + + section = c.Hash.String() + sectionName = c.Hash.String() + sectionDate = c.Author.When + commits = []string{} + } else { + hashes[hash]["section"] = section + commits = append(commits, c.Hash.String()) + } + + return nil + }) + + if _, ok := groups.Get(section); !ok { + group := &CommitsGroup{ + name: sectionName, + commit: section, + date: sectionDate, + items: commits, + } + + groups.Set(section, group) + } + + return groups, hashes, nil +} + +func (s *SyncAction) findResourcesChangeTime(ctx context.Context, namespaceResources *sync.OrderedMap[*sync.Resource], gitPath string, mx *async.Mutex, p *mpb.Bar) error { + repo, err := git.PlainOpen(gitPath) + if err != nil { + return fmt.Errorf("%s - %w", gitPath, err) + } + + groups, commitsMap, err := collectResourcesCommits(repo, s.commitsAfter) + if err != nil { + return fmt.Errorf("collect resources commits > %w", err) + } + + var wg async.WaitGroup + errorChan := make(chan error, 1) + //maxWorkers := 3 + maxWorkers := runtime.NumCPU() + resourcesChan := make(chan *sync.Resource, namespaceResources.Len()) + + for w := 0; w < maxWorkers; w++ { + go func() { + for { + select { + case <-ctx.Done(): + return + case r, ok := <-resourcesChan: + if !ok { + return + } + if err = s.processResource(r, groups, commitsMap, repo, gitPath, mx); err != nil { + if p != nil { + p.Abort(true) + // //_, _ = p.Stop() + } + + select { + case errorChan <- err: + default: + } + } + if p != nil { + p.Increment() + } + + wg.Done() + } + } + }() + } + + for _, k := range namespaceResources.Keys() { + r, ok := namespaceResources.Get(k) + if !ok { + continue + } + + wg.Add(1) + resourcesChan <- r + } + close(resourcesChan) + + go func() { + wg.Wait() + close(errorChan) + }() + + for err = range errorChan { + if err != nil { + return err + } + } + + return nil 
+} + +func (s *SyncAction) processResource(resource *sync.Resource, commitsGroups *sync.OrderedMap[*CommitsGroup], commitsMap map[string]map[string]string, _ *git.Repository, gitPath string, mx *async.Mutex) error { + repo, err := git.PlainOpen(gitPath) + if err != nil { + return fmt.Errorf("%s - %w", gitPath, err) + } + + buildResource := sync.NewResource(resource.GetName(), s.buildDir) + currentVersion, err := buildResource.GetVersion() + if err != nil { + return err + } + + versionHash := &hashStruct{ + hash: buildHackAuthor, + hashTime: time.Now(), + author: buildHackAuthor, + } + + head, err := repo.Head() + if err != nil { + return fmt.Errorf("can't get HEAD ref > %w", err) + } + + headCommit, err := repo.CommitObject(head.Hash()) + if err != nil { + return fmt.Errorf("can't get HEAD commit object > %w", err) + } + + resourceMetaPath := resource.BuildMetaPath() + + file, err := headCommit.File(resourceMetaPath) + if err != nil { + return fmt.Errorf("opening file %s in commit %s > %w", resourceMetaPath, headCommit.Hash, err) + } + + metaFile, err := loadYamlFileFromBytes(file, resourceMetaPath) + if err != nil { + return fmt.Errorf("YAML load commit %s > %w", headCommit.Hash, err) + } + + currentMetaHash := file.Hash.String() + headVersion := sync.GetMetaVersion(metaFile) + + // Ensure actual version and head versions match. + // If actual version doesn't match head commit. Ensure override is allowed. + // If override is not allowed, return error. + // In other case add new timeline item with overridden version. 
+ + overridden := false + if currentVersion != headVersion { + msg := fmt.Sprintf("Version of `%s` doesn't match HEAD commit", resource.GetName()) + if !s.allowOverride { + return errors.New(msg) + } + + launchr.Log().Warn(msg) + overridden = true + } else { + versionHash.hash = headCommit.Hash.String() + versionHash.hashTime = headCommit.Author.When + versionHash.author = headCommit.Author.Name + } + + if !overridden { + // @todo rewrite to concurrent map ? + //mx.Lock() + item, ok := commitsMap[currentVersion] + //mx.Unlock() + if !ok { + launchr.Log().Warn(fmt.Sprintf("Latest version of `%s` doesn't match any existing commit", resource.GetName())) + } + + var commit *object.Commit + var errProcess error + + if len(item) == 0 { + commit, errProcess = s.processUnknownSection(commitsGroups, resourceMetaPath, currentVersion, repo, currentMetaHash) + } else { + group, okSection := commitsGroups.Get(item["section"]) + if !okSection { + panic(fmt.Sprintf("Requested group %s doesn't exist", item["section"])) + } + + commit, errProcess = s.processBumpSection(group, resourceMetaPath, currentVersion, repo, currentMetaHash) + } + + if errors.Is(errProcess, errRunBruteProcess) { + commit, errProcess = s.processAllSections(commitsGroups, resourceMetaPath, currentVersion, repo, currentMetaHash) + } + + if errProcess != nil { + return errProcess + } + + if commit == nil { + return fmt.Errorf("couldn't find version commit for %s", resource.GetName()) + } + + versionHash.hash = commit.Hash.String() + versionHash.hashTime = commit.Author.When + versionHash.author = commit.Author.Name + } + + mx.Lock() + defer mx.Unlock() + + launchr.Log().Debug("add resource to timeline", + slog.String("mrn", resource.GetName()), + slog.String("commit", versionHash.hash), + slog.String("version", currentVersion), + slog.Time("date", versionHash.hashTime), + ) + + if versionHash.author != repository.Author && versionHash.author != buildHackAuthor { + launchr.Log().Warn(fmt.Sprintf("Latest commit of 
%s is not a bump commit", resource.GetName())) + } + + tri := sync.NewTimelineResourcesItem(currentVersion, versionHash.hash, versionHash.hashTime) + tri.AddResource(resource) + + s.timeline = sync.AddToTimeline(s.timeline, tri) + + return nil +} + +func (s *SyncAction) processAllSections(commitsGroups *sync.OrderedMap[*CommitsGroup], resourceMetaPath, currentVersion string, repo *git.Repository, originalHash string) (*object.Commit, error) { + keys := commitsGroups.Keys() + for i := commitsGroups.Len() - 1; i >= 0; i-- { + group, _ := commitsGroups.Get(keys[i]) + sectionCommit, errGr := repo.CommitObject(plumbing.NewHash(group.commit)) + if errGr != nil { + return nil, fmt.Errorf("can't get group commit object %s > %w", group.commit, errGr) + } + + var commitWeNeed *object.Commit + var fileHash string + + if group.name == headGroupName { + // Well, if we are in head, it's the final line of defense. + fileHash = originalHash + commitWeNeed = sectionCommit + } else { + sectionMetaHash, sectionMetaFile, err := getFileHashFromCommit(sectionCommit, resourceMetaPath) + if err != nil { + // Iterate until we find group which contains resource with current version. 
+ if errors.Is(err, object.ErrFileNotFound) { + continue + } + + return nil, fmt.Errorf("can't hash meta file from commit %s - %w", group.commit, err) + } + + sectionMetaYaml, err := loadYamlFileFromBytes(sectionMetaFile, resourceMetaPath) + if err != nil { + return nil, fmt.Errorf("YAML load group commit %s > %w", group.commit, err) + } + + sectionVersion := sync.GetMetaVersion(sectionMetaYaml) + if sectionVersion != currentVersion { + continue + } + + commitWeNeed = sectionCommit + fileHash = sectionMetaHash + } + + for _, item := range group.items { + itemCommit, errItm := repo.CommitObject(plumbing.NewHash(item)) + if errItm != nil { + return nil, errItm + } + + itemMetaHash, itemMetaFile, errItm := getFileHashFromCommit(itemCommit, resourceMetaPath) + if errItm != nil { + // Files don't exist, it means they were created in previous commit. + if errors.Is(errItm, object.ErrFileNotFound) { + break + } + + return nil, fmt.Errorf("can't hash meta file from commit %s > %w", itemCommit.Hash.String(), errItm) + } + + if fileHash == itemMetaHash { + commitWeNeed = itemCommit + continue + } + + itemMetaYaml, errItm := loadYamlFileFromBytes(itemMetaFile, resourceMetaPath) + if errItm != nil { + return nil, fmt.Errorf("YAML load item commit %s > %w", itemCommit.Hash, errItm) + } + + prevVer := sync.GetMetaVersion(itemMetaYaml) + if prevVer != currentVersion { + break + } + + fileHash = itemMetaHash + commitWeNeed = itemCommit + } + + return commitWeNeed, nil + } + + return nil, nil +} + +func (s *SyncAction) processUnknownSection(commitsGroups *sync.OrderedMap[*CommitsGroup], resourceMetaPath, currentVersion string, repo *git.Repository, originalHash string) (*object.Commit, error) { + keys := commitsGroups.Keys() + for i := commitsGroups.Len() - 1; i >= 0; i-- { + group, _ := commitsGroups.Get(keys[i]) + + if group.name == headGroupName { + // Well, you should have bumped your results, because we can't be sure that version was actually set in + // head. + // i.e. 
someone updated meta file (changed author), didn't bump, but version came from previous bump and in + // this function first comparison done by file hash. + return nil, errRunBruteProcess + } + sectionCommit, err := repo.CommitObject(plumbing.NewHash(group.commit)) + if err != nil { + return nil, fmt.Errorf("can't get group commit object %s > %w", group.commit, err) + } + + sectionMetaHash, _, err := getFileHashFromCommit(sectionCommit, resourceMetaPath) + if err != nil { + if errors.Is(err, object.ErrFileNotFound) { + continue + } + + return nil, fmt.Errorf("can't hash meta file from commit %s - %w", group.commit, err) + } + + if originalHash != sectionMetaHash { + continue + } + + if len(group.items) == 0 { + // Something wrong with process in this case. It's not possible to have version from head commits group. + // Either someone can predict future or git history was manipulated. Send to manual search in this case. + return nil, errRunBruteProcess + } + + item := group.items[0] + itemCommit, errItem := repo.CommitObject(plumbing.NewHash(item)) + if errItem != nil { + return nil, fmt.Errorf("can't get item commit object %s > %w", itemCommit.Hash.String(), errItem) + } + + itemMetaHash, itemMetaFile, errItem := getFileHashFromCommit(itemCommit, resourceMetaPath) + if errItem != nil { + // How it's possible to not have meta file in commit before bump ? 
+			// @todo case looks impossible, maybe makes sense to panic here
+			if errors.Is(errItem, object.ErrFileNotFound) {
+				return nil, errRunBruteProcess
+			}
+
+			return nil, fmt.Errorf("can't hash meta file from commit %s > %w", itemCommit.Hash.String(), errItem)
+		}
+
+		// Hashes don't match, as expected
+		if originalHash != itemMetaHash {
+			// Ensure real version is different
+			itemMetaYaml, errMeta := loadYamlFileFromBytes(itemMetaFile, resourceMetaPath)
+			if errMeta != nil {
+				return nil, fmt.Errorf("YAML load item commit %s > %w", itemCommit.Hash, errMeta)
+			}
+
+			itemVer := sync.GetMetaVersion(itemMetaYaml)
+
+			// Version match when shouldn't
+			if itemVer == currentVersion {
+				return nil, errRunBruteProcess
+			}
+		} else {
+			// File hashes match when shouldn't
+			return nil, errRunBruteProcess
+		}
+
+		return sectionCommit, nil
+	}
+
+	return nil, nil
+}
+
+func (s *SyncAction) processBumpSection(group *CommitsGroup, resourceMetaPath, currentVersion string, repo *git.Repository, originalHash string) (*object.Commit, error) {
+	if group.name == headGroupName || len(group.items) == 0 {
+		// Something wrong with process in this case. It's not possible to have version from head commits group.
+		// Either someone can predict future or git history was manipulated. Send to manual search in this case.
+		//panic(fmt.Sprintf("zero section items: %s %s", group.name, group.date))
+		return nil, errRunBruteProcess
+	}
+
+	// Ensure bump commit has the same file hash
+	sectionCommit, err := repo.CommitObject(plumbing.NewHash(group.commit))
+	if err != nil {
+		return nil, fmt.Errorf("can't get group commit object %s > %w", group.commit, err)
+	}
+
+	sectionMetaHash, _, err := getFileHashFromCommit(sectionCommit, resourceMetaPath)
+	if err != nil {
+		// 'Bad' resource version was used and assigned to group. Requires manual search.
+		if errors.Is(err, object.ErrFileNotFound) {
+			return nil, errRunBruteProcess
+		}
+
+		return nil, fmt.Errorf("can't hash meta file from commit %s > %w", group.commit, err)
+	}
+
+	if originalHash != sectionMetaHash {
+		// 'Bad' resource version was used and assigned to group, but file exists. Requires manual search.
+		return nil, errRunBruteProcess
+	}
+
+	// Ensure version from next item commit is different from bump commit.
+	item := group.items[0]
+	itemCommit, errItem := repo.CommitObject(plumbing.NewHash(item))
+	if errItem != nil {
+		// NOTE(review): itemCommit is nil when CommitObject fails; format the known hash string instead.
+		return nil, fmt.Errorf("can't get item commit object %s > %w", item, errItem)
+	}
+
+	itemMetaHash, itemMetaFile, errItem := getFileHashFromCommit(itemCommit, resourceMetaPath)
+	if errItem != nil {
+		// How it's possible to not have meta file in commit before bump ?
+		// @todo case looks impossible, maybe makes sense to panic here
+		if errors.Is(errItem, object.ErrFileNotFound) {
+			return nil, errRunBruteProcess
+		}
+
+		return nil, fmt.Errorf("can't hash meta file from commit %s - %w", itemCommit.Hash.String(), errItem)
+	}
+
+	// Hashes don't match, as expected
+	if originalHash != itemMetaHash {
+		// ensure real version is different
+		itemMetaYaml, errMeta := loadYamlFileFromBytes(itemMetaFile, resourceMetaPath)
+		if errMeta != nil {
+			return nil, fmt.Errorf("YAML load item commit %s > %w", itemCommit.Hash.String(), errMeta)
+		}
+
+		itemVersion := sync.GetMetaVersion(itemMetaYaml)
+		// Version match when shouldn't
+		if itemVersion == currentVersion {
+			return nil, errRunBruteProcess
+		}
+	} else {
+		// File hashes match when shouldn't
+		return nil, errRunBruteProcess
+	}
+
+	return sectionCommit, nil
+}
+
+func getFileHashFromCommit(c *object.Commit, path string) (string, *object.File, error) {
+	file, err := c.File(path)
+	if err != nil {
+		return "", nil, err
+	}
+
+	hash := file.Hash.String()
+
+	return hash, file, err
+}
+
+func loadYamlFileFromBytes(file *object.File, path string) (map[string]any, error) {
+	
reader, errIt := file.Blob.Reader() + if errIt != nil { + return nil, fmt.Errorf("can't read %s > %w", path, errIt) + } + + contents, errIt := io.ReadAll(reader) + if errIt != nil { + return nil, fmt.Errorf("can't read %s > %w", path, errIt) + } + + yamlFile, errIt := sync.LoadYamlFileFromBytes(contents) + if errIt != nil { + return nil, fmt.Errorf("YAML load %s > %w", path, errIt) + } + + return yamlFile, nil +} diff --git a/actionSync.variables.go b/actionSync.variables.go new file mode 100644 index 0000000..8b4c220 --- /dev/null +++ b/actionSync.variables.go @@ -0,0 +1,346 @@ +package plasmactlbump + +import ( + "context" + "errors" + "fmt" + "github.com/vbauerster/mpb/v8" + "github.com/vbauerster/mpb/v8/decor" + "io" + "log/slog" + "path/filepath" + "runtime" + "strings" + async "sync" + "time" + + "github.com/cespare/xxhash/v2" + "github.com/go-git/go-git/v5" + "github.com/go-git/go-git/v5/plumbing/object" + "github.com/go-git/go-git/v5/plumbing/storer" + "github.com/launchrctl/launchr" + "github.com/skilld-labs/plasmactl-bump/v2/pkg/sync" +) + +func (s *SyncAction) populateTimelineVars(buildInv *sync.Inventory) error { + if s.filterByResourceUsage { + // Quick return in case of empty usage pool. + usedResources := buildInv.GetUsedResources() + if len(usedResources) == 0 { + return nil + } + } + + filesCrawler := sync.NewFilesCrawler(s.domainDir) + groupedFiles, err := filesCrawler.FindVarsFiles("") + if err != nil { + return fmt.Errorf("can't get vars files > %w", err) + } + + var varsFiles []string + for _, paths := range groupedFiles { + varsFiles = append(varsFiles, paths...) 
+ } + + ctx, cancel := context.WithCancel(context.Background()) + defer cancel() + + var wg async.WaitGroup + var mx async.Mutex + + maxWorkers := min(runtime.NumCPU(), len(varsFiles)) + workChan := make(chan string, len(varsFiles)) + errorChan := make(chan error, 1) + + //var p *pterm.ProgressbarPrinter + var p *mpb.Progress + var b *mpb.Bar + if s.showProgress { + //p, _ = pterm.DefaultProgressbar.WithTotal(len(varsFiles)).WithTitle("Processing variables files").Start() + p = mpb.New(mpb.WithWidth(64)) + b = p.New(int64(len(varsFiles)), + //mpb.BarStyle().Lbound("╢").Filler("▌").Tip("▌").Padding("░").Rbound("╟"), + mpb.BarStyle(), + mpb.PrependDecorators( + decor.Name("Processing variables files:", decor.WC{C: decor.DindentRight | decor.DextraSpace}), + decor.OnComplete(decor.AverageETA(decor.ET_STYLE_GO), "done in "), + mpbTimeDecorator(time.Now()), + decor.CountersNoUnit(" [%d/%d] "), + ), + mpb.AppendDecorators(decor.Percentage()), + ) + } + + for i := 0; i < maxWorkers; i++ { + go func(workerID int) { + for { + select { + case <-ctx.Done(): + return + case varsFile, ok := <-workChan: + if !ok { + return + } + if err = s.findVariableUpdateTime(varsFile, buildInv, s.domainDir, &mx); err != nil { + if p != nil { + p.Shutdown() + //_, _ = p.Stop() + } + + select { + case errorChan <- fmt.Errorf("worker %d error processing %s: %w", workerID, varsFile, err): + cancel() + default: + } + return + } + + if b != nil { + b.Increment() + //p.Increment() + } + + wg.Done() + } + } + }(i) + } + + for _, f := range varsFiles { + wg.Add(1) + workChan <- f + } + close(workChan) + + go func() { + wg.Wait() + close(errorChan) + p.Wait() + }() + + for err = range errorChan { + if err != nil { + return err + } + } + + return nil +} + +func (s *SyncAction) findVariableUpdateTime(varsFile string, inv *sync.Inventory, gitPath string, mx *async.Mutex) error { + repo, err := git.PlainOpen(gitPath) + if err != nil { + return fmt.Errorf("%s - %w", gitPath, err) + } + + ref, err := 
repo.Head() + if err != nil { + return fmt.Errorf("can't get HEAD ref > %w", err) + } + + var varsYaml map[string]any + hashesMap := make(map[string]*hashStruct) + variablesMap := sync.NewOrderedMap[*sync.Variable]() + isVault := sync.IsVaultFile(varsFile) + + varsYaml, err = sync.LoadVariablesFile(filepath.Join(s.buildDir, varsFile), s.vaultPass, isVault) + if err != nil { + return err + } + + for k, value := range varsYaml { + v := sync.NewVariable(varsFile, k, hashString(fmt.Sprint(value)), isVault) + isUsed := inv.IsUsedVariable(s.filterByResourceUsage, v.GetName(), v.GetPlatform()) + if !isUsed { + // launchr.Term().Warning().Printfln("Unused variable %s - %s", v.GetName(), v.GetPath()) + continue + } + + variablesMap.Set(k, v) + + if _, ok := hashesMap[k]; !ok { + hashesMap[k] = &hashStruct{} + } + + hashesMap[k].hash = fmt.Sprint(v.GetHash()) + hashesMap[k].hashTime = time.Now() + hashesMap[k].author = buildHackAuthor + } + + varsFileHash := "" + + // Used to set versions after difference found. + // To prevent assigning same versions multiple times. + var danglingCommit *object.Commit + + toIterate := variablesMap.ToDict() + cIter, err := repo.Log(&git.LogOptions{From: ref.Hash()}) + if err != nil { + return err + } + + remainingDebug := len(toIterate) + err = cIter.ForEach(func(c *object.Commit) error { + if len(toIterate) == 0 { + return storer.ErrStop + } + + if len(toIterate) != remainingDebug { + remainingDebug = len(toIterate) + launchr.Log().Debug(fmt.Sprintf("Remaining unidentified variables, %s - %d", varsFile, remainingDebug)) + } + + file, errIt := c.File(varsFile) + if errIt != nil { + if !errors.Is(errIt, object.ErrFileNotFound) { + return fmt.Errorf("opening file %s in commit %s > %w", varsFile, c.Hash, errIt) + } + + return storer.ErrStop + } + + commitFileHash := file.Hash.String() + + // No need to iterate file if it's the same between commits. 
+ if varsFileHash == commitFileHash { + danglingCommit = c + + return nil + } + + if danglingCommit != nil { + for k := range toIterate { + hashesMap[k].hash = danglingCommit.Hash.String() + hashesMap[k].hashTime = danglingCommit.Author.When + hashesMap[k].author = danglingCommit.Author.Name + } + + danglingCommit = nil + } + + varFile, errIt := loadVariablesFileFromBytes(file, varsFile, s.vaultPass, isVault) + if errIt != nil { + if strings.Contains(errIt.Error(), "did not find expected key") || + strings.Contains(errIt.Error(), "did not find expected comment or line break") || + strings.Contains(errIt.Error(), "could not find expected") { + launchr.Log().Warn("Bad YAML structured detected", + slog.String("file", varsFile), + slog.String("commit", c.Hash.String()), + slog.String("error", errIt.Error()), + ) + + return nil + } + + if strings.Contains(errIt.Error(), "invalid password for vault") { + launchr.Log().Warn("Invalid password for vault", + slog.String("file", varsFile), + slog.String("commit", c.Hash.String()), + ) + + return storer.ErrStop + } + + if strings.Contains(errIt.Error(), "invalid secret format") { + launchr.Log().Warn("invalid secret format for vault", + slog.String("file", varsFile), + slog.String("commit", c.Hash.String()), + ) + return nil + } + + return fmt.Errorf("YAML load commit %s > %w", c.Hash, errIt) + } + + varsFileHash = commitFileHash + + for k, v := range toIterate { + prevVar, exists := varFile[k] + if !exists { + // Variable didn't exist before, take current hash as version + delete(toIterate, k) + continue + } + + prevVarHash := hashString(fmt.Sprint(prevVar)) + if v.GetHash() != prevVarHash { + // Variable exists, hashes don't match, stop iterating + delete(toIterate, k) + continue + } + + hashesMap[k].hash = c.Hash.String() + hashesMap[k].hashTime = c.Author.When + hashesMap[k].author = c.Author.Name + } + + return nil + }) + + if err != nil { + return fmt.Errorf("git log error > %w", err) + } + + if danglingCommit != nil { + 
for k := range toIterate { + hashesMap[k].hash = danglingCommit.Hash.String() + hashesMap[k].hashTime = danglingCommit.Author.When + hashesMap[k].author = danglingCommit.Author.Name + } + + danglingCommit = nil + } + + mx.Lock() + defer mx.Unlock() + + for n, hm := range hashesMap { + v, _ := variablesMap.Get(n) + version := hm.hash[:13] + launchr.Log().Debug("add variable to timeline", + slog.String("variable", v.GetName()), + slog.String("version", version), + slog.Time("date", hm.hashTime), + slog.String("path", v.GetPath()), + ) + + if hm.author == buildHackAuthor { + msg := fmt.Sprintf("Value of `%s` doesn't match HEAD commit", n) + if !s.allowOverride { + return errors.New(msg) + } + + launchr.Log().Warn(msg) + } + + tri := sync.NewTimelineVariablesItem(version, hm.hash, hm.hashTime) + tri.AddVariable(v) + + s.timeline = sync.AddToTimeline(s.timeline, tri) + } + + return err +} + +func hashString(item string) uint64 { + return xxhash.Sum64String(item) +} + +func loadVariablesFileFromBytes(file *object.File, path, vaultPass string, isVault bool) (map[string]any, error) { + reader, errIt := file.Blob.Reader() + if errIt != nil { + return nil, fmt.Errorf("can't read %s > %w", path, errIt) + } + + contents, errIt := io.ReadAll(reader) + if errIt != nil { + return nil, fmt.Errorf("can't read %s > %w", path, errIt) + } + + varFile, errIt := sync.LoadVariablesFileFromBytes(contents, vaultPass, isVault) + if errIt != nil { + return nil, fmt.Errorf("YAML load %s > %w", path, errIt) + } + + return varFile, nil +} diff --git a/go.mod b/go.mod index c4fd90f..63fbc9d 100644 --- a/go.mod +++ b/go.mod @@ -8,9 +8,15 @@ require ( github.com/launchrctl/compose v0.14.0 github.com/launchrctl/keyring v0.3.0 github.com/launchrctl/launchr v0.17.1 - github.com/pterm/pterm v0.12.80 github.com/sosedoff/ansible-vault-go v0.2.0 github.com/stevenle/topsort v0.2.0 + github.com/vbauerster/mpb/v8 v8.9.1 +) + +require ( + github.com/VividCortex/ewma v1.2.0 // indirect + 
github.com/acarl005/stripansi v0.0.0-20180116102854-5a71ef0e047d // indirect + github.com/pterm/pterm v0.12.80 // indirect ) require ( diff --git a/go.sum b/go.sum index 5b133c3..edeaf4c 100644 --- a/go.sum +++ b/go.sum @@ -35,6 +35,10 @@ github.com/Microsoft/go-winio v0.6.2 h1:F2VQgta7ecxGYO8k3ZZz3RS8fVIXVxONVUPlNERo github.com/Microsoft/go-winio v0.6.2/go.mod h1:yd8OoFMLzJbo9gZq8j5qaps8bJ9aShtEA8Ipt1oGCvU= github.com/ProtonMail/go-crypto v1.1.5 h1:eoAQfK2dwL+tFSFpr7TbOaPNUbPiJj4fLYwwGE1FQO4= github.com/ProtonMail/go-crypto v1.1.5/go.mod h1:rA3QumHc/FZ8pAHreoekgiAbzpNsfQAosU5td4SnOrE= +github.com/VividCortex/ewma v1.2.0 h1:f58SaIzcDXrSy3kWaHNvuJgJ3Nmz59Zji6XoJR/q1ow= +github.com/VividCortex/ewma v1.2.0/go.mod h1:nz4BbCtbLyFDeC9SUHbtcT5644juEuWfUAUnGx7j5l4= +github.com/acarl005/stripansi v0.0.0-20180116102854-5a71ef0e047d h1:licZJFw2RwpHMqeKTCYkitsPqHNxTmd4SNR5r94FGM8= +github.com/acarl005/stripansi v0.0.0-20180116102854-5a71ef0e047d/go.mod h1:asat636LX7Bqt5lYEZ27JNDcqxfjdBQuJ/MM4CN/Lzo= github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= @@ -456,6 +460,8 @@ github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/ github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA= github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= +github.com/vbauerster/mpb/v8 v8.9.1 h1:LH5R3lXPfE2e3lIGxN7WNWv3Hl5nWO6LRi2B0L0ERHw= +github.com/vbauerster/mpb/v8 v8.9.1/go.mod h1:4XMvznPh8nfe2NpnDo1QTPvW9MVkUhbG90mPWvmOzcQ= github.com/xanzy/ssh-agent v0.3.3 h1:+/15pJfg/RsTxqYcX6fHqOXZwwMP+2VyYWJeWM2qQFM= 
github.com/xanzy/ssh-agent v0.3.3/go.mod h1:6dzNDKs0J9rVPHPhaGCukekBHKqfl+L3KghI1Bc68Uw= github.com/xo/terminfo v0.0.0-20210125001918-ca9a967f8778/go.mod h1:2MuV+tbUrU1zIOPMxZ5EncGwgmMJsa+9ucAQZXxsObs= diff --git a/pkg/sync/inventory.go b/pkg/sync/inventory.go index 9939bbe..922961b 100644 --- a/pkg/sync/inventory.go +++ b/pkg/sync/inventory.go @@ -61,7 +61,10 @@ type Inventory struct { dependsOn map[string]*OrderedMap[bool] topOrder []string - variablesCalculated bool + resourcesUsageCalculated bool + usedResources map[string]bool + + variablesUsageCalculated bool variableVariablesDependencyMap map[string]map[string]*VariableDependency variableResourcesDependencyMap map[string]map[string][]string @@ -274,18 +277,18 @@ func (i *Inventory) lookupDependenciesRecursively(resourceName string, resources // GetChangedResources returns an OrderedResourceMap containing the resources that have been modified, based on the provided list of modified files. // It iterates over the modified files, builds a resource from each file path, and adds it to the result map if it is not already present. 
-func (i *Inventory) GetChangedResources(files []string) *OrderedMap[*Resource] { - resources := NewOrderedMap[*Resource]() - for _, path := range files { - resource := BuildResourceFromPath(path, i.sourceDir) - if resource == nil { - continue - } - if _, ok := resources.Get(resource.GetName()); ok { - continue - } - resources.Set(resource.GetName(), resource) - } - - return resources -} +//func (i *Inventory) GetChangedResources(files []string) *OrderedMap[*Resource] { +// resources := NewOrderedMap[*Resource]() +// for _, path := range files { +// resource := BuildResourceFromPath(path, i.sourceDir) +// if resource == nil { +// continue +// } +// if _, ok := resources.Get(resource.GetName()); ok { +// continue +// } +// resources.Set(resource.GetName(), resource) +// } +// +// return resources +//} diff --git a/pkg/sync/inventory.resource.go b/pkg/sync/inventory.resource.go index 8b08b0b..e141a9f 100644 --- a/pkg/sync/inventory.resource.go +++ b/pkg/sync/inventory.resource.go @@ -265,7 +265,10 @@ func (m *OrderedMap[T]) Get(key string) (T, bool) { // Keys returns the ordered keys from the [OrderedMap]. func (m *OrderedMap[T]) Keys() []string { - return m.keys + var keys []string + keys = append(keys, m.keys...) + + return keys } // OrderBy updates the order of keys in the [OrderedMap] based on the orderList. @@ -326,3 +329,114 @@ func (m *OrderedMap[T]) ToDict() map[string]T { } return dict } + +// GetUsedResources returns list of used resources. +func (i *Inventory) GetUsedResources() map[string]bool { + if !i.resourcesUsageCalculated { + panic("use inventory.CalculateResourcesUsage first") + } + + return i.usedResources +} + +// CalculateResourcesUsage parse platform playbooks and determine resources used in platform. 
+func (i *Inventory) CalculateResourcesUsage() error {
+	file, err := os.ReadFile(filepath.Join(i.sourceDir, "platform/platform.yaml"))
+	if err != nil {
+		if os.IsNotExist(err) {
+			return fmt.Errorf("platform/platform.yaml playbook doesn't exist")
+		}
+
+		return err
+	}
+
+	var platformData []any
+	err = yaml.Unmarshal(file, &platformData)
+	if err != nil {
+		return err
+	}
+
+	var playbooks []string
+	resources := make(map[string]bool)
+
+	for _, item := range platformData {
+		if m, ok := item.(map[string]any); ok {
+			for k, val := range m {
+				if k == "import_playbook" {
+					playbookName, okV := val.(string)
+					if okV {
+						cleanPath := filepath.Clean(strings.ReplaceAll(playbookName, "../", ""))
+						playbooks = append(playbooks, filepath.Join(i.sourceDir, cleanPath))
+					}
+				}
+
+				extractPlaybookRoles(resources, k, val)
+			}
+		}
+	}
+
+	for _, playbook := range playbooks {
+		var playbookData []any
+		file, err = os.ReadFile(filepath.Clean(playbook))
+		if err != nil {
+			return err
+		}
+
+		err = yaml.Unmarshal(file, &playbookData)
+		if err != nil {
+			return err
+		}
+
+		for _, item := range playbookData {
+			if m, ok := item.(map[string]any); ok {
+				for k, val := range m {
+					extractPlaybookRoles(resources, k, val)
+				}
+			}
+		}
+	}
+
+	usedResourcesWithDependencies := make(map[string]bool)
+	for r := range resources {
+		mrn := strings.ReplaceAll(r, ".", "__")
+		deps := i.GetDependsOnResources(mrn, -1)
+
+		usedResourcesWithDependencies[mrn] = true
+		for d := range deps {
+			usedResourcesWithDependencies[d] = true
+		}
+	}
+
+	i.usedResources = usedResourcesWithDependencies
+	i.resourcesUsageCalculated = true
+
+	return nil
+}
+
+func extractPlaybookRoles(result map[string]bool, k string, val any) {
+	if k != "roles" {
+		return
+	}
+
+	if s, ok := val.([]any); ok {
+		for _, i := range s {
+			if v, okV := i.(string); okV {
+				result[v] = true
+				continue
+			}
+
+			if m, okM := i.(map[string]any); okM {
+				role, okR := m["role"]
+				if !okR {
+					continue // skip entries without a "role" key; keep scanning remaining roles
+				}
+
+				if r, okV := role.(string); okV {
+					result[r] = true
+
+					continue
+				}
+			}
+		}
+	}
+}
diff --git a/pkg/sync/inventory.variable.go b/pkg/sync/inventory.variable.go
index c180048..5bb6e72 100644
--- a/pkg/sync/inventory.variable.go
+++ b/pkg/sync/inventory.variable.go
@@ -102,10 +102,10 @@ func (v *Variable) IsVault() bool {
 }
 
 // GetVariableResources returns list of resources which depends on variable.
-func (i *Inventory) GetVariableResources(variableName, variablePlatform string) ([]string, error) {
+func (i *Inventory) GetVariableResources(variableName, variablePlatform string) []string {
 	var result []string
 
-	if !i.variablesCalculated {
+	if !i.variablesUsageCalculated {
 		panic("use inventory.CalculateVariablesUsage first")
 	}
 
@@ -132,7 +132,7 @@ func (i *Inventory) GetVariableResources(variableName, variablePlatform string)
 		}
 	}
 
-	return result, nil
+	return result
 }
 
 // GetVariableResources returns list of variables which depends on variable.
@@ -171,7 +171,7 @@ func (i *Inventory) CalculateVariablesUsage(vaultpass string) error {
 	i.variableVariablesDependencyMap = variableVariablesDependencyMap
 	i.variableResourcesDependencyMap = variableResourcesDependencyMap
 
-	i.variablesCalculated = true
+	i.variablesUsageCalculated = true
 
 	return nil
 }
@@ -623,7 +623,7 @@ func getPathPrefix(filePath string, parts int) string {
 func extractLinesWithVariables(filePath string) ([]string, error) {
 	file, err := os.Open(filepath.Clean(filePath))
 	if err != nil {
-		return nil, fmt.Errorf("error opening file %s: %w", filePath, err)
+		return nil, fmt.Errorf("opening file %s: %w", filePath, err)
 	}
 	defer file.Close()
 
@@ -640,8 +640,53 @@ func extractLinesWithVariables(filePath string) ([]string, error) {
 		}
 	}
 	if err = scanner.Err(); err != nil {
-		return nil, fmt.Errorf("error reading file %s: %w", filePath, err)
+		return nil, fmt.Errorf("reading file %s: %w", filePath, err)
 	}
 
 	return linesWithVariables, nil
 }
+
+// IsUsedVariable checks if variable used in any resource.
+// checkResourcesUsage adds additional check if resources are used in platform.
+func (i *Inventory) IsUsedVariable(checkResourcesUsage bool, variableName, variablePlatform string) bool {
+	if !checkResourcesUsage {
+		_, okM := i.variableResourcesDependencyMap[variableName][variablePlatform]
+		if okM {
+			return okM
+		}
+
+		variablesList := make(map[string]map[string]bool)
+		i.getVariableVariables(variableName, variablePlatform, variablesList)
+
+		for v, m := range variablesList {
+			if _, ok := i.variableResourcesDependencyMap[v]; !ok {
+				continue
+			}
+
+			for p := range m {
+				_, ok := i.variableResourcesDependencyMap[v][p]
+				if !ok {
+					continue
+				}
+
+				return true
+			}
+		}
+
+		return false
+	}
+
+	usedResources := i.GetUsedResources()
+	if len(usedResources) == 0 {
+		return false
+	}
+
+	resources := i.GetVariableResources(variableName, variablePlatform)
+	for _, item := range resources {
+		if usedResources[item] {
+			return true
+		}
+	}
+
+	return false
+}
diff --git a/pkg/sync/timeline.go b/pkg/sync/timeline.go
index aee0dce..429488c 100644
--- a/pkg/sync/timeline.go
+++ b/pkg/sync/timeline.go
@@ -7,6 +7,11 @@ import (
 	"github.com/launchrctl/launchr"
 )
 
+const (
+	SortAsc  = "asc"  // SortAsc const.
+	SortDesc = "desc" // SortDesc const.
+)
+
 // TimelineItem is interface for storing commit, date and version of propagated items.
 // Storing such items in slice allows us to propagate items in the same order they were changed.
 type TimelineItem interface {
@@ -193,14 +198,17 @@ func AddToTimeline(list []TimelineItem, item TimelineItem) []TimelineItem {
 }
 
 // SortTimeline sorts timeline items in slice.
-func SortTimeline(list []TimelineItem) {
+func SortTimeline(list []TimelineItem, order string) {
 	sort.Slice(list, func(i, j int) bool {
 		dateI := list[i].GetDate()
 		dateJ := list[j].GetDate()
 
-		// First, compare by date
+		// Determine the date comparison based on the order
 		if !dateI.Equal(dateJ) {
-			return dateI.Before(dateJ)
+			if order == SortAsc {
+				return dateI.Before(dateJ)
+			}
+			return dateI.After(dateJ)
 		}
 
 		// If dates are the same, prioritize by type
@@ -211,8 +219,8 @@
 			// Both are Variables, maintain current order
 			return false
 		case *TimelineResourcesItem:
-			// Variables come before Resources
-			return true
+			// Variables come before Resources if asc, after if desc
+			return order == SortAsc
 		default:
 			// Variables come before unknown types
 			return true
@@ -220,8 +228,8 @@
 		case *TimelineResourcesItem:
 			switch list[j].(type) {
 			case *TimelineVariablesItem:
-				// Resources come after Variables
-				return false
+				// Resources come after Variables if asc, before if desc
+				return order == SortDesc
 			case *TimelineResourcesItem:
 				// Both are Resources, maintain current order
 				return false
diff --git a/plugin.go b/plugin.go
index e6b9940..e811165 100644
--- a/plugin.go
+++ b/plugin.go
@@ -45,6 +45,8 @@ func (p *Plugin) DiscoverActions(_ context.Context) ([]*action.Action, error) {
 	doSync := input.Opt("sync").(bool)
 	dryRun := input.Opt("dry-run").(bool)
 	allowOverride := input.Opt("allow-override").(bool)
+	filterByResourceUsage := input.Opt("playbook-resources").(bool)
+	commitsAfter := input.Opt("commits-after").(string)
 	vaultpass := input.Opt("vault-pass").(string)
 	last := input.Opt("last").(bool)
 
@@ -70,10 +72,12 @@ func (p *Plugin) DiscoverActions(_ context.Context) ([]*action.Action, error) {
 			buildDir:    ".compose/build",
 			packagesDir: ".compose/packages",
 
-			dryRun:        dryRun,
-			allowOverride: allowOverride,
-			vaultPass:     vaultpass,
-			showProgress:  showProgress,
+			dryRun:                dryRun,
+			filterByResourceUsage: filterByResourceUsage,
+			commitsAfter:          commitsAfter,
+			allowOverride:         allowOverride,
+			vaultPass:             vaultpass,
+			showProgress:          showProgress,
 		}
 
 		err := syncAction.Execute()