Mirror of https://github.com/woodpecker-ci/woodpecker.git, synced 2025-10-22 00:24:58 +00:00
Renamed procs/jobs to steps in code (#1331)
Renamed `procs` to `steps` in code for issue #1288.

Co-authored-by: Harikesh Prajapati <harikesh.prajapati@druva.com>
Co-authored-by: qwerty287 <ndev@web.de>
Co-authored-by: qwerty287 <80460567+qwerty287@users.noreply.github.com>
Co-authored-by: 6543 <6543@obermui.de>
@@ -85,7 +85,7 @@ func loop(c *cli.Context) error {
 		log.Logger = log.With().Caller().Logger()
 	}
 
-	counter.Polling = c.Int("max-procs")
+	counter.Polling = c.Int("max-workflows")
 	counter.Running = 0
 
 	if c.Bool("healthcheck") {
@@ -139,7 +139,7 @@ func loop(c *cli.Context) error {
 	backend.Init(context.WithValue(ctx, types.CliContext, c))
 
 	var wg sync.WaitGroup
-	parallel := c.Int("max-procs")
+	parallel := c.Int("max-workflows")
 	wg.Add(parallel)
 
 	// new engine
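The renamed flag still bounds how many workflows the agent runs at once: `parallel` goroutines are started and the WaitGroup blocks until all of them exit. A minimal sketch of that pattern, with a hypothetical runOnce standing in for the agent's runner loop:

// Sketch only: mirrors the wg.Add(parallel) pattern above.
// runOnce is a hypothetical stand-in for the agent's runner.
package main

import (
	"fmt"
	"sync"
)

func runOnce(id int) {
	fmt.Printf("worker %d polling for a workflow\n", id)
}

func main() {
	parallel := 4 // would come from c.Int("max-workflows")
	var wg sync.WaitGroup
	wg.Add(parallel)
	for i := 0; i < parallel; i++ {
		go func(id int) {
			defer wg.Done()
			runOnce(id)
		}(i)
	}
	wg.Wait() // block until every worker goroutine returns
}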
@@ -169,7 +169,7 @@ func loop(c *cli.Context) error {
 					return
 				}
 
-				log.Debug().Msg("polling new jobs")
+				log.Debug().Msg("polling new steps")
 				if err := r.Run(ctx); err != nil {
 					log.Error().Err(err).Msg("pipeline done with error")
 					return
@@ -79,8 +79,8 @@ var flags = []cli.Flag{
 		Usage:   "List of labels to filter tasks on. An agent must be assigned every tag listed in a task to be selected.",
 	},
 	&cli.IntFlag{
-		EnvVars: []string{"WOODPECKER_MAX_PROCS"},
-		Name:    "max-procs",
+		EnvVars: []string{"WOODPECKER_MAX_WORKFLOWS", "WOODPECKER_MAX_PROCS"},
+		Name:    "max-workflows",
 		Usage:   "agent parallel workflows",
 		Value:   1,
 	},
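Listing both environment variables keeps the old name working: with urfave/cli v2, the flag takes its value from the first listed variable that is set, so WOODPECKER_MAX_WORKFLOWS wins when both are present and WOODPECKER_MAX_PROCS remains a fallback. A standalone sketch of the same flag definition (the app name and action body are illustrative only):

// Sketch of the renamed flag, assuming urfave/cli v2.
package main

import (
	"fmt"
	"log"
	"os"

	"github.com/urfave/cli/v2"
)

func main() {
	app := &cli.App{
		Name: "agent-example", // illustrative app name
		Flags: []cli.Flag{
			&cli.IntFlag{
				EnvVars: []string{"WOODPECKER_MAX_WORKFLOWS", "WOODPECKER_MAX_PROCS"},
				Name:    "max-workflows",
				Usage:   "agent parallel workflows",
				Value:   1,
			},
		},
		Action: func(c *cli.Context) error {
			fmt.Println("parallel workflows:", c.Int("max-workflows"))
			return nil
		},
	}
	if err := app.Run(os.Args); err != nil {
		log.Fatal(err)
	}
}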
@@ -304,20 +304,20 @@ func setupCoding(c *cli.Context) (remote.Remote, error) {
 }
 
 func setupMetrics(g *errgroup.Group, _store store.Store) {
-	pendingJobs := promauto.NewGauge(prometheus.GaugeOpts{
+	pendingSteps := promauto.NewGauge(prometheus.GaugeOpts{
 		Namespace: "woodpecker",
-		Name:      "pending_jobs",
-		Help:      "Total number of pending pipeline processes.",
+		Name:      "pending_steps",
+		Help:      "Total number of pending pipeline steps.",
 	})
-	waitingJobs := promauto.NewGauge(prometheus.GaugeOpts{
+	waitingSteps := promauto.NewGauge(prometheus.GaugeOpts{
 		Namespace: "woodpecker",
-		Name:      "waiting_jobs",
+		Name:      "waiting_steps",
 		Help:      "Total number of pipeline waiting on deps.",
 	})
-	runningJobs := promauto.NewGauge(prometheus.GaugeOpts{
+	runningSteps := promauto.NewGauge(prometheus.GaugeOpts{
 		Namespace: "woodpecker",
-		Name:      "running_jobs",
-		Help:      "Total number of running pipeline processes.",
+		Name:      "running_steps",
+		Help:      "Total number of running pipeline steps.",
 	})
 	workers := promauto.NewGauge(prometheus.GaugeOpts{
 		Namespace: "woodpecker",
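Only the metric names change here; each gauge is still created through promauto.NewGauge, which registers it with the default Prometheus registry as woodpecker_<name>. A self-contained sketch of that registration pattern, with an illustrative /metrics endpoint that is not part of this change:

// Sketch of the gauge registration pattern above, assuming
// github.com/prometheus/client_golang; the HTTP server is illustrative only.
package main

import (
	"net/http"

	"github.com/prometheus/client_golang/prometheus"
	"github.com/prometheus/client_golang/prometheus/promauto"
	"github.com/prometheus/client_golang/prometheus/promhttp"
)

var pendingSteps = promauto.NewGauge(prometheus.GaugeOpts{
	Namespace: "woodpecker",
	Name:      "pending_steps",
	Help:      "Total number of pending pipeline steps.",
})

func main() {
	pendingSteps.Set(3) // exported as woodpecker_pending_steps 3
	http.Handle("/metrics", promhttp.Handler())
	_ = http.ListenAndServe(":9090", nil)
}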
@@ -343,9 +343,9 @@ func setupMetrics(g *errgroup.Group, _store store.Store) {
 	g.Go(func() error {
 		for {
 			stats := server.Config.Services.Queue.Info(context.TODO())
-			pendingJobs.Set(float64(stats.Stats.Pending))
-			waitingJobs.Set(float64(stats.Stats.WaitingOnDeps))
-			runningJobs.Set(float64(stats.Stats.Running))
+			pendingSteps.Set(float64(stats.Stats.Pending))
+			waitingSteps.Set(float64(stats.Stats.WaitingOnDeps))
+			runningSteps.Set(float64(stats.Stats.Running))
 			workers.Set(float64(stats.Stats.Workers))
 			time.Sleep(500 * time.Millisecond)
 		}
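The update loop keeps its shape: an errgroup goroutine samples the queue every 500 ms and copies the counts into the renamed gauges. A runnable sketch of that polling loop, with a hypothetical queueStats standing in for server.Config.Services.Queue.Info:

// Sketch of the metrics update loop, assuming golang.org/x/sync/errgroup
// and the prometheus client; queueStats is a hypothetical stats source.
package main

import (
	"time"

	"github.com/prometheus/client_golang/prometheus"
	"github.com/prometheus/client_golang/prometheus/promauto"
	"golang.org/x/sync/errgroup"
)

type stats struct{ Pending, Running int }

func queueStats() stats { return stats{Pending: 2, Running: 1} }

func main() {
	pendingSteps := promauto.NewGauge(prometheus.GaugeOpts{
		Namespace: "woodpecker", Name: "pending_steps", Help: "Pending pipeline steps.",
	})
	runningSteps := promauto.NewGauge(prometheus.GaugeOpts{
		Namespace: "woodpecker", Name: "running_steps", Help: "Running pipeline steps.",
	})

	var g errgroup.Group
	g.Go(func() error {
		for i := 0; i < 3; i++ { // the real loop runs until the server shuts down
			s := queueStats()
			pendingSteps.Set(float64(s.Pending))
			runningSteps.Set(float64(s.Running))
			time.Sleep(500 * time.Millisecond)
		}
		return nil
	})
	_ = g.Wait() // wait for the sampling goroutine to finish
}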