diff --git a/command/commands.go b/command/commands.go
index 0f146dc01..03960e0c5 100644
--- a/command/commands.go
+++ b/command/commands.go
@@ -307,6 +307,16 @@ func Commands(metaPtr *Meta, agentUi cli.Ui) map[string]cli.CommandFactory {
 				Meta: meta,
 			}, nil
 		},
+		"job periodic": func() (cli.Command, error) {
+			return &JobPeriodicCommand{
+				Meta: meta,
+			}, nil
+		},
+		"job periodic force": func() (cli.Command, error) {
+			return &JobPeriodicForceCommand{
+				Meta: meta,
+			}, nil
+		},
 		"job plan": func() (cli.Command, error) {
 			return &JobPlanCommand{
 				Meta: meta,
diff --git a/command/job_periodic.go b/command/job_periodic.go
new file mode 100644
index 000000000..020e2ab29
--- /dev/null
+++ b/command/job_periodic.go
@@ -0,0 +1,36 @@
+package command
+
+import (
+	"strings"
+
+	"github.com/mitchellh/cli"
+)
+
+type JobPeriodicCommand struct {
+	Meta
+}
+
+func (f *JobPeriodicCommand) Name() string { return "periodic" }
+
+func (f *JobPeriodicCommand) Run(args []string) int {
+	return cli.RunResultHelp
+}
+
+func (f *JobPeriodicCommand) Synopsis() string {
+	return "Interact with periodic jobs"
+}
+
+func (f *JobPeriodicCommand) Help() string {
+	helpText := `
+Usage: nomad job periodic <subcommand> [options] [args]
+
+  This command groups subcommands for interacting with periodic jobs.
+
+  Force a periodic job:
+
+      $ nomad job periodic force <job id>
+
+  Please see the individual subcommand help for detailed usage information.
+`
+	return strings.TrimSpace(helpText)
+}
diff --git a/command/job_periodic_force.go b/command/job_periodic_force.go
new file mode 100644
index 000000000..f11d35031
--- /dev/null
+++ b/command/job_periodic_force.go
@@ -0,0 +1,151 @@
+package command
+
+import (
+	"fmt"
+	"strings"
+
+	"github.com/hashicorp/nomad/api"
+	"github.com/posener/complete"
+)
+
+type JobPeriodicForceCommand struct {
+	Meta
+}
+
+func (c *JobPeriodicForceCommand) Help() string {
+	helpText := `
+Usage: nomad job periodic force <job id>
+
+  This command is used to force the creation of a new instance of a periodic job.
+  This is used to immediately run a periodic job, even if it violates the job's
+  prohibit_overlap setting.
+
+General Options:
+
+  ` + generalOptionsUsage() + `
+
+Periodic Force Options:
+
+  -detach
+    Return immediately instead of entering monitor mode. After the force,
+    the evaluation ID will be printed to the screen, which can be used to
+    examine the evaluation using the eval-status command.
+
+  -verbose
+    Display full information.
+`
+
+	return strings.TrimSpace(helpText)
+}
+
+func (c *JobPeriodicForceCommand) Synopsis() string {
+	return "Force the launch of a periodic job"
+}
+
+func (c *JobPeriodicForceCommand) AutocompleteFlags() complete.Flags {
+	return mergeAutocompleteFlags(c.Meta.AutocompleteFlags(FlagSetClient),
+		complete.Flags{
+			"-detach":  complete.PredictNothing,
+			"-verbose": complete.PredictNothing,
+		})
+}
+
+func (c *JobPeriodicForceCommand) AutocompleteArgs() complete.Predictor {
+	return complete.PredictFunc(func(a complete.Args) []string {
+		client, err := c.Meta.Client()
+		if err != nil {
+			return nil
+		}
+
+		resp, _, err := client.Jobs().PrefixList(a.Last)
+		if err != nil {
+			return []string{}
+		}
+
+		// Filter the matches down to periodic jobs
+		matches := make([]string, 0, len(resp))
+		for _, job := range resp {
+			if job.Periodic {
+				matches = append(matches, job.ID)
+			}
+		}
+		return matches
+	})
+}
+
+func (c *JobPeriodicForceCommand) Name() string { return "job periodic force" }
+
+func (c *JobPeriodicForceCommand) Run(args []string) int {
+	var detach, verbose bool
+
+	flags := c.Meta.FlagSet(c.Name(), FlagSetClient)
+	flags.Usage = func() { c.Ui.Output(c.Help()) }
+	flags.BoolVar(&detach, "detach", false, "")
+	flags.BoolVar(&verbose, "verbose", false, "")
+
+	if err := flags.Parse(args); err != nil {
+		return 1
+	}
+
+	// Check that we got exactly one argument
+	args = flags.Args()
+	if l := len(args); l != 1 {
+		c.Ui.Error("This command takes one argument: <job id>")
+		c.Ui.Error(commandErrorText(c))
+		return 1
+	}
+
+	// Truncate the id unless full length is requested
+	length := shortId
+	if verbose {
+		length = fullId
+	}
+
+	// Get the HTTP client
+	client, err := c.Meta.Client()
+	if err != nil {
+		c.Ui.Error(fmt.Sprintf("Error initializing client: %s", err))
+		return 1
+	}
+
+	// Check if the job exists
+	jobID := args[0]
+	jobs, _, err := client.Jobs().PrefixList(jobID)
+	if err != nil {
+		c.Ui.Error(fmt.Sprintf("Error forcing periodic job: %s", err))
+		return 1
+	}
+	// Filter out non-periodic jobs
+	periodicJobs := make([]*api.JobListStub, 0, len(jobs))
+	for _, j := range jobs {
+		if j.Periodic {
+			periodicJobs = append(periodicJobs, j)
+		}
+	}
+	if len(periodicJobs) == 0 {
+		c.Ui.Error(fmt.Sprintf("No periodic job(s) with prefix or id %q found", jobID))
+		return 1
+	}
+	if len(periodicJobs) > 1 {
+		c.Ui.Error(fmt.Sprintf("Prefix matched multiple periodic jobs\n\n%s", createStatusListOutput(periodicJobs)))
+		return 1
+	}
+	jobID = periodicJobs[0].ID
+
+	// Force the evaluation
+	evalID, _, err := client.Jobs().PeriodicForce(jobID, nil)
+	if err != nil {
+		c.Ui.Error(fmt.Sprintf("Error forcing periodic job %q: %s", jobID, err))
+		return 1
+	}
+
+	if detach {
+		c.Ui.Output("Force periodic successful")
+		c.Ui.Output("Evaluation ID: " + evalID)
+		return 0
+	}
+
+	// Detach was not specified, so start monitoring
+	mon := newMonitor(c.Ui, client, length)
+	return mon.monitor(evalID, false)
+}
diff --git a/command/job_periodic_force_test.go b/command/job_periodic_force_test.go
new file mode 100644
index 000000000..fc8caedc6
--- /dev/null
+++ b/command/job_periodic_force_test.go
@@ -0,0 +1,197 @@
+package command
+
+import (
+	"fmt"
+	"testing"
+
+	"github.com/hashicorp/nomad/api"
+	"github.com/hashicorp/nomad/helper"
+	"github.com/hashicorp/nomad/nomad/mock"
+	"github.com/hashicorp/nomad/nomad/structs"
+	"github.com/hashicorp/nomad/testutil"
+	"github.com/mitchellh/cli"
+	"github.com/posener/complete"
+	"github.com/stretchr/testify/require"
+)
+
+func TestJobPeriodicForceCommand_Implements(t *testing.T) {
+	t.Parallel()
+	var _ cli.Command = &JobPeriodicForceCommand{}
+}
+
+func TestJobPeriodicForceCommand_Fails(t *testing.T) {
+	t.Parallel()
+	ui := new(cli.MockUi)
+	cmd := &JobPeriodicForceCommand{Meta: Meta{Ui: ui}}
+
+	// Fails on misuse
+	code := cmd.Run([]string{"some", "bad", "args"})
+	require.Equal(t, 1, code, "expected error")
+	out := ui.ErrorWriter.String()
+	require.Contains(t, out, commandErrorText(cmd), "expected help output")
+	ui.ErrorWriter.Reset()
+
+	code = cmd.Run([]string{"-address=nope", "12"})
+	require.Equal(t, 1, code, "expected error")
+	out = ui.ErrorWriter.String()
+	require.Contains(t, out, "Error forcing periodic job", "expected force error")
+}
+
+func TestJobPeriodicForceCommand_AutocompleteArgs(t *testing.T) {
+	t.Parallel()
+
+	srv, _, url := testServer(t, true, nil)
+	defer srv.Shutdown()
+
+	ui := new(cli.MockUi)
+	cmd := &JobPeriodicForceCommand{Meta: Meta{Ui: ui, flagAddress: url}}
+
+	// Create a fake job that is not periodic
+	state := srv.Agent.Server().State()
+	j := mock.Job()
+	require.NoError(t, state.UpsertJob(1000, j))
+
+	predictor := cmd.AutocompleteArgs()
+
+	res := predictor.Predict(complete.Args{Last: j.ID[:len(j.ID)-5]})
+	require.Empty(t, res)
+
+	// Create another fake job, this one periodic
+	state = srv.Agent.Server().State()
+	j2 := mock.Job()
+	j2.Periodic = &structs.PeriodicConfig{
+		Enabled:         true,
+		Spec:            "spec",
+		SpecType:        "cron",
+		ProhibitOverlap: true,
+		TimeZone:        "test zone",
+	}
+	require.NoError(t, state.UpsertJob(1000, j2))
+
+	res = predictor.Predict(complete.Args{Last: j2.ID[:len(j2.ID)-5]})
+	require.Equal(t, []string{j2.ID}, res)
+
+	res = predictor.Predict(complete.Args{})
+	require.Equal(t, []string{j2.ID}, res)
+}
+
+func TestJobPeriodicForceCommand_NonPeriodicJob(t *testing.T) {
+	t.Parallel()
+	srv, client, url := testServer(t, true, nil)
+	defer srv.Shutdown()
+	testutil.WaitForResult(func() (bool, error) {
+		nodes, _, err := client.Nodes().List(nil)
+		if err != nil {
+			return false, err
+		}
+		if len(nodes) == 0 {
+			return false, fmt.Errorf("missing node")
+		}
+		if _, ok := nodes[0].Drivers["mock_driver"]; !ok {
+			return false, fmt.Errorf("mock_driver not ready")
+		}
+		return true, nil
+	}, func(err error) {
+		require.NoError(t, err)
+	})
+
+	// Register a non-periodic job
+	j := testJob("job_not_periodic")
+
+	ui := new(cli.MockUi)
+	cmd := &JobPeriodicForceCommand{Meta: Meta{Ui: ui, flagAddress: url}}
+
+	resp, _, err := client.Jobs().Register(j, nil)
+	require.NoError(t, err)
+	code := waitForSuccess(ui, client, fullId, t, resp.EvalID)
+	require.Equal(t, 0, code)
+
+	code = cmd.Run([]string{"-address=" + url, "job_not_periodic"})
+	require.Equal(t, 1, code, "expected exit code 1")
+	out := ui.ErrorWriter.String()
+	require.Contains(t, out, "No periodic job(s)", "expected non-periodic error message")
+}
+
+func TestJobPeriodicForceCommand_SuccessfulPeriodicForceDetach(t *testing.T) {
+	t.Parallel()
+	srv, client, url := testServer(t, true, nil)
+	defer srv.Shutdown()
+	testutil.WaitForResult(func() (bool, error) {
+		nodes, _, err := client.Nodes().List(nil)
+		if err != nil {
+			return false, err
+		}
+		if len(nodes) == 0 {
+			return false, fmt.Errorf("missing node")
+		}
+		if _, ok := nodes[0].Drivers["mock_driver"]; !ok {
+			return false, fmt.Errorf("mock_driver not ready")
+		}
+		return true, nil
+	}, func(err error) {
+		require.NoError(t, err)
+	})
+
+	// Register a periodic job
+	j := testJob("job1_is_periodic")
+	j.Periodic = &api.PeriodicConfig{
+		SpecType:        helper.StringToPtr(api.PeriodicSpecCron),
+		Spec:            helper.StringToPtr("*/15 * * * * *"),
+		ProhibitOverlap: helper.BoolToPtr(true),
+		TimeZone:        helper.StringToPtr("Europe/Minsk"),
+	}
+
+	ui := new(cli.MockUi)
+	cmd := &JobPeriodicForceCommand{Meta: Meta{Ui: ui, flagAddress: url}}
+
+	_, _, err := client.Jobs().Register(j, nil)
+	require.NoError(t, err)
+
+	code := cmd.Run([]string{"-address=" + url, "-detach", "job1_is_periodic"})
+	require.Equal(t, 0, code, "expected no error code")
+	out := ui.OutputWriter.String()
+	require.Contains(t, out, "Force periodic successful")
+	require.Contains(t, out, "Evaluation ID:")
+}
+
+func TestJobPeriodicForceCommand_SuccessfulPeriodicForce(t *testing.T) {
+	t.Parallel()
+	srv, client, url := testServer(t, true, nil)
+	defer srv.Shutdown()
+	testutil.WaitForResult(func() (bool, error) {
+		nodes, _, err := client.Nodes().List(nil)
+		if err != nil {
+			return false, err
+		}
+		if len(nodes) == 0 {
+			return false, fmt.Errorf("missing node")
+		}
+		if _, ok := nodes[0].Drivers["mock_driver"]; !ok {
+			return false, fmt.Errorf("mock_driver not ready")
+		}
+		return true, nil
+	}, func(err error) {
+		require.NoError(t, err)
+	})
+
+	// Register a periodic job
+	j := testJob("job2_is_periodic")
+	j.Periodic = &api.PeriodicConfig{
+		SpecType:        helper.StringToPtr(api.PeriodicSpecCron),
+		Spec:            helper.StringToPtr("*/15 * * * * *"),
+		ProhibitOverlap: helper.BoolToPtr(true),
+		TimeZone:        helper.StringToPtr("Europe/Minsk"),
+	}
+
+	ui := new(cli.MockUi)
+	cmd := &JobPeriodicForceCommand{Meta: Meta{Ui: ui, flagAddress: url}}
+
+	_, _, err := client.Jobs().Register(j, nil)
+	require.NoError(t, err)
+
+	code := cmd.Run([]string{"-address=" + url, "job2_is_periodic"})
+	require.Equal(t, 0, code, "expected no error code")
+	out := ui.OutputWriter.String()
+	require.Contains(t, out, "Monitoring evaluation")
+	require.Contains(t, out, "finished with status \"complete\"")
+}
diff --git a/website/source/docs/commands/job/periodic-force.html.md.erb b/website/source/docs/commands/job/periodic-force.html.md.erb
new file mode 100644
index 000000000..106713e7d
--- /dev/null
+++ b/website/source/docs/commands/job/periodic-force.html.md.erb
@@ -0,0 +1,61 @@
+---
+layout: "docs"
+page_title: "Commands: job periodic force"
+sidebar_current: "docs-commands-job-periodic-force"
+description: >
+  The job periodic force command is used to force the evaluation of a periodic job.
+---
+
+# Command: job periodic force
+
+The `job periodic force` command is used to [force the evaluation](/api/jobs.html#force-new-periodic-instance)
+of a [periodic job](/docs/job-specification/periodic.html).
+
+## Usage
+
+```
+nomad job periodic force [options] <job id>
+```
+
+The `job periodic force` command requires a single argument, specifying the ID of the
+job. This job must be a periodic job. This is used to immediately run a periodic job,
+even if it violates the job's `prohibit_overlap` setting.
+
+By default, on a successful force the command will enter an interactive monitor
+and display log information detailing the scheduling decisions and placement
+information for the forced evaluation. The monitor will exit after scheduling
+has finished or failed.
+
+## General Options
+
+<%= partial "docs/commands/_general_options" %>
+
+## Periodic Force Options
+
+* `-detach`: Return immediately instead of monitoring. A new evaluation ID
+  will be output, which can be used to examine the evaluation using the
+  [eval status](/docs/commands/eval-status.html) command.
+
+* `-verbose`: Show full information.
+
+## Examples
+
+Force the evaluation of the job `example`, monitoring placement:
+
+```
+$ nomad job periodic force example
+==> Monitoring evaluation "54b2d6d9"
+    Evaluation triggered by job "example/periodic-1555094493"
+    Allocation "637aee17" created: node "a35ab8fc", group "cache"
+    Allocation "637aee17" status changed: "pending" -> "running" (Tasks are running)
+    Evaluation status changed: "pending" -> "complete"
+==> Evaluation "54b2d6d9" finished with status "complete"
+```
+
+Force the evaluation of the job `example` and return immediately:
+
+```
+$ nomad job periodic force -detach example
+Force periodic successful
+Evaluation ID: 0865fbf3-30de-5f53-0811-821e73e63178
+```
diff --git a/website/source/layouts/docs.erb b/website/source/layouts/docs.erb
index b7070e77d..1675b51f0 100644
--- a/website/source/layouts/docs.erb
+++ b/website/source/layouts/docs.erb
@@ -181,6 +181,9 @@
               <li<%= sidebar_current("docs-commands-job-plan") %>>
                 <a href="/docs/commands/job/plan.html">plan</a>
               </li>
+              <li<%= sidebar_current("docs-commands-job-periodic-force") %>>
+                <a href="/docs/commands/job/periodic-force.html">periodic force</a>
+              </li>
               <li<%= sidebar_current("docs-commands-job-promote") %>>
                 <a href="/docs/commands/job/promote.html">promote</a>
               </li>
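The CLI path added above is a thin wrapper around the Go API client: the force itself is the `client.Jobs().PeriodicForce(jobID, nil)` call visible in `Run`. For anyone who wants to exercise the same endpoint outside the CLI, here is a minimal standalone sketch. It is illustrative only and not part of this change: it assumes the standard `api.DefaultConfig`/`api.NewClient` constructors and the `Evaluations().Info` lookup from `github.com/hashicorp/nomad/api`, and the job ID `"example"` is a placeholder.

```go
package main

import (
	"fmt"
	"log"

	"github.com/hashicorp/nomad/api"
)

func main() {
	// Build a client from the default environment (NOMAD_ADDR, NOMAD_TOKEN, ...).
	client, err := api.NewClient(api.DefaultConfig())
	if err != nil {
		log.Fatalf("error creating client: %v", err)
	}

	// Force a new instance of the periodic job, as `nomad job periodic force`
	// does; the returned value is the ID of the evaluation created for the run.
	evalID, _, err := client.Jobs().PeriodicForce("example", nil)
	if err != nil {
		log.Fatalf("error forcing periodic job: %v", err)
	}
	fmt.Println("Evaluation ID:", evalID)

	// Rough equivalent of following up on the -detach output: look up the
	// evaluation directly instead of entering the CLI's interactive monitor.
	eval, _, err := client.Evaluations().Info(evalID, nil)
	if err != nil {
		log.Fatalf("error reading evaluation: %v", err)
	}
	fmt.Println("Evaluation status:", eval.Status)
}
```

This mirrors what the `-detach` flag prints, leaving monitoring of the returned evaluation to the caller.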