Merge pull request #3047 from hashicorp/f-status-autocomplete

Status autocomplete
Chelsea Komlo
2017-08-25 16:27:53 -04:00
committed by GitHub
12 changed files with 434 additions and 36 deletions

View File

@@ -12,6 +12,8 @@ IMPROVEMENTS:
* cli: node-status displays node version [GH-3002]
* cli: Disable color output when STDOUT is not a TTY [GH-3057]
* cli: Add autocomplete functionality for flags for all CLI commands [GH-3087]
* cli: Add status command which takes any identifier and routes to the
  appropriate status command.
* client: Unmount task directories when alloc is terminal [GH-3006]
* client/template: Allow template to set Vault grace [GH-2947]
* client/template: Template emits events explaining why it is blocked [GH-3001]
@@ -157,7 +159,7 @@ __BACKWARDS INCOMPATIBILITIES:__
prior to this release. A single image is expected by the driver so this
behavior has been changed to take a single path as a string. Jobs using the
`load` command should update the syntax to a single string. [GH-2361]
IMPROVEMENTS:
* core: Handle Serf Reap event [GH-2310]
* core: Update Serf and Memberlist for more reliable gossip [GH-2255]
@@ -203,7 +205,7 @@ BUG FIXES:
* client: Fix remounting alloc dirs after reboots [GH-2391] [GH-2394]
* client: Replace `-` with `_` in environment variable names [GH-2406]
* client: Fix panic and deadlock during client restore state when prestart
  fails [GH-2376]
* config: Fix Consul Config Merging/Copying [GH-2278]
* config: Fix Client reserved resource merging panic [GH-2281]
* server: Fix panic when forwarding Vault derivation requests from non-leader
@@ -218,7 +220,7 @@ IMPROVEMENTS:
BUG FIXES:
* client: Fix panic when upgrading to 0.5.3 [GH-2256]
## 0.5.3 (January 30, 2017)
IMPROVEMENTS:
* core: Introduce parameterized jobs and dispatch command/API [GH-2128]
@@ -319,7 +321,7 @@ IMPROVEMENTS:
* core: Scheduler version enforcement disallows different scheduler version
from making decisions simultaneously [GH-1872]
* core: Introduce node SecretID which can be used to minimize the available
  surface area of RPCs to malicious Nomad Clients [GH-1597]
* core: Add `sticky` volumes which inform the scheduler to prefer placing
updated allocations on the same node and to reuse the `local/` and
`alloc/data` directory from previous allocation allowing semi-persistent
@@ -383,7 +385,7 @@ BUG FIXES:
logger [GH-1886]
* client/fingerprint: Fix inconsistent CPU MHz fingerprinting [GH-1366]
* env/aws: Fix an issue with reserved ports causing placement failures
  [GH-1617]
* discovery: Interpolate all service and check fields [GH-1966]
* discovery: Fix old services not getting removed from Consul on update
[GH-1668]

View File

@@ -145,8 +145,6 @@ func (s *HTTPServer) registerHandlers(enableDebug bool) {
s.mux.HandleFunc("/v1/evaluations", s.wrap(s.EvalsRequest))
s.mux.HandleFunc("/v1/evaluation/", s.wrap(s.EvalSpecificRequest))
s.mux.HandleFunc("/v1/search", s.wrap(s.SearchRequest))
s.mux.HandleFunc("/v1/deployments", s.wrap(s.DeploymentsRequest))
s.mux.HandleFunc("/v1/deployment/", s.wrap(s.DeploymentSpecificRequest))
@@ -169,6 +167,8 @@ func (s *HTTPServer) registerHandlers(enableDebug bool) {
s.mux.HandleFunc("/v1/status/leader", s.wrap(s.StatusLeaderRequest))
s.mux.HandleFunc("/v1/status/peers", s.wrap(s.StatusPeersRequest))
s.mux.HandleFunc("/v1/search", s.wrap(s.SearchRequest))
s.mux.HandleFunc("/v1/operator/", s.wrap(s.OperatorRequest))
s.mux.HandleFunc("/v1/system/gc", s.wrap(s.GarbageCollectRequest))

View File

@@ -138,6 +138,7 @@ func (c *JobStatusCommand) Run(args []string) int {
	// Try querying the job
	jobID := args[0]
	jobs, _, err := client.Jobs().PrefixList(jobID)
	if err != nil {
		c.Ui.Error(fmt.Sprintf("Error querying job: %s", err))

command/status.go (new file, 138 lines)
View File

@@ -0,0 +1,138 @@
package command

import (
	"fmt"
	"strings"

	"github.com/hashicorp/nomad/api/contexts"
	"github.com/mitchellh/cli"
	"github.com/posener/complete"
)

type StatusCommand struct {
	Meta
}

func (s *StatusCommand) Help() string {
	helpText := `
Usage: nomad status [options] <identifier>

  Display the status output for any given resource. The command will
  detect the type of resource being queried and display the appropriate
  status output.

General Options:

  ` + generalOptionsUsage()

	return strings.TrimSpace(helpText)
}

func (c *StatusCommand) Synopsis() string {
	return "Display the status output for a resource"
}

func (c *StatusCommand) AutocompleteFlags() complete.Flags {
	return mergeAutocompleteFlags(c.Meta.AutocompleteFlags(FlagSetClient), nil)
}

func (c *StatusCommand) AutocompleteArgs() complete.Predictor {
	client, _ := c.Meta.Client()
	return complete.PredictFunc(func(a complete.Args) []string {
		// Only predict for the first argument
		if len(a.Completed) > 1 {
			return nil
		}

		resp, err := client.Search().PrefixSearch(a.Last, contexts.All)
		if err != nil {
			return []string{}
		}

		// Flatten the matches from every context into a single candidate list
		final := make([]string, 0)
		for _, matches := range resp.Matches {
			if len(matches) == 0 {
				continue
			}
			final = append(final, matches...)
		}

		return final
	})
}

func (c *StatusCommand) Run(args []string) int {
	flags := c.Meta.FlagSet("status", FlagSetClient)
	flags.Usage = func() { c.Ui.Output(c.Help()) }

	if err := flags.Parse(args); err != nil {
		c.Ui.Error(fmt.Sprintf("Error parsing arguments: %q", err))
		return 1
	}

	// Store the original arguments so we can pass them to the routed command
	argsCopy := args

	// Retrieve the arguments that remain after flag parsing
	args = flags.Args()

	// Get the HTTP client
	client, err := c.Meta.Client()
	if err != nil {
		c.Ui.Error(fmt.Sprintf("Error initializing client: %q", err))
		return 1
	}

	// If no identifier is provided, default to listing jobs
	if len(args) == 0 {
		cmd := &JobStatusCommand{Meta: c.Meta}
		return cmd.Run(argsCopy)
	}

	id := args[len(args)-1]

	// Query for the context associated with the id
	res, err := client.Search().PrefixSearch(id, contexts.All)
	if err != nil {
		c.Ui.Error(fmt.Sprintf("Error querying search with id: %q", err))
		return 1
	}

	if res.Matches == nil {
		c.Ui.Error(fmt.Sprintf("No matches returned for query: %q", id))
		return 1
	}

	var match contexts.Context
	matchCount := 0
	for ctx, vers := range res.Matches {
		if len(vers) == 1 {
			match = ctx
			matchCount++
		}

		// Only a single result should return, as this is a match against a full id
		if matchCount > 1 || len(vers) > 1 {
			c.Ui.Error(fmt.Sprintf("Multiple matches found for id %q", id))
			return 1
		}
	}

	// Route to the status command that handles the matched context
	var cmd cli.Command
	switch match {
	case contexts.Evals:
		cmd = &EvalStatusCommand{Meta: c.Meta}
	case contexts.Nodes:
		cmd = &NodeStatusCommand{Meta: c.Meta}
	case contexts.Allocs:
		cmd = &AllocStatusCommand{Meta: c.Meta}
	case contexts.Jobs:
		cmd = &JobStatusCommand{Meta: c.Meta}
	default:
		c.Ui.Error(fmt.Sprintf("Unable to resolve ID: %q", id))
		return 1
	}

	return cmd.Run(argsCopy)
}
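For readers skimming the new command, here is a minimal standalone sketch of the routing idea: prefix-search an identifier across every context and dispatch on the single context that matched exactly once. It reuses the two-argument PrefixSearch signature seen above; the helper name resolveContext and the literal id are hypothetical, not part of this PR.

package main

import (
	"fmt"

	"github.com/hashicorp/nomad/api"
	"github.com/hashicorp/nomad/api/contexts"
)

// resolveContext mirrors StatusCommand.Run: it returns the one context whose
// prefix search matched the given id exactly once.
func resolveContext(client *api.Client, id string) (contexts.Context, error) {
	res, err := client.Search().PrefixSearch(id, contexts.All)
	if err != nil {
		return "", err
	}

	var match contexts.Context
	count := 0
	for ctx, ids := range res.Matches {
		if len(ids) == 1 {
			match = ctx
			count++
		}
	}
	if count != 1 {
		return "", fmt.Errorf("expected one match for %q, found %d", id, count)
	}
	return match, nil
}

func main() {
	client, err := api.NewClient(api.DefaultConfig())
	if err != nil {
		panic(err)
	}
	ctx, err := resolveContext(client, "example-id") // id is illustrative
	fmt.Println(ctx, err)
}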

command/status_test.go (new file, 185 lines)
View File

@@ -0,0 +1,185 @@
package command

import (
	"fmt"
	"testing"

	"github.com/hashicorp/nomad/command/agent"
	"github.com/hashicorp/nomad/nomad/mock"
	"github.com/hashicorp/nomad/nomad/structs"
	"github.com/hashicorp/nomad/testutil"
	"github.com/mitchellh/cli"
	"github.com/posener/complete"
	"github.com/stretchr/testify/assert"
)

func TestStatusCommand_Run_JobStatus(t *testing.T) {
	assert := assert.New(t)
	t.Parallel()

	srv, _, url := testServer(t, true, nil)
	defer srv.Shutdown()

	ui := new(cli.MockUi)
	cmd := &StatusCommand{Meta: Meta{Ui: ui, flagAddress: url}}

	// Create a fake job
	state := srv.Agent.Server().State()
	j := mock.Job()
	assert.Nil(state.UpsertJob(1000, j))

	// Query to check the job status
	if code := cmd.Run([]string{"-address=" + url, j.ID}); code != 0 {
		t.Fatalf("expected exit 0, got: %d", code)
	}

	out := ui.OutputWriter.String()
	assert.Contains(out, j.ID)

	ui.OutputWriter.Reset()
}

func TestStatusCommand_Run_EvalStatus(t *testing.T) {
	assert := assert.New(t)
	t.Parallel()

	srv, _, url := testServer(t, true, nil)
	defer srv.Shutdown()

	ui := new(cli.MockUi)
	cmd := &StatusCommand{Meta: Meta{Ui: ui, flagAddress: url}}

	// Create a fake eval
	state := srv.Agent.Server().State()
	eval := mock.Eval()
	assert.Nil(state.UpsertEvals(1000, []*structs.Evaluation{eval}))

	// Query to check the eval status
	if code := cmd.Run([]string{"-address=" + url, eval.ID}); code != 0 {
		t.Fatalf("expected exit 0, got: %d", code)
	}

	out := ui.OutputWriter.String()
	assert.Contains(out, eval.ID[:shortId])

	ui.OutputWriter.Reset()
}

func TestStatusCommand_Run_NodeStatus(t *testing.T) {
	assert := assert.New(t)
	t.Parallel()

	// Start in dev mode so we get a node registration
	srv, client, url := testServer(t, true, func(c *agent.Config) {
		c.NodeName = "mynode"
	})
	defer srv.Shutdown()

	ui := new(cli.MockUi)
	cmd := &StatusCommand{Meta: Meta{Ui: ui, flagAddress: url}}

	// Wait for a node to appear
	var nodeID string
	testutil.WaitForResult(func() (bool, error) {
		nodes, _, err := client.Nodes().List(nil)
		if err != nil {
			return false, err
		}
		if len(nodes) == 0 {
			return false, fmt.Errorf("missing node")
		}
		nodeID = nodes[0].ID
		return true, nil
	}, func(err error) {
		t.Fatalf("err: %s", err)
	})

	// Query to check the node status
	if code := cmd.Run([]string{"-address=" + url, nodeID}); code != 0 {
		t.Fatalf("expected exit 0, got: %d", code)
	}

	out := ui.OutputWriter.String()
	assert.Contains(out, "mynode")

	ui.OutputWriter.Reset()
}

func TestStatusCommand_Run_AllocStatus(t *testing.T) {
	assert := assert.New(t)
	t.Parallel()

	srv, _, url := testServer(t, true, nil)
	defer srv.Shutdown()

	ui := new(cli.MockUi)
	cmd := &StatusCommand{Meta: Meta{Ui: ui, flagAddress: url}}

	// Create a fake alloc
	state := srv.Agent.Server().State()
	alloc := mock.Alloc()
	assert.Nil(state.UpsertAllocs(1000, []*structs.Allocation{alloc}))

	if code := cmd.Run([]string{"-address=" + url, alloc.ID}); code != 0 {
		t.Fatalf("expected exit 0, got: %d", code)
	}

	out := ui.OutputWriter.String()
	assert.Contains(out, alloc.ID[:shortId])

	ui.OutputWriter.Reset()
}

func TestStatusCommand_Run_NoPrefix(t *testing.T) {
	assert := assert.New(t)
	t.Parallel()

	srv, _, url := testServer(t, true, nil)
	defer srv.Shutdown()

	ui := new(cli.MockUi)
	cmd := &StatusCommand{Meta: Meta{Ui: ui, flagAddress: url}}

	// Create a fake job
	state := srv.Agent.Server().State()
	job := mock.Job()
	assert.Nil(state.UpsertJob(1000, job))

	// Query to check status
	if code := cmd.Run([]string{"-address=" + url}); code != 0 {
		t.Fatalf("expected exit 0, got: %d", code)
	}

	out := ui.OutputWriter.String()
	assert.Contains(out, job.ID)

	ui.OutputWriter.Reset()
}

func TestStatusCommand_AutocompleteArgs(t *testing.T) {
	assert := assert.New(t)
	t.Parallel()

	srv, _, url := testServer(t, true, nil)
	defer srv.Shutdown()

	ui := new(cli.MockUi)
	cmd := &StatusCommand{Meta: Meta{Ui: ui, flagAddress: url}}

	// Create a fake job
	state := srv.Agent.Server().State()
	job := mock.Job()
	assert.Nil(state.UpsertJob(1000, job))

	prefix := job.ID[:len(job.ID)-5]
	args := complete.Args{Last: prefix}
	predictor := cmd.AutocompleteArgs()

	res := predictor.Predict(args)
	assert.Contains(res, job.ID)

	args = complete.Args{Last: prefix, Completed: []string{prefix, "1", "2"}}
	predictor = cmd.AutocompleteArgs()

	res = predictor.Predict(args)
	assert.Nil(res)
}
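For context on what these predictor tests exercise: posener/complete drives predictors from the shell's completion environment, and a Command's Args predictor plays the same role as AutocompleteArgs above. A minimal wiring sketch follows; the binary name "app" and the static candidate are purely illustrative.

package main

import "github.com/posener/complete"

func main() {
	cmd := complete.Command{
		Args: complete.PredictFunc(func(a complete.Args) []string {
			// a.Last is the word currently being typed.
			return []string{"example-job"} // illustrative static candidate
		}),
	}
	// Installs/serves bash or zsh completion for a binary named "app".
	complete.New("app", cmd).Run()
}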

View File

@@ -224,6 +224,11 @@ func Commands(metaPtr *command.Meta) map[string]cli.CommandFactory {
				Meta: meta,
			}, nil
		},
		"status": func() (cli.Command, error) {
			return &command.StatusCommand{
				Meta: meta,
			}, nil
		},
		"stop": func() (cli.Command, error) {
			return &command.StopCommand{
				Meta: meta,

View File

@@ -120,10 +120,16 @@ func (s *Search) PrefixSearch(args *structs.SearchRequest,
	for _, ctx := range contexts {
		iter, err := getResourceIter(ctx, roundUUIDDownIfOdd(args.Prefix, args.Context), ws, state)
		if err != nil {
			// Searching other contexts with job names raises an error, which in
			// this case we want to ignore.
			if !strings.Contains(err.Error(), "Invalid UUID: encoding/hex") {
				return err
			}
		} else {
			iters[ctx] = iter
		}
	}

	// Return matches for the given prefix
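The ignored error deserves a note: UUID-indexed contexts (evals, allocs, nodes) decode the search prefix as hex before seeking the index, so a job name that is not valid hex fails with an encoding/hex error, which the endpoint now tolerates instead of aborting the whole search. A tiny sketch of the underlying failure mode:

package main

import (
	"encoding/hex"
	"fmt"
)

func main() {
	// A job name is rarely valid hex, so decoding it as a UUID prefix fails.
	_, err := hex.DecodeString("example-job")
	fmt.Println(err) // encoding/hex: invalid byte: U+0078 'x'
}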

View File

@@ -25,7 +25,7 @@ func registerAndVerifyJob(s *Server, t *testing.T, prefix string, counter int) s
	return job.ID
}

func TestSearch_PrefixSearch_Job(t *testing.T) {
	assert := assert.New(t)
	prefix := "aaaaaaaa-e8f7-fd38-c855-ab94ceb8970"
@@ -88,6 +88,42 @@ func TestSearch_PrefixSearch_Truncate(t *testing.T) {
	assert.Equal(uint64(jobIndex), resp.Index)
}
func TestSearch_PrefixSearch_AllWithJob(t *testing.T) {
	assert := assert.New(t)
	prefix := "aaaaaaaa-e8f7-fd38-c855-ab94ceb8970"

	t.Parallel()
	s := testServer(t, func(c *Config) {
		c.NumSchedulers = 0
	})
	defer s.Shutdown()
	codec := rpcClient(t, s)
	testutil.WaitForLeader(t, s.RPC)

	jobID := registerAndVerifyJob(s, t, prefix, 0)
	eval1 := mock.Eval()
	eval1.ID = jobID
	s.fsm.State().UpsertEvals(2000, []*structs.Evaluation{eval1})

	req := &structs.SearchRequest{
		Prefix:  prefix,
		Context: structs.All,
	}

	var resp structs.SearchResponse
	if err := msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp); err != nil {
		t.Fatalf("err: %v", err)
	}

	assert.Equal(1, len(resp.Matches[structs.Jobs]))
	assert.Equal(jobID, resp.Matches[structs.Jobs][0])
	assert.Equal(1, len(resp.Matches[structs.Evals]))
	assert.Equal(eval1.ID, resp.Matches[structs.Evals][0])
}

func TestSearch_PrefixSearch_Evals(t *testing.T) {
	assert := assert.New(t)
	t.Parallel()

vendor/github.com/mitchellh/cli/Makefile (new generated vendored file, 20 lines)
View File

@@ -0,0 +1,20 @@
TEST?=./...

default: test

# test runs the test suite and vets the code
test:
	go list $(TEST) | xargs -n1 go test -timeout=60s -parallel=10 $(TESTARGS)

# testrace runs the race checker
testrace:
	go list $(TEST) | xargs -n1 go test -race $(TESTARGS)

# updatedeps installs all the dependencies to run and build
updatedeps:
	go list ./... \
		| xargs go list -f '{{ join .Deps "\n" }}{{ printf "\n" }}{{ join .TestImports "\n" }}' \
		| grep -v github.com/mitchellh/cli \
		| xargs go get -f -u -v

.PHONY: test testrace updatedeps

View File

@@ -85,13 +85,17 @@ type CLI struct {
	// for the flag name. These default to `autocomplete-install` and
	// `autocomplete-uninstall` respectively.
	//
	// AutocompleteNoDefaultFlags is a boolean which controls if the default auto-
	// complete flags like -help and -version are added to the output.
	//
	// AutocompleteGlobalFlags are a mapping of global flags for
	// autocompletion. The help and version flags are automatically added.
	Autocomplete               bool
	AutocompleteInstall        string
	AutocompleteUninstall      string
	AutocompleteNoDefaultFlags bool
	AutocompleteGlobalFlags    complete.Flags
	autocompleteInstaller      autocompleteInstaller // For tests

	// HelpFunc and HelpWriter are used to output help information, if
	// requested.
@@ -375,11 +379,13 @@ func (c *CLI) initAutocomplete() {
	// For the root, we add the global flags to the "Flags". This way
	// they don't show up on every command.
	if !c.AutocompleteNoDefaultFlags {
		cmd.Flags = map[string]complete.Predictor{
			"-" + c.AutocompleteInstall:   complete.PredictNothing,
			"-" + c.AutocompleteUninstall: complete.PredictNothing,
			"-help":    complete.PredictNothing,
			"-version": complete.PredictNothing,
		}
	}
	cmd.GlobalFlags = c.AutocompleteGlobalFlags
@@ -392,27 +398,22 @@ func (c *CLI) initAutocomplete() {
func (c *CLI) initAutocompleteSub(prefix string) complete.Command {
	var cmd complete.Command
	walkFn := func(k string, raw interface{}) bool {
		if len(prefix) > 0 {
			// If we have a prefix, trim the prefix + 1 (for the space)
			// Example: turns "sub one" to "one" with prefix "sub"
			k = k[len(prefix)+1:]
		}

		// Keep track of the full key so that we can nest further if necessary
		fullKey := k

		if idx := strings.Index(k, " "); idx >= 0 {
			// If there is a space, we trim up to the space. This turns
			// "sub sub2 sub3" into "sub". The prefix trim above will
			// trim our current depth properly.
			k = k[:idx]
		}

		if _, ok := cmd.Sub[k]; ok {
			// If we already tracked this subcommand then ignore
			return false
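The new AutocompleteNoDefaultFlags field is opt-in; a short sketch of a CLI that suppresses the default -help/-version completion flags (the app name, version, and empty command map are placeholders):

package main

import (
	"os"

	"github.com/mitchellh/cli"
)

func main() {
	c := &cli.CLI{
		Name:                       "app",   // placeholder
		Version:                    "0.1.0", // placeholder
		Args:                       os.Args[1:],
		Commands:                   map[string]cli.CommandFactory{},
		Autocomplete:               true,
		AutocompleteNoDefaultFlags: true, // no -help/-version predictors at the root
	}
	code, err := c.Run()
	if err != nil {
		os.Exit(1)
	}
	os.Exit(code)
}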

View File

@@ -100,8 +100,12 @@ func (b *syncBuffer) Reset() {
}

func (b *syncBuffer) String() string {
	return string(b.Bytes())
}

func (b *syncBuffer) Bytes() []byte {
	b.RLock()
	data := b.b.Bytes()
	b.RUnlock()
	return data
}
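The receiver type is unexported in mitchellh/cli and not shown in this hunk; a plausible reconstruction for context, with the field layout inferred from the RLock/Bytes calls above (an assumption, not the vendored source):

package cli

import (
	"bytes"
	"sync"
)

// Inferred shape: the embedded RWMutex supplies RLock/RUnlock, and b holds
// the underlying buffer read by Bytes().
type syncBuffer struct {
	sync.RWMutex
	b bytes.Buffer
}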

vendor/vendor.json (vendored, 6 lines changed)
View File

@@ -996,10 +996,10 @@
"revision": "7e024ce8ce18b21b475ac6baf8fa3c42536bf2fa"
},
{
"checksumSHA1": "cwT95naFga0RFGUZsCT1NeX5ncI=",
"checksumSHA1": "gPuHq0UytpuYPb2YWmFVb22Twcc=",
"path": "github.com/mitchellh/cli",
"revision": "921cc83dadc195c0cd67f9df3a6ec822400a1df5",
"revisionTime": "2017-07-25T23:05:51Z"
"revision": "0ce7cd515f64496ee660ab19f6bbf373945d3af0",
"revisionTime": "2017-08-24T19:02:09Z"
},
{
"checksumSHA1": "ttEN1Aupb7xpPMkQLqb3tzLFdXs=",