diff --git a/CHANGELOG.md b/CHANGELOG.md index 532afab06..898af560b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,7 @@ ## 0.8.2 (Unreleased) IMPROVEMENTS: + * api: Add /v1/jobs/parse api endpoint for rendering HCL jobs files as JSON [[GH-2782](https://github.com/hashicorp/nomad/issues/2782)] * client: Create new process group on process startup. [[GH-3572](https://github.com/hashicorp/nomad/issues/3572)] BUG FIXES: diff --git a/api/jobs.go b/api/jobs.go index 5fcecf403..7c2b7c723 100644 --- a/api/jobs.go +++ b/api/jobs.go @@ -36,11 +36,33 @@ type Jobs struct { client *Client } +// JobsParseRequest is used for arguments of the /v1/jobs/parse endpoint +type JobsParseRequest struct { + // JobHCL is an HCL jobspec + JobHCL string + + // Canonicalize is a flag as to if the server should return default values + // for unset fields + Canonicalize bool +} + // Jobs returns a handle on the jobs endpoints. func (c *Client) Jobs() *Jobs { return &Jobs{client: c} } +// ParseHCL is used to convert the HCL representation of a Job to JSON server side. 
+// To parse the HCL client side see package github.com/hashicorp/nomad/jobspec +func (j *Jobs) ParseHCL(jobHCL string, canonicalize bool) (*Job, error) { + var job Job + req := &JobsParseRequest{ + JobHCL: jobHCL, + Canonicalize: canonicalize, + } + _, err := j.client.write("/v1/jobs/parse", req, &job, nil) + return &job, err +} + func (j *Jobs) Validate(job *Job, q *WriteOptions) (*JobValidateResponse, *WriteMeta, error) { var resp JobValidateResponse req := &JobValidateRequest{Job: job} diff --git a/api/jobs_test.go b/api/jobs_test.go index 66f157c54..9119f5118 100644 --- a/api/jobs_test.go +++ b/api/jobs_test.go @@ -47,6 +47,43 @@ func TestJobs_Register(t *testing.T) { } } +func TestJobs_Parse(t *testing.T) { + t.Parallel() + c, s := makeClient(t, nil, nil) + defer s.Stop() + + jobs := c.Jobs() + + checkJob := func(job *Job, expectedRegion string) { + if job == nil { + t.Fatal("job should not be nil") + } + + region := job.Region + + if region == nil { + if expectedRegion != "" { + t.Fatalf("expected job region to be '%s' but was unset", expectedRegion) + } + } else { + if expectedRegion != *region { + t.Fatalf("expected job region '%s', but got '%s'", expectedRegion, *region) + } + } + } + job, err := jobs.ParseHCL(mock.HCL(), true) + if err != nil { + t.Fatalf("err: %s", err) + } + checkJob(job, "global") + + job, err = jobs.ParseHCL(mock.HCL(), false) + if err != nil { + t.Fatalf("err: %s", err) + } + checkJob(job, "") +} + func TestJobs_Validate(t *testing.T) { t.Parallel() c, s := makeClient(t, nil, nil) diff --git a/command/agent/http.go b/command/agent/http.go index 0321a7b22..6e9b6a802 100644 --- a/command/agent/http.go +++ b/command/agent/http.go @@ -143,6 +143,7 @@ func (s *HTTPServer) Shutdown() { // registerHandlers is used to attach our handlers to the mux func (s *HTTPServer) registerHandlers(enableDebug bool) { s.mux.HandleFunc("/v1/jobs", s.wrap(s.JobsRequest)) + s.mux.HandleFunc("/v1/jobs/parse", s.wrap(s.JobsParseRequest)) 
s.mux.HandleFunc("/v1/job/", s.wrap(s.JobSpecificRequest)) s.mux.HandleFunc("/v1/nodes", s.wrap(s.NodesRequest)) diff --git a/command/agent/job_endpoint.go b/command/agent/job_endpoint.go index ce1605728..2a0a92b17 100644 --- a/command/agent/job_endpoint.go +++ b/command/agent/job_endpoint.go @@ -8,6 +8,7 @@ import ( "github.com/golang/snappy" "github.com/hashicorp/nomad/api" + "github.com/hashicorp/nomad/jobspec" "github.com/hashicorp/nomad/nomad/structs" ) @@ -544,6 +545,32 @@ func (s *HTTPServer) jobDispatchRequest(resp http.ResponseWriter, req *http.Requ return out, nil } +// JobsParseRequest parses an HCL jobspec and returns an api.Job +func (s *HTTPServer) JobsParseRequest(resp http.ResponseWriter, req *http.Request) (interface{}, error) { + if req.Method != http.MethodPut && req.Method != http.MethodPost { + return nil, CodedError(405, ErrInvalidMethod) + } + + args := &api.JobsParseRequest{} + if err := decodeBody(req, &args); err != nil { + return nil, CodedError(400, err.Error()) + } + if args.JobHCL == "" { + return nil, CodedError(400, "Job spec is empty") + } + + jobfile := strings.NewReader(args.JobHCL) + jobStruct, err := jobspec.Parse(jobfile) + if err != nil { + return nil, CodedError(400, err.Error()) + } + + if args.Canonicalize { + jobStruct.Canonicalize() + } + return jobStruct, nil +} + func ApiJobToStructJob(job *api.Job) *structs.Job { job.Canonicalize() diff --git a/command/agent/job_endpoint_test.go b/command/agent/job_endpoint_test.go index 3a950f91c..1c80d6fac 100644 --- a/command/agent/job_endpoint_test.go +++ b/command/agent/job_endpoint_test.go @@ -272,6 +272,38 @@ func TestHTTP_JobsRegister_Defaulting(t *testing.T) { }) } +func TestHTTP_JobsParse(t *testing.T) { + t.Parallel() + httpTest(t, nil, func(s *TestAgent) { + buf := encodeReq(api.JobsParseRequest{JobHCL: mock.HCL()}) + req, err := http.NewRequest("POST", "/v1/jobs/parse", buf) + if err != nil { + t.Fatalf("err: %v", err) + } + + respW := httptest.NewRecorder() + + obj, err := 
s.Server.JobsParseRequest(respW, req) + if err != nil { + t.Fatalf("err: %v", err) + } + if obj == nil { + t.Fatal("response should not be nil") + } + + job := obj.(*api.Job) + expected := mock.Job() + if job.Name == nil || *job.Name != expected.Name { + t.Fatalf("job name is '%s', expected '%s'", *job.Name, expected.Name) + } + + if job.Datacenters == nil || + job.Datacenters[0] != expected.Datacenters[0] { + t.Fatalf("job datacenters is '%s', expected '%s'", + job.Datacenters[0], expected.Datacenters[0]) + } + }) +} func TestHTTP_JobQuery(t *testing.T) { t.Parallel() httpTest(t, nil, func(s *TestAgent) { diff --git a/nomad/mock/mock.go b/nomad/mock/mock.go index 1d39384a8..2d410d1a1 100644 --- a/nomad/mock/mock.go +++ b/nomad/mock/mock.go @@ -63,6 +63,38 @@ func Node() *structs.Node { return node } +func HCL() string { + return `job "my-job" { + datacenters = ["dc1"] + type = "service" + constraint { + attribute = "${attr.kernel.name}" + value = "linux" + } + + group "web" { + count = 10 + restart { + attempts = 3 + interval = "10m" + delay = "1m" + mode = "delay" + } + task "web" { + driver = "exec" + config { + command = "/bin/date" + } + resources { + cpu = 500 + memory = 256 + } + } + } +} +` +} + func Job() *structs.Job { job := &structs.Job{ Region: "global", diff --git a/website/source/api/jobs.html.md b/website/source/api/jobs.html.md index d3316df58..284830937 100644 --- a/website/source/api/jobs.html.md +++ b/website/source/api/jobs.html.md @@ -231,6 +231,83 @@ $ curl \ } ``` +## Parse Job + +This endpoint will parse an HCL jobspec and produce the equivalent JSON encoded +job. + +| Method | Path | Produces | +| ------ | ------------------------- | -------------------------- | +| `POST` | `/v1/jobs/parse` | `application/json` | + +The table below shows this endpoint's support for +[blocking queries](/api/index.html#blocking-queries) and +[required ACLs](/api/index.html#acls). 
+ +| Blocking Queries | ACL Required | +| ---------------- | ------------ | +| `NO` | `none` | + +### Parameters + +- `JobHCL` `(string: <required>)` - Specifies the HCL definition of the job + encoded in a JSON string. +- `Canonicalize` `(bool: false)` - Flag to enable setting any unset fields to + their default values. + +### Sample Payload + +```json +{ + "JobHCL":"job \"example\" { type = \"service\" group \"cache\" {} }", + "Canonicalize": true +} +``` + +### Sample Request + +```text +$ curl \ + --request POST \ + --data '{"Canonicalize": true, "JobHCL": "job \"my-job\" {}"}' \ + https://localhost:4646/v1/jobs/parse +``` + +### Sample Response + +```json +{ + "AllAtOnce": false, + "Constraints": null, + "CreateIndex": 0, + "Datacenters": null, + "ID": "my-job", + "JobModifyIndex": 0, + "Meta": null, + "Migrate": null, + "ModifyIndex": 0, + "Name": "my-job", + "Namespace": "default", + "ParameterizedJob": null, + "ParentID": "", + "Payload": null, + "Periodic": null, + "Priority": 50, + "Region": "global", + "Reschedule": null, + "Stable": false, + "Status": "", + "StatusDescription": "", + "Stop": false, + "SubmitTime": null, + "TaskGroups": null, + "Type": "service", + "Update": null, + "VaultToken": "", + "Version": 0 +} +``` + ## Read Job  This endpoint reads information about a single job for its specification and