Add common serialiser abstractions (#8634)
This extracts some common API-idiosyncrasy-handling patterns from model serialisers into properties that are processed by the application serialiser:

* arrayNullOverrides converts a null property value to an empty array
* mapToArray converts a map to an array of maps, using the original map keys as Name properties on the array maps
* separateNanos splits nanosecond-precision timestamps into millisecond timestamps and separate nanosecond properties
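To illustrate the shape this gives per-model serialisers, here is a minimal sketch of one that opts into all three properties. The WidgetSerializer name and its payload keys are hypothetical; the property semantics are the ones introduced by this commit:

    import ApplicationSerializer from './application';

    export default class WidgetSerializer extends ApplicationSerializer {
      // { Tags: null } => { Tags: [] }
      arrayNullOverrides = ['Tags'];

      // { Ports: { http: { To: 8080 } } } => { Ports: [{ Name: 'http', To: 8080 }] }
      mapToArray = ['Ports'];

      // { Time: 1607839992000100000 } => { Time: 1607839992000, TimeNanos: 100096 }
      separateNanos = ['Time'];
    }

The application serialiser applies each declared transformation in its normalize hook, so subclasses keep only whatever custom logic remains.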
@@ -18,6 +18,8 @@ export default class AllocationSerializer extends ApplicationSerializer {
     states: 'TaskStates',
   };
 
+  separateNanos = ['CreateTime', 'ModifyTime'];
+
   normalize(typeHash, hash) {
     // Transform the map-based TaskStates object into an array-based
     // TaskState fragment list
@@ -40,12 +42,6 @@ export default class AllocationSerializer extends ApplicationSerializer {
       'default';
     hash.JobID = JSON.stringify([hash.JobID, hash.Namespace]);
 
-    hash.ModifyTimeNanos = hash.ModifyTime % 1000000;
-    hash.ModifyTime = Math.floor(hash.ModifyTime / 1000000);
-
-    hash.CreateTimeNanos = hash.CreateTime % 1000000;
-    hash.CreateTime = Math.floor(hash.CreateTime / 1000000);
-
     hash.RescheduleEvents = (hash.RescheduleTracker || {}).Events;
 
     hash.IsMigrating = (hash.DesiredTransition || {}).Migrate;
@@ -4,10 +4,48 @@ import { makeArray } from '@ember/array';
 import JSONSerializer from 'ember-data/serializers/json';
 import { pluralize, singularize } from 'ember-inflector';
 import removeRecord from '../utils/remove-record';
+import { assign } from '@ember/polyfills';
 
 export default class Application extends JSONSerializer {
   primaryKey = 'ID';
 
+  /**
+    A list of keys that are converted to empty arrays if their value is null.
+
+    arrayNullOverrides = ['Array'];
+    { Array: null } => { Array: [] }
+
+    @property arrayNullOverrides
+    @type String[]
+  */
+  arrayNullOverrides = null;
+
+  /**
+    A list of keys or objects to convert a map into an array of maps with the original map keys as Name properties.
+
+    mapToArray = ['Map'];
+    { Map: { a: { x: 1 } } } => { Map: [ { Name: 'a', x: 1 }] }
+
+    mapToArray = [{ beforeName: 'M', afterName: 'Map' }];
+    { M: { a: { x: 1 } } } => { Map: [ { Name: 'a', x: 1 }] }
+
+    @property mapToArray
+    @type (String|Object)[]
+  */
+  mapToArray = null;
+
+  /**
+    A list of keys for nanosecond timestamps that will be split into two properties: `separateNanos = ['Time']` will
+    produce a `Time` property with a millisecond timestamp and `TimeNanos` with the nanoseconds alone.
+
+    separateNanos = ['Time'];
+    { Time: 1607839992000100000 } => { Time: 1607839992000, TimeNanos: 100096 }
+
+    @property separateNanos
+    @type String[]
+  */
+  separateNanos = null;
+
   keyForAttribute(attr) {
     return attr.camelize().capitalize();
   }
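A note on the TimeNanos value shown in the separateNanos doc comment: timestamps of that magnitude exceed Number.MAX_SAFE_INTEGER, so JavaScript parses the literal to the nearest representable double and the recovered nanosecond remainder is approximate. A quick sketch of the arithmetic:

    // 1607839992000100000 has 19 digits; the nearest representable double is
    // 1607839992000100096, so the remainder carries that rounding error.
    const time = 1607839992000100000;
    time % 1000000;             // => 100096, not 100000
    Math.floor(time / 1000000); // => 1607839992000 (the milliseconds survive exactly)

This is why the doc comment and the unit test at the bottom of this diff both expect 100096 rather than 100000.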
@@ -43,6 +81,53 @@ export default class Application extends JSONSerializer {
     store.push(documentHash);
   }
 
+  normalize(modelClass, hash) {
+    if (hash) {
+      if (this.arrayNullOverrides) {
+        this.arrayNullOverrides.forEach(key => {
+          if (!hash[key]) {
+            hash[key] = [];
+          }
+        });
+      }
+
+      if (this.mapToArray) {
+        this.mapToArray.forEach(conversion => {
+          let apiKey, uiKey;
+
+          if (conversion.beforeName) {
+            apiKey = conversion.beforeName;
+            uiKey = conversion.afterName;
+          } else {
+            apiKey = conversion;
+            uiKey = conversion;
+          }
+
+          const map = hash[apiKey] || {};
+
+          hash[uiKey] = Object.keys(map).map(mapKey => {
+            const propertiesForKey = map[mapKey] || {};
+            const convertedMap = { Name: mapKey };
+
+            assign(convertedMap, propertiesForKey);
+
+            return convertedMap;
+          });
+        });
+      }
+
+      if (this.separateNanos) {
+        this.separateNanos.forEach(key => {
+          const timeWithNanos = hash[key];
+          hash[`${key}Nanos`] = timeWithNanos % 1000000;
+          hash[key] = Math.floor(timeWithNanos / 1000000);
+        });
+      }
+    }
+
+    return super.normalize(modelClass, hash);
+  }
+
   normalizeFindAllResponse(store, modelClass) {
     const result = super.normalizeFindAllResponse(...arguments);
     this.cullStore(store, modelClass.modelName, result.data);
@@ -9,14 +9,10 @@ export default class DeploymentSerializer extends ApplicationSerializer {
     versionNumber: 'JobVersion',
   };
 
+  mapToArray = [{ beforeName: 'TaskGroups', afterName: 'TaskGroupSummaries' }];
+
   normalize(typeHash, hash) {
     if (hash) {
-      const taskGroups = hash.TaskGroups || {};
-      hash.TaskGroupSummaries = Object.keys(taskGroups).map(key => {
-        const deploymentStats = taskGroups[key];
-        return assign({ Name: key }, deploymentStats);
-      });
-
       hash.PlainJobId = hash.JobID;
       hash.Namespace =
         hash.Namespace ||
@@ -1,6 +1,5 @@
 import { inject as service } from '@ember/service';
 import { get } from '@ember/object';
-import { assign } from '@ember/polyfills';
 import ApplicationSerializer from './application';
 import classic from 'ember-classic-decorator';
 
@@ -8,13 +7,10 @@ import classic from 'ember-classic-decorator';
 export default class Evaluation extends ApplicationSerializer {
   @service system;
 
-  normalize(typeHash, hash) {
-    const failures = hash.FailedTGAllocs || {};
-    hash.FailedTGAllocs = Object.keys(failures).map(key => {
-      const propertiesForKey = failures[key] || {};
-      return assign({ Name: key }, propertiesForKey);
-    });
+  mapToArray = ['FailedTGAllocs'];
+  separateNanos = ['CreateTime', 'ModifyTime'];
 
+  normalize(typeHash, hash) {
     hash.PlainJobId = hash.JobID;
     hash.Namespace =
       hash.Namespace ||
@@ -23,12 +19,6 @@ export default class Evaluation extends ApplicationSerializer {
       'default';
     hash.JobID = JSON.stringify([hash.JobID, hash.Namespace]);
 
-    hash.ModifyTimeNanos = hash.ModifyTime % 1000000;
-    hash.ModifyTime = Math.floor(hash.ModifyTime / 1000000);
-
-    hash.CreateTimeNanos = hash.CreateTime % 1000000;
-    hash.CreateTime = Math.floor(hash.CreateTime / 1000000);
-
     return super.normalize(typeHash, hash);
   }
 }
@@ -1,13 +1,10 @@
-import { assign } from '@ember/polyfills';
 import ApplicationSerializer from './application';
 import { get } from '@ember/object';
 
 export default class JobPlan extends ApplicationSerializer {
+  mapToArray = ['FailedTGAllocs'];
+
   normalize(typeHash, hash) {
-    const failures = hash.FailedTGAllocs || {};
-    hash.FailedTGAllocs = Object.keys(failures).map(key => {
-      return assign({ Name: key }, failures[key] || {});
-    });
     hash.PreemptionIDs = (get(hash, 'Annotations.PreemptedAllocs') || []).mapBy('ID');
     return super.normalize(...arguments);
   }
@@ -1,19 +1,13 @@
-import { assign } from '@ember/polyfills';
 import ApplicationSerializer from './application';
 
 export default class JobScale extends ApplicationSerializer {
+  mapToArray = [{ beforeName: 'TaskGroups', afterName: 'TaskGroupScales' }];
+
   normalize(modelClass, hash) {
-    // Transform the map-based TaskGroups object into an array-based
-    // TaskGroupScale fragment list
     hash.PlainJobId = hash.JobID;
     hash.ID = JSON.stringify([hash.JobID, hash.Namespace || 'default']);
     hash.JobID = hash.ID;
 
-    const taskGroups = hash.TaskGroups || {};
-    hash.TaskGroupScales = Object.keys(taskGroups).map(key => {
-      return assign(taskGroups[key], { Name: key });
-    });
-
     return super.normalize(modelClass, hash);
   }
 }
@@ -3,12 +3,13 @@ import ApplicationSerializer from './application';
 
 export default class JobSummary extends ApplicationSerializer {
   normalize(modelClass, hash) {
-    // Transform the map-based Summary object into an array-based
-    // TaskGroupSummary fragment list
     hash.PlainJobId = hash.JobID;
     hash.ID = JSON.stringify([hash.JobID, hash.Namespace || 'default']);
     hash.JobID = hash.ID;
 
+    // Transform the map-based Summary object into an array-based
+    // TaskGroupSummary fragment list
+
     const fullSummary = hash.Summary || {};
     hash.TaskGroupSummaries = Object.keys(fullSummary).map(key => {
       const allocStats = fullSummary[key] || {};
@@ -1,4 +1,3 @@
-import { assign } from '@ember/polyfills';
 import { inject as service } from '@ember/service';
 import ApplicationSerializer from './application';
 
@@ -10,18 +9,7 @@ export default class NodeSerializer extends ApplicationSerializer {
     httpAddr: 'HTTPAddr',
   };
 
-  normalize(modelClass, hash) {
-    // Transform map-based objects into array-based fragment lists
-    const drivers = hash.Drivers || {};
-    hash.Drivers = Object.keys(drivers).map(key => {
-      return assign({}, drivers[key], { Name: key });
-    });
-
-    const hostVolumes = hash.HostVolumes || {};
-    hash.HostVolumes = Object.keys(hostVolumes).map(key => hostVolumes[key]);
-
-    return super.normalize(modelClass, hash);
-  }
+  mapToArray = ['Drivers', 'HostVolumes'];
 
   extractRelationships(modelClass, hash) {
     const { modelName } = modelClass;
@@ -1,14 +1,9 @@
 import ApplicationSerializer from './application';
 
 export default class RescheduleEvent extends ApplicationSerializer {
-  normalize(typeHash, hash) {
-    // Time is in the form of nanoseconds since epoch, but JS dates
-    // only understand time to the millisecond precision. So store
-    // the time (precise to ms) as a date, and store the remaining ns
-    // as a number to deal with when it comes up.
-    hash.TimeNanos = hash.RescheduleTime % 1000000;
-    hash.Time = Math.floor(hash.RescheduleTime / 1000000);
-
+  separateNanos = ['Time'];
+
+  normalize(typeHash, hash) {
     hash.PreviousAllocationId = hash.PrevAllocID ? hash.PrevAllocID : null;
     hash.PreviousNodeId = hash.PrevNodeID ? hash.PrevNodeID : null;
 
@@ -8,8 +8,5 @@ export default class ResourcesSerializer extends ApplicationSerializer {
     iops: 'IOPS',
   };
 
-  normalize(typeHash, hash) {
-    hash.Ports = hash.Ports || [];
-    return super.normalize(typeHash, hash);
-  }
+  arrayNullOverrides = ['Ports'];
 }
@@ -1,10 +1,5 @@
 import ApplicationSerializer from './application';
 
 export default class ScaleEventSerializer extends ApplicationSerializer {
-  normalize(typeHash, hash) {
-    hash.TimeNanos = hash.Time % 1000000;
-    hash.Time = Math.floor(hash.Time / 1000000);
-
-    return super.normalize(typeHash, hash);
-  }
+  separateNanos = ['Time'];
 }
@@ -5,11 +5,5 @@ export default class ServiceSerializer extends ApplicationSerializer {
     connect: 'Connect',
   };
 
-  normalize(typeHash, hash) {
-    if (!hash.Tags) {
-      hash.Tags = [];
-    }
-
-    return super.normalize(typeHash, hash);
-  }
+  arrayNullOverrides = ['Tags'];
 }
@@ -5,14 +5,5 @@ export default class TaskEventSerializer extends ApplicationSerializer {
     message: 'DisplayMessage',
   };
 
-  normalize(typeHash, hash) {
-    // Time is in the form of nanoseconds since epoch, but JS dates
-    // only understand time to the millisecond precision. So store
-    // the time (precise to ms) as a date, and store the remaining ns
-    // as a number to deal with when it comes up.
-    hash.TimeNanos = hash.Time % 1000000;
-    hash.Time = Math.floor(hash.Time / 1000000);
-
-    return super.normalize(typeHash, hash);
-  }
+  separateNanos = ['Time'];
 }
@@ -1,11 +1,5 @@
 import ApplicationSerializer from './application';
 
 export default class TaskGroupScaleSerializer extends ApplicationSerializer {
-  normalize(typeHash, hash) {
-    if (!hash.Events) {
-      hash.Events = [];
-    }
-
-    return super.normalize(typeHash, hash);
-  }
+  arrayNullOverrides = ['Events'];
 }
@@ -2,6 +2,9 @@ import { copy } from 'ember-copy';
 import ApplicationSerializer from './application';
 
 export default class TaskGroup extends ApplicationSerializer {
+  arrayNullOverrides = ['Services'];
+  mapToArray = ['Volumes'];
+
   normalize(typeHash, hash) {
     // Provide EphemeralDisk to each task
     hash.Tasks.forEach(task => {
@@ -9,10 +12,6 @@ export default class TaskGroup extends ApplicationSerializer {
     });
 
     hash.ReservedEphemeralDisk = hash.EphemeralDisk.SizeMB;
-    hash.Services = hash.Services || [];
-
-    const volumes = hash.Volumes || {};
-    hash.Volumes = Object.keys(volumes).map(key => volumes[key]);
 
     return super.normalize(typeHash, hash);
   }
ui/tests/unit/serializers/application-test.js (new file, 107 lines)
@@ -0,0 +1,107 @@
+import { module, test } from 'qunit';
+import { setupTest } from 'ember-qunit';
+import ApplicationSerializer from 'nomad-ui/serializers/application';
+
+import Model from 'ember-data/model';
+import attr from 'ember-data/attr';
+
+class TestSerializer extends ApplicationSerializer {
+  arrayNullOverrides = ['Things'];
+
+  mapToArray = [
+    'ArrayableMap',
+    { beforeName: 'OriginalNameArrayableMap', afterName: 'RenamedArrayableMap' },
+  ];
+
+  separateNanos = ['Time'];
+}
+
+class TestModel extends Model {
+  @attr() things;
+
+  @attr() arrayableMap;
+  @attr() renamedArrayableMap;
+
+  @attr() time;
+  @attr() timeNanos;
+}
+
+module('Unit | Serializer | Application', function(hooks) {
+  setupTest(hooks);
+
+  hooks.beforeEach(function() {
+    this.store = this.owner.lookup('service:store');
+    this.owner.register('model:test', TestModel);
+    this.owner.register('serializer:test', TestSerializer);
+
+    this.subject = () => this.store.serializerFor('test');
+  });
+
+  const normalizationTestCases = [
+    {
+      name: 'Null array and maps',
+      in: {
+        ID: 'test-test',
+        Things: null,
+        ArrayableMap: null,
+        OriginalNameArrayableMap: null,
+        Time: 1607839992000100000,
+      },
+      out: {
+        data: {
+          id: 'test-test',
+          attributes: {
+            things: [],
+            arrayableMap: [],
+            renamedArrayableMap: [],
+            time: 1607839992000,
+            timeNanos: 100096,
+          },
+          relationships: {},
+          type: 'test',
+        },
+      },
+    },
+    {
+      name: 'Non-null array and maps',
+      in: {
+        ID: 'test-test',
+        Things: [1, 2, 3],
+        ArrayableMap: {
+          a: { Order: 1 },
+          b: { Order: 2 },
+          'c.d': { Order: 3 },
+        },
+        OriginalNameArrayableMap: {
+          a: { X: 1 },
+        },
+        Time: 1607839992000100000,
+        SomethingExtra: 'xyz',
+      },
+      out: {
+        data: {
+          id: 'test-test',
+          attributes: {
+            things: [1, 2, 3],
+            arrayableMap: [
+              { Name: 'a', Order: 1 },
+              { Name: 'b', Order: 2 },
+              { Name: 'c.d', Order: 3 },
+            ],
+            renamedArrayableMap: [{ Name: 'a', X: 1 }],
+            time: 1607839992000,
+            timeNanos: 100096,
+          },
+          relationships: {},
+          type: 'test',
+        },
+      },
+    },
+  ];
+
+  normalizationTestCases.forEach(testCase => {
+    test(`normalization: ${testCase.name}`, async function(assert) {
+      assert.deepEqual(this.subject().normalize(TestModel, testCase.in), testCase.out);
+    });
+  });
+});
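To run just this new module locally, something like the following should work in a standard ember-cli setup (the command is an assumption, not part of the commit):

    ember test --filter 'Unit | Serializer | Application'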