[RAC][Uptime] remove extra dot from the uptime alert connector message (#124000)

This commit is contained in:
mgiota 2022-01-28 11:50:24 +01:00 committed by GitHub
parent 5b8af6c1ea
commit e45d594818
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
6 changed files with 15 additions and 15 deletions

View file

@@ -21,7 +21,7 @@ export const VALUE_MUST_BE_AN_INTEGER = i18n.translate('xpack.uptime.settings.in
export const MonitorStatusTranslations = {
defaultActionMessage: i18n.translate('xpack.uptime.alerts.monitorStatus.defaultActionMessage', {
defaultMessage:
'Monitor {monitorName} with url {monitorUrl} from {observerLocation} {statusMessage}. The latest error message is {latestErrorMessage}',
'Monitor {monitorName} with url {monitorUrl} from {observerLocation} {statusMessage} The latest error message is {latestErrorMessage}',
values: {
monitorName: '{{state.monitorName}}',
monitorUrl: '{{{state.monitorUrl}}}',

View file

@@ -202,7 +202,7 @@ describe('monitor status alert type', () => {
})
).toMatchInlineSnapshot(`
Object {
"defaultActionMessage": "Monitor {{state.monitorName}} with url {{{state.monitorUrl}}} from {{state.observerLocation}} {{{state.statusMessage}}}. The latest error message is {{{state.latestErrorMessage}}}",
"defaultActionMessage": "Monitor {{state.monitorName}} with url {{{state.monitorUrl}}} from {{state.observerLocation}} {{{state.statusMessage}}} The latest error message is {{{state.latestErrorMessage}}}",
"description": "Alert when a monitor is down or an availability threshold is breached.",
"documentationUrl": [Function],
"format": [Function],

View file

@@ -50,7 +50,7 @@ describe('Alert Actions factory', () => {
eventAction: 'trigger',
severity: 'error',
summary:
'Monitor {{state.monitorName}} with url {{{state.monitorUrl}}} from {{state.observerLocation}} {{{state.statusMessage}}}. The latest error message is {{{state.latestErrorMessage}}}',
'Monitor {{state.monitorName}} with url {{{state.monitorUrl}}} from {{state.observerLocation}} {{{state.statusMessage}}} The latest error message is {{{state.latestErrorMessage}}}',
},
id: 'f2a3b195-ed76-499a-805d-82d24d4eeba9',
},
@@ -75,7 +75,7 @@ describe('Alert Actions factory', () => {
eventAction: 'trigger',
severity: 'error',
summary:
'Monitor {{state.monitorName}} with url {{{state.monitorUrl}}} from {{state.observerLocation}} {{{state.statusMessage}}}. The latest error message is {{{state.latestErrorMessage}}}',
'Monitor {{state.monitorName}} with url {{{state.monitorUrl}}} from {{state.observerLocation}} {{{state.statusMessage}}} The latest error message is {{{state.latestErrorMessage}}}',
},
},
]);
@@ -93,7 +93,7 @@ describe('Alert Actions factory', () => {
eventAction: 'trigger',
severity: 'error',
summary:
'Monitor {{state.monitorName}} with url {{{state.monitorUrl}}} from {{state.observerLocation}} {{{state.statusMessage}}}. The latest error message is {{{state.latestErrorMessage}}}',
'Monitor {{state.monitorName}} with url {{{state.monitorUrl}}} from {{state.observerLocation}} {{{state.statusMessage}}} The latest error message is {{{state.latestErrorMessage}}}',
},
id: 'f2a3b195-ed76-499a-805d-82d24d4eeba9',
},
@@ -118,7 +118,7 @@ describe('Alert Actions factory', () => {
eventAction: 'trigger',
severity: 'error',
summary:
'Monitor {{state.monitorName}} with url {{{state.monitorUrl}}} from {{state.observerLocation}} {{{state.statusMessage}}}. The latest error message is {{{state.latestErrorMessage}}}',
'Monitor {{state.monitorName}} with url {{{state.monitorUrl}}} from {{state.observerLocation}} {{{state.statusMessage}}} The latest error message is {{{state.latestErrorMessage}}}',
},
},
]);

View file

@@ -177,26 +177,26 @@ export const getStatusMessage = (
) => {
let statusMessage = '';
if (downMonParams?.info) {
statusMessage = `${statusCheckTranslations.downMonitorsLabel(
statusMessage = statusCheckTranslations.downMonitorsLabel(
downMonParams.count!,
downMonParams.interval!,
downMonParams.numTimes
)}.`;
);
}
let availabilityMessage = '';
if (availMonInfo) {
availabilityMessage = `${statusCheckTranslations.availabilityBreachLabel(
availabilityMessage = statusCheckTranslations.availabilityBreachLabel(
(availMonInfo.availabilityRatio! * 100).toFixed(2),
availability?.threshold!,
getInterval(availability?.range!, availability?.rangeUnit!)
)}.`;
);
}
if (availMonInfo && downMonParams?.info) {
return `${statusCheckTranslations.downMonitorsAndAvailabilityBreachLabel(
return statusCheckTranslations.downMonitorsAndAvailabilityBreachLabel(
statusMessage,
availabilityMessage
)}`;
);
}
return statusMessage + availabilityMessage;
};

View file

@@ -331,7 +331,7 @@ export const durationAnomalyTranslations = {
export const statusCheckTranslations = {
downMonitorsLabel: (count: number, interval: string, numTimes: number) =>
i18n.translate('xpack.uptime.alerts.monitorStatus.actionVariables.down', {
defaultMessage: `failed {count} times in the last {interval}. Alert when > {numTimes}`,
defaultMessage: `failed {count} times in the last {interval}. Alert when > {numTimes}.`,
values: {
count,
interval,
@@ -345,7 +345,7 @@ export const statusCheckTranslations = {
) =>
i18n.translate('xpack.uptime.alerts.monitorStatus.actionVariables.availabilityMessage', {
defaultMessage:
'{interval} availability is {availabilityRatio}%. Alert when < {expectedAvailability}%',
'{interval} availability is {availabilityRatio}%. Alert when < {expectedAvailability}%.',
values: {
availabilityRatio,
expectedAvailability,

View file

@@ -107,7 +107,7 @@ export default ({ getPageObjects, getService }: FtrProviderContext) => {
group: 'xpack.uptime.alerts.actionGroups.monitorStatus',
params: {
message:
'Monitor {{state.monitorName}} with url {{{state.monitorUrl}}} from {{state.observerLocation}} {{{state.statusMessage}}}. The latest error message is {{{state.latestErrorMessage}}}',
'Monitor {{state.monitorName}} with url {{{state.monitorUrl}}} from {{state.observerLocation}} {{{state.statusMessage}}} The latest error message is {{{state.latestErrorMessage}}}',
},
id: 'my-slack1',
},