- Send a ping command from the agent to an IP address. Use this tool to troubleshoot local network connectivity.
+ Send a ping command from the agent to a valid IP address or hostname. Use this tool to troubleshoot local
+ network connectivity.
{!pinging && lastPing && (
<>
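Since the ping target is now user-supplied, it is worth validating before dispatching the command. A minimal sketch, assuming `isValidHostname` is re-exported from `@medplum/core` (it is added to `packages/core/src/utils.ts` later in this diff) and that the agent `$push` operation is the transport; the agent ID and payload shape are illustrative:

```ts
import { MedplumClient, isValidHostname } from '@medplum/core';

const medplum = new MedplumClient();

// Guard the user-supplied target before asking the agent to ping it.
async function pingFromAgent(agentId: string, target: string): Promise<void> {
  if (!isValidHostname(target)) {
    throw new Error(`Invalid ping target: ${target}`);
  }
  await medplum.post(medplum.fhirUrl('Agent', agentId, '$push'), {
    destination: target,
    contentType: 'text/plain',
    body: 'PING',
  });
}
```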
diff --git a/packages/app/src/test-utils/render.tsx b/packages/app/src/test-utils/render.tsx
index cd5441f107..699d5ea5c1 100644
--- a/packages/app/src/test-utils/render.tsx
+++ b/packages/app/src/test-utils/render.tsx
@@ -5,15 +5,14 @@
import { MantineProvider } from '@mantine/core';
import { RenderResult, act, fireEvent, screen, render as testingLibraryRender, waitFor } from '@testing-library/react';
import userEvent from '@testing-library/user-event';
+import { ReactNode } from 'react';
export { RenderResult, act, fireEvent, screen, userEvent, waitFor };
const theme = {};
-export function render(ui: React.ReactNode): RenderResult {
+export function render(ui: ReactNode): RenderResult {
return testingLibraryRender(<>{ui}</>, {
- wrapper: ({ children }: { children: React.ReactNode }) => (
- <MantineProvider theme={theme}>{children}</MantineProvider>
- ),
+ wrapper: ({ children }: { children: ReactNode }) => <MantineProvider theme={theme}>{children}</MantineProvider>,
});
}
diff --git a/packages/bot-layer/package.json b/packages/bot-layer/package.json
index 67718f0e1a..6e1a78b217 100644
--- a/packages/bot-layer/package.json
+++ b/packages/bot-layer/package.json
@@ -1,6 +1,6 @@
{
"name": "@medplum/bot-layer",
- "version": "3.1.2",
+ "version": "3.1.3",
"description": "Medplum Bot Lambda Layer",
"keywords": [
"medplum",
@@ -22,8 +22,8 @@
"author": "Medplum ",
"type": "module",
"dependencies": {
- "@medplum/core": "3.1.2",
- "@medplum/definitions": "3.1.2",
+ "@medplum/core": "3.1.3",
+ "@medplum/definitions": "3.1.3",
"form-data": "4.0.0",
"jose": "5.2.4",
"node-fetch": "2.7.0",
@@ -35,7 +35,7 @@
"@types/node-fetch": "2.6.11"
},
"peerDependencies": {
- "@medplum/core": "3.1.2",
+ "@medplum/core": "3.1.3",
"form-data": "^4.0.0",
"node-fetch": "^2.7.0",
"pdfmake": "^0.2.7",
diff --git a/packages/cdk/package.json b/packages/cdk/package.json
index 6cccdbad4f..0711f44fee 100644
--- a/packages/cdk/package.json
+++ b/packages/cdk/package.json
@@ -1,6 +1,6 @@
{
"name": "@medplum/cdk",
- "version": "3.1.2",
+ "version": "3.1.3",
"description": "Medplum CDK Infra as Code",
"homepage": "https://www.medplum.com/",
"bugs": {
@@ -24,11 +24,11 @@
},
"dependencies": {
"@aws-sdk/types": "3.535.0",
- "@medplum/core": "3.1.2",
- "aws-cdk-lib": "2.136.0",
- "cdk": "2.136.0",
- "cdk-nag": "2.28.84",
- "cdk-serverless-clamscan": "2.6.145",
+ "@medplum/core": "3.1.3",
+ "aws-cdk-lib": "2.137.0",
+ "cdk": "2.137.0",
+ "cdk-nag": "2.28.89",
+ "cdk-serverless-clamscan": "2.6.150",
"constructs": "10.3.0"
},
"engines": {
diff --git a/packages/cdk/src/frontend.ts b/packages/cdk/src/frontend.ts
index 8a382c9afc..d4294df1ec 100644
--- a/packages/cdk/src/frontend.ts
+++ b/packages/cdk/src/frontend.ts
@@ -77,7 +77,7 @@ export class FrontEnd extends Construct {
`form-action 'self' *.gstatic.com *.google.com`,
`frame-ancestors 'none'`,
`frame-src 'self' ${config.storageDomainName} *.medplum.com *.gstatic.com *.google.com`,
- `img-src 'self' data: ${config.storageDomainName} *.gstatic.com *.google.com *.googleapis.com`,
+ `img-src 'self' data: ${config.storageDomainName} *.gstatic.com *.google.com *.googleapis.com gravatar.com`,
`manifest-src 'self'`,
`media-src 'self' ${config.storageDomainName}`,
`script-src 'self' *.medplum.com *.gstatic.com *.google.com`,
diff --git a/packages/cli/package.json b/packages/cli/package.json
index 3055f6b13d..790c836cb2 100644
--- a/packages/cli/package.json
+++ b/packages/cli/package.json
@@ -1,6 +1,6 @@
{
"name": "@medplum/cli",
- "version": "3.1.2",
+ "version": "3.1.3",
"description": "Medplum Command Line Interface",
"keywords": [
"medplum",
@@ -41,28 +41,27 @@
"test": "jest"
},
"dependencies": {
- "@aws-sdk/client-acm": "3.549.0",
- "@aws-sdk/client-cloudformation": "3.549.0",
- "@aws-sdk/client-cloudfront": "3.549.0",
- "@aws-sdk/client-ecs": "3.549.0",
- "@aws-sdk/client-s3": "3.550.0",
- "@aws-sdk/client-ssm": "3.549.0",
- "@aws-sdk/client-sts": "3.549.0",
+ "@aws-sdk/client-acm": "3.554.0",
+ "@aws-sdk/client-cloudformation": "3.555.0",
+ "@aws-sdk/client-cloudfront": "3.554.0",
+ "@aws-sdk/client-ecs": "3.554.0",
+ "@aws-sdk/client-s3": "3.554.0",
+ "@aws-sdk/client-ssm": "3.554.0",
+ "@aws-sdk/client-sts": "3.554.0",
"@aws-sdk/types": "3.535.0",
- "@medplum/core": "3.1.2",
- "@medplum/hl7": "3.1.2",
+ "@medplum/core": "3.1.3",
+ "@medplum/hl7": "3.1.3",
"aws-sdk-client-mock": "4.0.0",
"commander": "12.0.0",
"dotenv": "16.4.5",
"fast-glob": "3.3.2",
"node-fetch": "2.7.0",
- "tar": "6.2.1"
+ "tar": "7.0.1"
},
"devDependencies": {
- "@medplum/fhirtypes": "3.1.2",
- "@medplum/mock": "3.1.2",
- "@types/node-fetch": "2.6.11",
- "@types/tar": "6.1.12"
+ "@medplum/fhirtypes": "3.1.3",
+ "@medplum/mock": "3.1.3",
+ "@types/node-fetch": "2.6.11"
},
"engines": {
"node": ">=18.0.0"
diff --git a/packages/cli/src/utils.test.ts b/packages/cli/src/utils.test.ts
index d9161d9019..98fffed29a 100644
--- a/packages/cli/src/utils.test.ts
+++ b/packages/cli/src/utils.test.ts
@@ -1,6 +1,7 @@
import { ContentType } from '@medplum/core';
+import { Stats } from 'fs';
import { Writable } from 'stream';
-import tar from 'tar';
+import tar, { Unpack } from 'tar';
import { getCodeContentType, safeTarExtractor } from './utils';
jest.mock('tar', () => ({
@@ -12,10 +13,10 @@ describe('CLI utils', () => {
(tar as jest.Mocked<typeof tar>).x.mockImplementationOnce((options) => {
const writable = new Writable({
write(chunk, _, callback) {
- options.filter?.(chunk.toString(), { size: 1 } as tar.FileStat);
+ options.filter?.(chunk.toString(), { size: 1 } as Stats);
callback();
},
- });
+ }) as unknown as Unpack;
return writable;
});
@@ -35,10 +36,10 @@ describe('CLI utils', () => {
(tar as jest.Mocked<typeof tar>).x.mockImplementationOnce((options) => {
const writable = new Writable({
write(chunk, _, callback) {
- options.filter?.(chunk.toString(), { size: 1024 * 1024 } as tar.FileStat);
+ options.filter?.(chunk.toString(), { size: 1024 * 1024 } as Stats);
callback();
},
- });
+ }) as unknown as Unpack;
return writable;
});
diff --git a/packages/cli/src/utils.ts b/packages/cli/src/utils.ts
index 73666853e7..5ff7adb660 100644
--- a/packages/cli/src/utils.ts
+++ b/packages/cli/src/utils.ts
@@ -4,7 +4,6 @@ import { createHmac, createPrivateKey, randomBytes } from 'crypto';
import { existsSync, readFileSync, writeFileSync } from 'fs';
import { SignJWT } from 'jose';
import { basename, extname, resolve } from 'path';
-import internal from 'stream';
import tar from 'tar';
import { FileSystemStorage } from './storage';
@@ -211,7 +210,7 @@ function escapeRegex(str: string): string {
* @param destinationDir - The destination directory where all files will be extracted.
* @returns A tar file extractor.
*/
-export function safeTarExtractor(destinationDir: string): internal.Writable {
+export function safeTarExtractor(destinationDir: string): NodeJS.WritableStream {
const MAX_FILES = 100;
const MAX_SIZE = 10 * 1024 * 1024; // 10 MB
@@ -233,7 +232,9 @@ export function safeTarExtractor(destinationDir: string): internal.Writable {
return true;
},
- });
+
+ // Temporary cast for tar issue: https://github.com/isaacs/node-tar/issues/409
+ }) as ReturnType<typeof tar.x> & NodeJS.WritableStream;
}
export function getUnsupportedExtension(): Extension {
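For reference, a minimal usage sketch of the extractor after this change; the tarball path is hypothetical, and the guard limits (100 files, 10 MB) come from the constants above:

```ts
import { createReadStream } from 'fs';
import { pipeline } from 'stream/promises';
import { safeTarExtractor } from './utils';

// Pipe a downloaded archive through the guarded extractor. The filter inside
// safeTarExtractor throws if the archive has too many files or is too large.
await pipeline(createReadStream('/tmp/bot-code.tar'), safeTarExtractor('/tmp/bot-code/'));
```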
diff --git a/packages/core/package.json b/packages/core/package.json
index 43448a872f..604f561773 100644
--- a/packages/core/package.json
+++ b/packages/core/package.json
@@ -1,6 +1,6 @@
{
"name": "@medplum/core",
- "version": "3.1.2",
+ "version": "3.1.3",
"description": "Medplum TS/JS Library",
"keywords": [
"medplum",
@@ -55,8 +55,8 @@
"test": "jest"
},
"devDependencies": {
- "@medplum/definitions": "3.1.2",
- "@medplum/fhirtypes": "3.1.2",
+ "@medplum/definitions": "3.1.3",
+ "@medplum/fhirtypes": "3.1.3",
"jest-websocket-mock": "2.5.0"
},
"peerDependencies": {
diff --git a/packages/core/src/agent.ts b/packages/core/src/agent.ts
index 48e0d3c6ff..fa6257bc93 100644
--- a/packages/core/src/agent.ts
+++ b/packages/core/src/agent.ts
@@ -27,6 +27,7 @@ export interface AgentHeartbeatRequest extends BaseAgentRequestMessage {
export interface AgentHeartbeatResponse extends BaseAgentMessage {
type: 'agent:heartbeat:response';
+ version: string;
}
export interface AgentTransmitRequest extends BaseAgentRequestMessage {
@@ -42,6 +43,7 @@ export interface AgentTransmitResponse extends BaseAgentMessage {
channel?: string;
remote: string;
contentType: string;
+ statusCode?: number;
body: string;
}
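Both additions are optional or additive, so existing agents keep working. A sketch of a transmit response that carries the new `statusCode`, assuming these interfaces are re-exported from the `@medplum/core` package root; all values are illustrative:

```ts
import { AgentTransmitResponse } from '@medplum/core';

// Illustrative only: an HTTP-style transmit result that now reports the
// upstream status code alongside the response body.
const response: AgentTransmitResponse = {
  type: 'agent:transmit:response',
  channel: 'test-channel',
  remote: '10.0.0.5',
  contentType: 'x-application/hl7-v2+er7',
  statusCode: 200,
  body: 'MSA|AA|12345',
};
```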
diff --git a/packages/core/src/client.ts b/packages/core/src/client.ts
index 49592e182a..1fb4505c10 100644
--- a/packages/core/src/client.ts
+++ b/packages/core/src/client.ts
@@ -22,7 +22,7 @@ import {
Project,
ProjectMembership,
ProjectMembershipAccess,
- ProjectSecret,
+ ProjectSetting,
Reference,
Resource,
ResourceType,
@@ -88,7 +88,7 @@ import {
sortStringArray,
} from './utils';
-export const MEDPLUM_VERSION = import.meta.env.MEDPLUM_VERSION ?? '';
+export const MEDPLUM_VERSION: string = import.meta.env.MEDPLUM_VERSION ?? '';
export const MEDPLUM_CLI_CLIENT_ID = 'medplum-cli';
export const DEFAULT_ACCEPT = ContentType.FHIR_JSON + ', */*; q=0.1';
@@ -429,7 +429,7 @@ export interface BotEvent;
readonly contentType: string;
readonly input: T;
- readonly secrets: Record<string, ProjectSecret>;
+ readonly secrets: Record<string, ProjectSetting>;
readonly traceId?: string;
}
@@ -906,7 +906,9 @@ export class MedplumClient extends EventTarget {
*/
clear(): void {
this.storage.clear();
- sessionStorage.clear();
+ if (typeof window !== 'undefined') {
+ sessionStorage.clear();
+ }
this.clearActiveLogin();
}
@@ -3310,7 +3312,7 @@ export class MedplumClient extends EventTarget {
if (this.refresh()) {
return this.request(method, url, options);
}
- this.clearActiveLogin();
+ this.clear();
if (this.onUnauthenticated) {
this.onUnauthenticated();
}
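Because the 401 path now calls `clear()`, both the active login and browser session storage are dropped before `onUnauthenticated` fires. A sketch of wiring that callback; the redirect route is an assumption:

```ts
import { MedplumClient } from '@medplum/core';

const medplum = new MedplumClient({
  baseUrl: 'https://api.medplum.com/',
  onUnauthenticated: () => {
    // Local auth state has already been cleared by MedplumClient.clear();
    // send the user back to the sign-in page (route name is hypothetical).
    window.location.assign('/signin');
  },
});
```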
diff --git a/packages/core/src/fhirmapper/transform.ts b/packages/core/src/fhirmapper/transform.ts
index 86b29db4cb..06927e7bbd 100644
--- a/packages/core/src/fhirmapper/transform.ts
+++ b/packages/core/src/fhirmapper/transform.ts
@@ -13,7 +13,7 @@ import { generateId } from '../crypto';
import { evalFhirPathTyped } from '../fhirpath/parse';
import { getTypedPropertyValue, toJsBoolean, toTypedValue } from '../fhirpath/utils';
import { TypedValue } from '../types';
-import { tryGetDataType } from '../typeschema/types';
+import { InternalSchemaElement, tryGetDataType } from '../typeschema/types';
import { conceptMapTranslate } from './conceptmaptranslate';
interface TransformContext {
@@ -345,10 +345,12 @@ function evalTarget(ctx: TransformContext, target: StructureMapGroupRuleTarget):
const isArray = isArrayProperty(targetContext, target.element as string) || Array.isArray(originalValue);
if (!target.transform) {
+ const elementTypes = tryGetPropertySchema(targetContext, target.element as string)?.type;
+ const elementType = elementTypes?.length === 1 ? elementTypes[0].code : undefined;
if (isArray || originalValue === undefined) {
- targetValue = [toTypedValue({})];
+ targetValue = [elementType ? { type: elementType, value: {} } : toTypedValue({})];
} else {
- targetValue = [toTypedValue(originalValue)];
+ targetValue = [elementType ? { type: elementType, value: originalValue } : toTypedValue(originalValue)];
}
} else {
switch (target.transform) {
@@ -408,9 +410,18 @@ function evalTarget(ctx: TransformContext, target: StructureMapGroupRuleTarget):
* @internal
*/
function isArrayProperty(targetContext: TypedValue, element: string): boolean | undefined {
- const targetContextTypeDefinition = tryGetDataType(targetContext.type);
- const targetPropertyTypeDefinition = targetContextTypeDefinition?.elements?.[element];
- return targetPropertyTypeDefinition?.isArray;
+ return tryGetPropertySchema(targetContext, element)?.isArray;
+}
+
+/**
+ * Returns the type schema
+ * @param targetContext - The target context.
+ * @param element - The element to check (i.e., the property name).
+ * @returns The type schema for the target element, if it is loaded
+ * @internal
+ */
+function tryGetPropertySchema(targetContext: TypedValue, element: string): InternalSchemaElement | undefined {
+ return tryGetDataType(targetContext.type)?.elements?.[element];
}
/**
diff --git a/packages/core/src/fhirmapper/transform.types.test.ts b/packages/core/src/fhirmapper/transform.types.test.ts
new file mode 100644
index 0000000000..ae5791292e
--- /dev/null
+++ b/packages/core/src/fhirmapper/transform.types.test.ts
@@ -0,0 +1,42 @@
+import { readJson } from '@medplum/definitions';
+import { Bundle } from '@medplum/fhirtypes';
+import { toTypedValue } from '../fhirpath/utils';
+import { indexStructureDefinitionBundle } from '../typeschema/types';
+import { parseMappingLanguage } from './parse';
+import { structureMapTransform } from './transform';
+import { TypedValue } from '../types';
+
+describe('FHIR Mapper transform - dependent', () => {
+ beforeAll(() => {
+ indexStructureDefinitionBundle(readJson('fhir/r4/profiles-types.json') as Bundle);
+ indexStructureDefinitionBundle(readJson('fhir/r4/profiles-resources.json') as Bundle);
+ });
+
+ test('Patient name', () => {
+ const map = `
+ group PIDToPatient(source src: PID, target tgt: Patient) {
+ src -> tgt.resourceType = 'Patient';
+ src.PID_5 as s_name -> tgt.name as t_name then xpnToName(s_name, t_name);
+ }
+
+ group xpnToName(source srcName: XPN, target tgtName: HumanName) {
+ srcName._0 as s_family_name -> tgtName.family = s_family_name;
+ srcName._1 as s_given0 -> tgtName.given = s_given0;
+ srcName._2 as s_given1 -> tgtName.given = s_given1;
+ }
+ `;
+
+ const input: TypedValue[] = [
+ toTypedValue({
+ PID_5: { _0: 'DOE', _1: 'JANE', _2: 'Q' },
+ }),
+ { type: 'Patient', value: {} } as TypedValue,
+ ];
+
+ const structureMap = parseMappingLanguage(map);
+ const actual = structureMapTransform(structureMap, input);
+ const expected = [{ value: { resourceType: 'Patient', name: [{ family: 'DOE', given: ['JANE', 'Q'] }] } }];
+
+ expect(actual).toMatchObject(expected);
+ });
+});
diff --git a/packages/core/src/format.test.ts b/packages/core/src/format.test.ts
index 99759c2df2..215c3f45cb 100644
--- a/packages/core/src/format.test.ts
+++ b/packages/core/src/format.test.ts
@@ -1,5 +1,5 @@
import { Observation } from '@medplum/fhirtypes';
-import { UCUM } from './constants';
+import { LOINC, UCUM } from './constants';
import {
formatAddress,
formatCodeableConcept,
@@ -423,4 +423,30 @@ test('Format Observation value', () => {
],
} as Observation)
).toBe('110 mmHg / 75 mmHg');
+ expect(
+ formatObservationValue({
+ resourceType: 'Observation',
+ code: { text: 'Body temperature' },
+ valueQuantity: {
+ value: 36.7,
+ unit: 'C',
+ code: 'Cel',
+ system: UCUM,
+ },
+ component: [
+ {
+ code: { text: 'Body temperature measurement site' },
+ valueCodeableConcept: {
+ coding: [
+ {
+ display: 'Oral',
+ code: 'LA9367-9',
+ system: LOINC,
+ },
+ ],
+ },
+ },
+ ],
+ } as Observation)
+ ).toBe('36.7 C / Oral');
});
diff --git a/packages/core/src/format.ts b/packages/core/src/format.ts
index 70b1b6c42c..5d3a8c5d0c 100644
--- a/packages/core/src/format.ts
+++ b/packages/core/src/format.ts
@@ -439,24 +439,24 @@ export function formatObservationValue(obs: Observation | ObservationComponent |
return '';
}
- if ('component' in obs) {
- return (obs.component as ObservationComponent[]).map((c) => formatObservationValue(c)).join(' / ');
- }
+ const result = [];
if (obs.valueQuantity) {
- return formatQuantity(obs.valueQuantity);
- }
-
- if (obs.valueCodeableConcept) {
- return formatCodeableConcept(obs.valueCodeableConcept);
+ result.push(formatQuantity(obs.valueQuantity));
+ } else if (obs.valueCodeableConcept) {
+ result.push(formatCodeableConcept(obs.valueCodeableConcept));
+ } else {
+ const valueString = ensureString(obs.valueString);
+ if (valueString) {
+ result.push(valueString);
+ }
}
- const valueString = ensureString(obs.valueString);
- if (valueString) {
- return valueString;
+ if ('component' in obs) {
+ result.push((obs.component as ObservationComponent[]).map((c) => formatObservationValue(c)).join(' / '));
}
- return '';
+ return result.join(' / ').trim();
}
/**
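The reworked formatter now appends formatted components after a top-level value instead of returning early, which the new body-temperature test above exercises. A small sketch of the resulting behavior; values are illustrative:

```ts
import { formatObservationValue } from '@medplum/core';
import { Observation } from '@medplum/fhirtypes';

const obs: Observation = {
  resourceType: 'Observation',
  status: 'final',
  code: { text: 'Body temperature' },
  valueQuantity: { value: 36.7, unit: 'C' },
  component: [
    { code: { text: 'Measurement site' }, valueCodeableConcept: { coding: [{ display: 'Oral' }] } },
  ],
};

// Previously only the components were formatted when present; now the
// top-level value and the components are joined: "36.7 C / Oral"
console.log(formatObservationValue(obs));
```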
diff --git a/packages/core/src/typeschema/validation.test.ts b/packages/core/src/typeschema/validation.test.ts
index 8ecc02a254..6f78938b37 100644
--- a/packages/core/src/typeschema/validation.test.ts
+++ b/packages/core/src/typeschema/validation.test.ts
@@ -1259,6 +1259,19 @@ describe('FHIR resource validation', () => {
expect(() => validateResource(e3, { profile })).toThrow();
});
+ // TODO: Change this check from warning to error
+ // Duplicate entries for choice-of-type property is currently a warning
+ // We need to first log and track this, and notify customers of breaking changes
+ function expectOneWarning(resource: Resource, textContains: string): void {
+ const issues = validateResource(resource);
+ expect(issues).toHaveLength(1);
+ expect(issues[0].severity).toBe('warning');
+ expect(issues[0].details?.text).toContain(textContains);
+ }
+
+ const DUPLICATE_CHOICE_OF_TYPE_PROPERTY = 'Duplicate choice of type property';
+ const PRIMITIVE_EXTENSION_TYPE_MISMATCH = 'Type of primitive extension does not match the type of property';
+
test('Multiple values for choice of type property', () => {
const carePlan: CarePlan = {
resourceType: 'CarePlan',
@@ -1286,13 +1299,91 @@ describe('FHIR resource validation', () => {
],
};
- // TODO: Change this check from warning to error
- // Duplicate entries for choice-of-type property is currently a warning
- // We need to first log and track this, and notify customers of breaking changes
- const issues = validateResource(carePlan);
- expect(issues).toHaveLength(1);
- expect(issues[0].severity).toBe('warning');
- expect(issues[0].details?.text).toContain('Duplicate choice of type property');
+ expectOneWarning(carePlan, DUPLICATE_CHOICE_OF_TYPE_PROPERTY);
+ });
+
+ test('Valid choice of type properties with primitive extensions', () => {
+ expect(
+ validateResource({
+ resourceType: 'Patient',
+ multipleBirthInteger: 2,
+ } as Patient)
+ ).toHaveLength(0);
+
+ expect(
+ validateResource({
+ resourceType: 'Patient',
+ _multipleBirthInteger: {
+ extension: [],
+ },
+ } as Patient)
+ ).toHaveLength(0);
+
+ // check both orders of the properties
+ expect(
+ validateResource({
+ resourceType: 'Patient',
+ multipleBirthInteger: 2,
+ _multipleBirthInteger: {
+ extension: [],
+ },
+ } as Patient)
+ ).toHaveLength(0);
+ expect(
+ validateResource({
+ resourceType: 'Patient',
+ multipleBirthInteger: 2,
+ _multipleBirthInteger: {
+ extension: [],
+ },
+ } as Patient)
+ ).toHaveLength(0);
+ });
+
+ test('Invalid choice of type properties with primitive extensions', () => {
+ expectOneWarning(
+ {
+ resourceType: 'Patient',
+ multipleBirthBoolean: true,
+ multipleBirthInteger: 2,
+ } as Patient,
+ DUPLICATE_CHOICE_OF_TYPE_PROPERTY
+ );
+
+ expectOneWarning(
+ {
+ resourceType: 'Patient',
+ _multipleBirthInteger: {
+ extension: [],
+ },
+ _multipleBirthBoolean: {
+ extension: [],
+ },
+ } as Patient,
+ DUPLICATE_CHOICE_OF_TYPE_PROPERTY
+ );
+
+ // Primitive extension type mismatch, check both orders of the properties
+ expectOneWarning(
+ {
+ resourceType: 'Patient',
+ multipleBirthInteger: 2,
+ _multipleBirthBoolean: {
+ extension: [],
+ },
+ } as Patient,
+ PRIMITIVE_EXTENSION_TYPE_MISMATCH
+ );
+ expectOneWarning(
+ {
+ resourceType: 'Patient',
+ _multipleBirthBoolean: {
+ extension: [],
+ },
+ multipleBirthInteger: 2,
+ } as Patient,
+ PRIMITIVE_EXTENSION_TYPE_MISMATCH
+ );
});
test('Reference type check', () => {
diff --git a/packages/core/src/typeschema/validation.ts b/packages/core/src/typeschema/validation.ts
index a1f7807234..f1fd419f34 100644
--- a/packages/core/src/typeschema/validation.ts
+++ b/packages/core/src/typeschema/validation.ts
@@ -283,13 +283,38 @@ class ResourceValidator implements ResourceVisitor {
if (!object) {
return;
}
- const choiceOfTypeElements: Record<string, boolean> = {};
+ const choiceOfTypeElements: Record<string, string> = {};
for (const key of Object.keys(object)) {
if (key === 'resourceType') {
continue; // Skip special resource type discriminator property in JSON
}
const choiceOfTypeElementName = isChoiceOfType(parent, key, properties);
if (choiceOfTypeElementName) {
+ // check that the type of the primitive extension matches the type of the property
+ let relatedElementName: string;
+ let requiredRelatedElementName: string;
+ if (choiceOfTypeElementName.startsWith('_')) {
+ relatedElementName = choiceOfTypeElementName.slice(1);
+ requiredRelatedElementName = key.slice(1);
+ } else {
+ relatedElementName = '_' + choiceOfTypeElementName;
+ requiredRelatedElementName = '_' + key;
+ }
+
+ if (
+ relatedElementName in choiceOfTypeElements &&
+ choiceOfTypeElements[relatedElementName] !== requiredRelatedElementName
+ ) {
+ this.issues.push(
+ createOperationOutcomeIssue(
+ 'warning',
+ 'structure',
+ `Type of primitive extension does not match the type of property "${choiceOfTypeElementName.startsWith('_') ? choiceOfTypeElementName.slice(1) : choiceOfTypeElementName}"`,
+ choiceOfTypeElementName
+ )
+ );
+ }
+
if (choiceOfTypeElements[choiceOfTypeElementName]) {
// Found a duplicate choice of type property
// TODO: This should be an error, but it's currently a warning to avoid breaking existing code
@@ -303,7 +328,7 @@ class ResourceValidator implements ResourceVisitor {
)
);
}
- choiceOfTypeElements[choiceOfTypeElementName] = true;
+ choiceOfTypeElements[choiceOfTypeElementName] = key;
continue;
}
if (!(key in properties) && !(key.startsWith('_') && key.slice(1) in properties)) {
@@ -486,8 +511,10 @@ function isChoiceOfType(
key: string,
propertyDefinitions: Record
): string | undefined {
+ let prefix = '';
if (key.startsWith('_')) {
key = key.slice(1);
+ prefix = '_';
}
const parts = key.split(/(?=[A-Z])/g); // Split before capital letters
let testProperty = '';
@@ -496,7 +523,7 @@ function isChoiceOfType(
const elementName = testProperty + '[x]';
if (propertyDefinitions[elementName]) {
const typedPropertyValue = getTypedPropertyValue(typedValue, testProperty);
- return typedPropertyValue ? elementName : undefined;
+ return typedPropertyValue ? prefix + elementName : undefined;
}
}
return undefined;
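From the caller's perspective, the new check shows up as an additional warning issue from `validateResource` when a primitive extension is paired with a different choice-of-type variant than its sibling value, for example:

```ts
import { validateResource } from '@medplum/core';
import { Patient } from '@medplum/fhirtypes';

// multipleBirthInteger is paired with _multipleBirthBoolean, so the primitive
// extension type does not match the value type -- expect one warning issue.
const issues = validateResource({
  resourceType: 'Patient',
  multipleBirthInteger: 2,
  _multipleBirthBoolean: { extension: [] },
} as Patient);

console.log(issues[0].severity); // "warning"
console.log(issues[0].details?.text); // "Type of primitive extension does not match the type of property ..."
```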
diff --git a/packages/core/src/utils.test.ts b/packages/core/src/utils.test.ts
index 27ca03bc63..08c211fdc7 100644
--- a/packages/core/src/utils.test.ts
+++ b/packages/core/src/utils.test.ts
@@ -45,6 +45,7 @@ import {
isPopulated,
isProfileResource,
isUUID,
+ isValidHostname,
lazy,
parseReference,
preciseEquals,
@@ -1342,4 +1343,22 @@ describe('Core Utils', () => {
);
expect(getQueryString(undefined)).toEqual('');
});
+
+ test('isValidHostname', () => {
+ expect(isValidHostname('foo')).toEqual(true);
+ expect(isValidHostname('foo.com')).toEqual(true);
+ expect(isValidHostname('foo.bar.com')).toEqual(true);
+ expect(isValidHostname('foo.org')).toEqual(true);
+ expect(isValidHostname('foo.bar.co.uk')).toEqual(true);
+ expect(isValidHostname('localhost')).toEqual(true);
+ expect(isValidHostname('LOCALHOST')).toEqual(true);
+ expect(isValidHostname('foo-bar-baz')).toEqual(true);
+ expect(isValidHostname('foo_bar')).toEqual(true);
+ expect(isValidHostname('foobar123')).toEqual(true);
+
+ expect(isValidHostname('foo.com/bar')).toEqual(false);
+ expect(isValidHostname('https://foo.com')).toEqual(false);
+ expect(isValidHostname('foo_-bar_-')).toEqual(false);
+ expect(isValidHostname('foo | rm -rf /')).toEqual(false);
+ });
});
diff --git a/packages/core/src/utils.ts b/packages/core/src/utils.ts
index 02a8685d01..0b71db545d 100644
--- a/packages/core/src/utils.ts
+++ b/packages/core/src/utils.ts
@@ -1139,3 +1139,36 @@ export function getQueryString(query: QueryTypes): string {
// Source: https://url.spec.whatwg.org/#dom-urlsearchparams-urlsearchparams:~:text=6.2.%20URLSearchParams,)%20init%20%3D%20%22%22)%3B
return new URLSearchParams(query).toString();
}
+
+export const VALID_HOSTNAME_REGEX =
+ /^(([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9\-_]*[a-zA-Z0-9])\.)*([A-Za-z0-9]|[A-Za-z0-9][A-Za-z0-9\-_]*[A-Za-z0-9])$/;
+
+/**
+ * Tests whether a given input is a valid hostname.
+ *
+ * __NOTE: Does not validate that the input is a valid domain name, only a valid hostname.__
+ *
+ * @param input - The input to test.
+ * @returns True if `input` is a valid hostname, otherwise returns false.
+ *
+ * ### Valid matches:
+ * - foo
+ * - foo.com
+ * - foo.bar.com
+ * - foo.org
+ * - foo.bar.co.uk
+ * - localhost
+ * - LOCALHOST
+ * - foo-bar-baz
+ * - foo_bar
+ * - foobar123
+ *
+ * ### Invalid matches:
+ * - foo.com/bar
+ * - https://foo.com
+ * - foo_-bar_-
+ * - foo | rm -rf /
+ */
+export function isValidHostname(input: string): boolean {
+ return VALID_HOSTNAME_REGEX.test(input);
+}
diff --git a/packages/definitions/dist/fhir/r4/profiles-medplum.json b/packages/definitions/dist/fhir/r4/profiles-medplum.json
index 6c91b75a53..c75186e8ad 100644
--- a/packages/definitions/dist/fhir/r4/profiles-medplum.json
+++ b/packages/definitions/dist/fhir/r4/profiles-medplum.json
@@ -240,23 +240,23 @@
}
},
{
- "id" : "Project.secret",
- "path" : "Project.secret",
- "definition" : "Secure environment variable that can be used to store secrets for bots.",
+ "id" : "Project.setting",
+ "path" : "Project.setting",
+ "definition" : "Option or parameter that can be adjusted within the Medplum Project to customize its behavior.",
"min" : 0,
"max" : "*",
"type" : [{
"code" : "BackboneElement"
}],
"base" : {
- "path" : "Project.secret",
+ "path" : "Project.setting",
"min" : 0,
"max" : "*"
}
},
{
- "id" : "Project.secret.name",
- "path" : "Project.secret.name",
+ "id" : "Project.setting.name",
+ "path" : "Project.setting.name",
"definition" : "The secret name.",
"min" : 1,
"max" : "1",
@@ -264,14 +264,14 @@
"code" : "string"
}],
"base" : {
- "path" : "Project.secret.name",
+ "path" : "Project.setting.name",
"min" : 1,
"max" : "1"
}
},
{
- "id" : "Project.secret.value[x]",
- "path" : "Project.secret.value[x]",
+ "id" : "Project.setting.value[x]",
+ "path" : "Project.setting.value[x]",
"definition" : "The secret value.",
"min" : 1,
"max" : "1",
@@ -288,11 +288,50 @@
"code" : "integer"
}],
"base" : {
- "path" : "Project.secret.value[x]",
+ "path" : "Project.setting.value[x]",
"min" : 1,
"max" : "1"
}
},
+ {
+ "id" : "Project.secret",
+ "path" : "Project.secret",
+ "definition" : "Option or parameter that can be adjusted within the Medplum Project to customize its behavior, only visible to project administrators.",
+ "min" : 0,
+ "max" : "*",
+ "base" : {
+ "path" : "Project.secret",
+ "min" : 0,
+ "max" : "*"
+ },
+ "contentReference" : "#Project.setting"
+ },
+ {
+ "id" : "Project.systemSetting",
+ "path" : "Project.systemSetting",
+ "definition" : "Option or parameter that can be adjusted within the Medplum Project to customize its behavior, only modifiable by system administrators.",
+ "min" : 0,
+ "max" : "*",
+ "base" : {
+ "path" : "Project.systemSetting",
+ "min" : 0,
+ "max" : "*"
+ },
+ "contentReference" : "#Project.setting"
+ },
+ {
+ "id" : "Project.systemSecret",
+ "path" : "Project.systemSecret",
+ "definition" : "Option or parameter that can be adjusted within the Medplum Project to customize its behavior, only visible to system administrators.",
+ "min" : 0,
+ "max" : "*",
+ "base" : {
+ "path" : "Project.systemSecret",
+ "min" : 0,
+ "max" : "*"
+ },
+ "contentReference" : "#Project.setting"
+ },
{
"id" : "Project.site",
"path" : "Project.site",
diff --git a/packages/definitions/package.json b/packages/definitions/package.json
index 61304f6376..6bb8645f4a 100644
--- a/packages/definitions/package.json
+++ b/packages/definitions/package.json
@@ -1,6 +1,6 @@
{
"name": "@medplum/definitions",
- "version": "3.1.2",
+ "version": "3.1.3",
"description": "Medplum Data Definitions",
"keywords": [
"medplum",
diff --git a/packages/docs/docs/access/binary-security-context.md b/packages/docs/docs/access/binary-security-context.md
new file mode 100644
index 0000000000..6c1c786763
--- /dev/null
+++ b/packages/docs/docs/access/binary-security-context.md
@@ -0,0 +1,16 @@
+# Binary Security Context
+
+When managing access, the FHIR [`Binary`](/docs/api/fhir/resources/binary) resource is a unique case. Access controls cannot be applied to [`Binary`](/docs/api/fhir/resources/binary) resources in the same way as other resources, so you must use the `Binary.securityContext` element to add access policies.
+
+The `securityContext` element is a reference to another resource that acts as a proxy for the access controls of that [`Binary`](/docs/api/fhir/resources/binary). For example, if the `securityContext` references a [`Patient`](/docs/api/fhir/resources/patient), then the [`Binary`](/docs/api/fhir/resources/binary) will only be viewable by users and resources that have read access to that [`Patient`](/docs/api/fhir/resources/patient).
+
+Below is an example of a simple [`Binary`](/docs/api/fhir/resources/binary) resource with a `securityContext` that references a [`Patient`](/docs/api/fhir/resources/patient).
+
+```json
+{
+ "resourceType": "Binary",
+ "securityContext": { "reference": "Patient/homer-simpson" }
+}
+```
+
+For more details on how [`Binary`](/docs/api/fhir/resources/binary) resources are used in FHIR, see the [Binary Data docs](/docs/fhir-datastore/binary-data).
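A sketch of setting the security context from the TypeScript SDK, assuming an existing `Binary` (the ID is a placeholder) and the `Patient/homer-simpson` reference from the example above:

```ts
import { MedplumClient } from '@medplum/core';
import { Binary } from '@medplum/fhirtypes';

const medplum = new MedplumClient();

// Read an existing Binary and point its security context at the patient whose
// access rules should govern who can read it.
const binary = await medplum.readResource('Binary', 'example-binary-id');
await medplum.updateResource<Binary>({
  ...binary,
  securityContext: { reference: 'Patient/homer-simpson' },
});
```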
diff --git a/packages/docs/docs/administration/provider-directory/index.md b/packages/docs/docs/administration/provider-directory/index.md
index ee8da13c8f..5d14afe491 100644
--- a/packages/docs/docs/administration/provider-directory/index.md
+++ b/packages/docs/docs/administration/provider-directory/index.md
@@ -4,7 +4,7 @@ sidebar_position: 7
# Modeling your Provider Directory
-Provider Directories are critical databases housing essential details about healthcare providers, from individual practitioners to entire organizations. Accurate and standardized modeling of these directories, ensures better a better patient experience via improved coordination coordination of care and operational efficiency.
+Provider Directories are critical databases housing essential details about healthcare providers, from individual practitioners to entire organizations. Accurate and standardized modeling of these directories ensures a better patient experience via improved coordination of care and operational efficiency.
This section provide a series of guides on how to properly model your organization's provider directory. It focuses on a few key challenges:
diff --git a/packages/docs/docs/agent/bulk-status.md b/packages/docs/docs/agent/bulk-status.md
new file mode 100644
index 0000000000..d80fee911b
--- /dev/null
+++ b/packages/docs/docs/agent/bulk-status.md
@@ -0,0 +1,199 @@
+---
+sidebar_position: 11
+---
+
+# Agent Bulk Status
+
+Gets the status of an agent or agents based on given search criteria. Useful for seeing whether agents are connected and listing their current software version.
+
+## Invoke the `$bulk-status` operation
+
+```
+[base]/Agent/$bulk-status
+```
+
+For example:
+
+```bash
+medplum get 'Agent/$bulk-status'
+```
+
+### Valid Response
+
+The response to this operation is a `Bundle` of `Parameters`. Each `Parameters` within the `Bundle` contains an `agent` and a `result`,
+which is the result of calling the `$status` operation on this `Agent`, either a `Parameters` or `OperationOutcome` resource.
+
+Example response:
+
+```json
+{
+ "resourceType": "Bundle",
+ "type": "collection",
+ "entry": [
+ {
+ "resource": {
+ "resourceType": "Parameters",
+ "parameter": [
+ {
+ "name": "agent",
+ "resource": {
+ "resourceType": "Agent",
+ "name": "Test Agent 1",
+ "status": "active",
+ "id": "93f8b2fb-65a3-4977-a175-71b73b26fde7",
+ "meta": {
+ "versionId": "e182201a-6925-467f-a92b-496193fb4c39",
+ "lastUpdated": "2024-04-19T20:29:25.087Z"
+ }
+ }
+ },
+ {
+ "name": "result",
+ "resource": {
+ "resourceType": "Parameters",
+ "parameter": [
+ {
+ "name": "status",
+ "valueCode": "connected"
+ },
+ {
+ "name": "version",
+ "valueString": "3.1.4"
+ },
+ {
+ "name": "lastUpdated",
+ "valueInstant": "2024-04-19T00:00:00Z"
+ }
+ ]
+ }
+ }
+ ]
+ }
+ },
+ {
+ "resource": {
+ "resourceType": "Parameters",
+ "parameter": [
+ {
+ "name": "agent",
+ "resource": {
+ "resourceType": "Agent",
+ "name": "Test Agent 2",
+ "status": "active",
+ "id": "93f8b2fb-65a3-4977-a175-71b73b26fde7",
+ "meta": {
+ "versionId": "e182201a-6925-467f-a92b-496193fb4c39",
+ "lastUpdated": "2024-04-19T20:29:25.087Z"
+ }
+ }
+ },
+ {
+ "name": "result",
+ "resource": {
+ "resourceType": "Parameters",
+ "parameter": [
+ {
+ "name": "status",
+ "valueCode": "disconnected"
+ },
+ {
+ "name": "version",
+ "valueString": "3.1.2"
+ },
+ {
+ "name": "lastUpdated",
+ "valueInstant": "2024-04-19T00:00:00Z"
+ }
+ ]
+ }
+ }
+ ]
+ }
+ },
+ {
+ "resource": {
+ "resourceType": "Parameters",
+ "parameter": [
+ {
+ "name": "agent",
+ "resource": {
+ "resourceType": "Agent",
+ "name": "Test Agent 3",
+ "status": "off",
+ "id": "93f8b2fb-65a3-4977-a175-71b73b26fde7",
+ "meta": {
+ "versionId": "e182201a-6925-467f-a92b-496193fb4c39",
+ "lastUpdated": "2024-04-19T20:29:25.087Z"
+ }
+ }
+ },
+ {
+ "name": "result",
+ "resource": {
+ "resourceType": "OperationOutcome",
+ "issue": [
+ {
+ "severity": "error",
+ "code": "exception",
+ "details": {
+ "text": "Something weird happened when getting the status"
+ }
+ }
+ ]
+ }
+ }
+ ]
+ }
+ }
+ ]
+}
+```
+
+### Invalid Response
+
+Example outcome when exceeding max `_count` limit:
+
+```json
+{
+ "resourceType": "OperationOutcome",
+ "issue": [
+ {
+ "severity": "error",
+ "code": "invalid",
+ "details": {
+ "text": "'_count' of 101 is greater than max of 100"
+ }
+ }
+ ]
+}
+```
+
+## Using search parameters
+
+All of the `Agent` search parameters can be used to select which agents to query the status of.
+
+Some useful search parameters are:
+- `name`
+- `status`
+- `_count` and `_offset`
+
+### Recipes
+
+Getting the status for one agent by name:
+
+```bash
+medplum get 'Agent/$bulk-status?name=Test+Agent+1'
+```
+
+Getting the status of all active agents:
+
+```bash
+medplum get 'Agent/$bulk-status?status=active'
+```
+
+Paging through all agent statuses, 50 at a time:
+
+```bash
+medplum get 'Agent/$bulk-status?_count=50&_offset=0'
+medplum get 'Agent/$bulk-status?_count=50&_offset=50'
+```
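The same operation can be invoked from the TypeScript SDK; a sketch mirroring the "active agents" recipe above (error handling omitted):

```ts
import { MedplumClient } from '@medplum/core';
import { Bundle } from '@medplum/fhirtypes';

const medplum = new MedplumClient();

// Equivalent to: medplum get 'Agent/$bulk-status?status=active'
const url = medplum.fhirUrl('Agent', '$bulk-status');
url.searchParams.set('status', 'active');
const bundle = (await medplum.get(url)) as Bundle;

for (const entry of bundle.entry ?? []) {
  console.log(entry.resource?.resourceType); // "Parameters" for each agent result
}
```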
diff --git a/packages/docs/docs/agent/index.md b/packages/docs/docs/agent/index.md
index b8bbabaf80..eac981f39a 100644
--- a/packages/docs/docs/agent/index.md
+++ b/packages/docs/docs/agent/index.md
@@ -163,7 +163,7 @@ HL7 Feeds can be extremely high volume, and before you go live with a high-volum
## Running from source
-Testing the setup end-to-end on localhost can be done by doing the following steps. This assumes you are [running MØedplum on localhost](/docs/contributing/run-the-stack) as a prerequisite.
+Testing the setup end-to-end on localhost can be done by doing the following steps. This assumes you are [running Medplum on localhost](/docs/contributing/run-the-stack) as a prerequisite.
Navigate to the `medplum/packages/agent`` folder on your drive and run the following command in your terminal
diff --git a/packages/docs/docs/agent/requirements.md b/packages/docs/docs/agent/requirements.md
index fd88b6a8a6..1e392ca79e 100644
--- a/packages/docs/docs/agent/requirements.md
+++ b/packages/docs/docs/agent/requirements.md
@@ -1,5 +1,5 @@
---
-sidebar_position: 100
+sidebar_position: 3
---
# System Requirements
diff --git a/packages/docs/docs/agent/status.md b/packages/docs/docs/agent/status.md
new file mode 100644
index 0000000000..4a9e3dc2b5
--- /dev/null
+++ b/packages/docs/docs/agent/status.md
@@ -0,0 +1,87 @@
+---
+sidebar_position: 10
+---
+
+# Agent Status
+
+Gets the status of a given agent. Useful for seeing whether an agent is connected and listing its current software version.
+
+> For querying multiple agent statuses at once, or using `SearchParameters` to select agents to query, see [Bulk Status](./bulk-status.md).
+
+## Invoke the `$status` operation
+
+```
+[base]/Agent/[id]/$status
+```
+
+For example:
+
+```bash
+medplum get 'Agent/[id]/$status'
+```
+
+### Valid Response
+
+Valid status codes include:
+- `connected`
+- `disconnected`
+- `unknown`
+
+Example response when the `Agent` is known and connected:
+
+```json
+{
+ "resourceType": "Parameters",
+ "parameter": [
+ {
+ "name": "status",
+ "valueCode": "connected"
+ },
+ {
+ "name": "version",
+ "valueString": "3.1.4"
+ },
+ {
+ "name": "lastUpdated",
+ "valueInstant": "2024-04-19T00:00:00Z"
+ }
+ ]
+}
+```
+
+In cases where status has not been reported yet, `status` and `version` may be `unknown`, and `lastUpdated` may not be present.
+
+```json
+{
+ "resourceType": "Parameters",
+ "parameter": [
+ {
+ "name": "status",
+ "valueCode": "unknown"
+ },
+ {
+ "name": "version",
+ "valueString": "unknown"
+ }
+ ]
+}
+```
+
+### Invalid Response
+
+Example outcome when an ID was not supplied to the operation:
+
+```json
+{
+ "resourceType": "OperationOutcome",
+ "issue": [
+ {
+ "severity": "error",
+ "code": "invalid",
+ "details": {
+ "text": "Must specify agent ID or identifier"
+ }
+ }
+ ]
+}
+```
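A sketch of calling `$status` for a single agent from the TypeScript SDK; the agent ID is a placeholder:

```ts
import { MedplumClient } from '@medplum/core';
import { Parameters } from '@medplum/fhirtypes';

const medplum = new MedplumClient();

// Equivalent to: medplum get 'Agent/[id]/$status'
const params = (await medplum.get(medplum.fhirUrl('Agent', 'example-agent-id', '$status'))) as Parameters;
const status = params.parameter?.find((p) => p.name === 'status')?.valueCode;
console.log(status); // "connected", "disconnected", or "unknown"
```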
diff --git a/packages/docs/docs/auth/methods/index.md b/packages/docs/docs/auth/methods/index.md
index b34f45ab3d..b9ce149957 100644
--- a/packages/docs/docs/auth/methods/index.md
+++ b/packages/docs/docs/auth/methods/index.md
@@ -114,6 +114,7 @@ All three implementations types will have tokens or client credentials with syst
- Consider disabling local storage on device for shared workstations or in accordance with institution policy.
- Organizations with mobile devices or laptops should enable a Mobile Device Management (MDM) solution for workstations
- [IP restrictions](/docs/access/ip-access-rules) can be enabled when especially sensitive data, such as personal health information (PHI), is being accessed.
+- Reusing the same `MedplumClient` instance for different users is discouraged. Consider creating new instances of `MedplumClient` instead.
### Server Authentication
@@ -122,6 +123,7 @@ All three implementations types will have tokens or client credentials with syst
- Restrict access to host via VPC or other mechanism - do not allow access from general internet.
- Use a secrets management to store access keys - do not store credentials on disk.
- Ensure host is patched and has security updates applied regularly.
+- Consider creating a new instance of `MedplumClient`, particularly when switching to another user.
### Host authentication
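A sketch of the per-user client pattern recommended in the bullets above, rather than reusing one shared instance; the login details are illustrative:

```ts
import { MedplumClient } from '@medplum/core';

// Create a fresh client per user or session so tokens and cached state
// never leak between users sharing the same process or workstation.
function createClientForUser(): MedplumClient {
  return new MedplumClient({ baseUrl: 'https://api.medplum.com/' });
}

const aliceClient = createClientForUser();
await aliceClient.startLogin({ email: 'alice@example.com', password: 'example-password' });
```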
diff --git a/packages/docs/docs/auth/user-management-guide/user-management-guide.md b/packages/docs/docs/auth/user-management-guide/user-management-guide.md
index 8bfa908f29..ade0860701 100644
--- a/packages/docs/docs/auth/user-management-guide/user-management-guide.md
+++ b/packages/docs/docs/auth/user-management-guide/user-management-guide.md
@@ -304,7 +304,7 @@ It is important to spread the original `ProjectMembership` to ensure that you ar
## Invite via API
-Inviting users can be done programmatically using the [`/invite` endpoint](/docs/api/project-admin/invite).
+Inviting users can be done programmatically using the [`/invite` endpoint](/docs/api/project-admin/invite). Like inviting via the [Medplum App](https://app.medplum.com), this can only be done by [project admins](/docs/access/admin#project-admin).
Prepare JSON payload:
diff --git a/packages/docs/docs/fhir-datastore/deleting-data.md b/packages/docs/docs/fhir-datastore/deleting-data.md
index 42b128f6ea..c2cce4922a 100644
--- a/packages/docs/docs/fhir-datastore/deleting-data.md
+++ b/packages/docs/docs/fhir-datastore/deleting-data.md
@@ -46,3 +46,9 @@ The Medplum `$expunge` operation supports an optional `everything` flag to syste
```
POST [base]/[resourceType]/[id]/$expunge?everything=true
```
+
+:::warning Expunging a Project
+
+If you expunge a [`Project`](/docs/api/fhir/medplum/project), it will be _permanently_ deleted and you will no longer be able to sign in or access it in any way.
+
+:::
diff --git a/packages/docs/docs/fhir-datastore/resource-history.md b/packages/docs/docs/fhir-datastore/resource-history.md
index 86595f84e0..bd564dd59f 100644
--- a/packages/docs/docs/fhir-datastore/resource-history.md
+++ b/packages/docs/docs/fhir-datastore/resource-history.md
@@ -65,3 +65,18 @@ These requests return a `Bundle` resource with the different versions stored as
:::note Resource Creation Time
There is currently no support for directly accessing the time and date that a resource was initially created. To do this use the `/_history` endpoint to retrieve all versions and view the `lastUpdated` field of the original version. Note that the GraphQL endpoint does not currently have a spec for the history API.
:::
+
+## Reverting Changes to a Resource
+
+While there is no direct method to revert changes made to a resource, it can be easily done using the `readHistory` and `readVersion` helper functions provided by Medplum.
+
+The `readHistory` function is used to get the entire history of the resource. You can then choose a version and use `readVersion` to return the complete details of that version of the resource. The current resource can then be updated with the historic details.
+
+
+ Example: Revert resource to a previous version
+
+ {ExampleCode}
+
+
+
+This method does not actually revert the resource to the previous version, but it creates a new entry in the resource's history with all of the same details as the historic version.
diff --git a/packages/docs/docs/graphql/basic-queries.mdx b/packages/docs/docs/graphql/basic-queries.mdx
index b4ca75cb17..88d87592ec 100644
--- a/packages/docs/docs/graphql/basic-queries.mdx
+++ b/packages/docs/docs/graphql/basic-queries.mdx
@@ -175,6 +175,10 @@ In the example below, we first search for a `Patient` by id, and then find all t
See the "[Reverse References](https://hl7.org/fhir/r4/graphql.html#searching)" section of the FHIR GraphQL specification for more information.
+:::note Chained Search in GraphQL
+When searching on references in GraphQL, you _cannot_ filter on the parameters of the referenced resources. This is called chained search and it is not supported by the FHIR GraphQL spec. However, it is supported in the FHIR Rest API. For more details see the [Chained Search docs](/docs/search/chained-search).
+:::
+
## Filtering lists with field arguments
FHIR GraphQL supports filtering array properties using field arguments. For example, you can filter the `Patient.name` array by the `use` field:
diff --git a/packages/docs/docs/rate-limits/index.md b/packages/docs/docs/rate-limits/index.md
new file mode 100644
index 0000000000..1ffa07caf3
--- /dev/null
+++ b/packages/docs/docs/rate-limits/index.md
@@ -0,0 +1,28 @@
+# Rate Limits
+
+The Medplum API uses a number of safeguards against bursts of incoming traffic to help maximize its stability. Users who send many requests in quick succession might see error responses that show up as status code `429`.
+
+## Default Rate Limits
+
+| Category | Free tier | Paid tier |
+| ----------------------------- | ------------------------------ | ------------------------------- |
+| Auth (`/auth/*`, `/oauth2/*`) | 1 request per IP per second | 1 request per IP per second |
+| Others | 100 requests per IP per second | 1000 requests per IP per second |
+
+All rate limits are calculated per IP address over a 15-minute window.
+
+Rate limits can be increased for paid plans. Please [contact us](mailto:info+rate-limits@medplum.com?subject=Increase%20rate%20limits) for more information.
+
+## HTTP Headers
+
+All API calls affected by rate limits will include the following headers:
+
+- `X-Ratelimit-Limit`: The maximum number of requests that the consumer is permitted to make in a 15 minute window.
+- `X-Ratelimit-Remaining`: The number of requests remaining in the current rate limit window.
+- `X-Ratelimit-Reset`: The time at which the current rate limit window resets in UTC epoch seconds.
+
+```
+X-Ratelimit-Limit: 600
+X-Ratelimit-Remaining: 599
+X-Ratelimit-Reset: 1713810464
+```
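A sketch of reading these headers and backing off on a `429` from plain `fetch`; the endpoint and retry policy are illustrative, not a prescribed client behavior:

```ts
// Illustrative 429 handling using the headers documented above.
async function fetchWithRateLimitRetry(url: string, token: string): Promise<Response> {
  const response = await fetch(url, { headers: { Authorization: `Bearer ${token}` } });
  if (response.status === 429) {
    const reset = Number(response.headers.get('X-Ratelimit-Reset')); // UTC epoch seconds
    const waitMs = Math.max(0, reset * 1000 - Date.now());
    await new Promise((resolve) => setTimeout(resolve, waitMs));
    return fetch(url, { headers: { Authorization: `Bearer ${token}` } });
  }
  console.log('Remaining this window:', response.headers.get('X-Ratelimit-Remaining'));
  return response;
}
```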
diff --git a/packages/docs/docs/search/chained-search.md b/packages/docs/docs/search/chained-search.md
index 80812d1217..664379a3bf 100644
--- a/packages/docs/docs/search/chained-search.md
+++ b/packages/docs/docs/search/chained-search.md
@@ -9,6 +9,12 @@ Chaining search parameters allows you to filter your searches based on the param
Chained searches are similar to using [`_include` or `_revinclude` parameters](/docs/search/includes), but it will not return the referenced resources, only filter based on their parameters. The primary benefit of this is it allows for easy pagination since you know you will only receive results of one resource type. See the [paginated search docs](/docs/search/paginated-search) for more details.
+:::note Chained Search Availability
+
+Chained search is only available when using the FHIR Rest API as described here. If you are using GraphQL, chained search functionality is not supported.
+
+:::
+
## Forward Chained Search
[Search parameters](/docs/search/basic-search) with the `reference` type can be chained together to search on the elements of the referenced resource.
@@ -88,6 +94,12 @@ You can include more than one link in your chained search. In the below example,
+:::note Filtering Chained Searches
+
+The [`_filter` search parameter](/docs/search/filter-search-parameter) is not currently supported when using chained search. This is on the Medplum road map, but there is no firm date when it is expected to be implemented. You can follow [this issue](https://github.com/medplum/medplum/issues/3224) for updates.
+
+:::
+
## Reverse Chained Search
Chained references can also be constructed in reverse, filtering on other resources that reference your target search resource. This is done using the `_has` parameter, which has a special syntax: `_has:::`.
diff --git a/packages/docs/docs/self-hosting/config-settings.md b/packages/docs/docs/self-hosting/config-settings.md
index 85fde2ddce..a369f669dc 100644
--- a/packages/docs/docs/self-hosting/config-settings.md
+++ b/packages/docs/docs/self-hosting/config-settings.md
@@ -156,11 +156,14 @@ Optionally override the trusted CA certificates. Default is to trust the well-kn
| `otlpTraceEndpoint` | Optional OTLP trace endpoint for OpenTelemetry. For example, `http://localhost:4318/v1/traces`. See [OpenTelemetry](/docs/self-hosting/opentelemetry) for more details. | | | |
| `accurateCountThreshold` | Optional threshold for accurate count queries. The server will always perform an estimate count first (to protect database performance), and an accurate count if the estimate is below this threshold. | | | `1000000` |
| `defaultBotRuntimeVersion` | Optional default bot runtime version. See [Bot runtime version](/docs/api/fhir/medplum/bot) for more details. | | | `awslambda` |
+| `defaultProjectFeatures` | Optional default project features. See [Project Settings](/docs/access/projects#settings) | | `init` | |
:::tip Local Config
To make changes to the server config after your first deploy, you must the edit parameter values _directly in AWS parameter store_
-To make changes to settings that affect your deployed Medplum App, you must _also_ make this change to your local configuration json file.
+To make changes to settings that affect your deployed Medplum App, you must _also_ make these changes to your local configuration json file.
+
+Once you have made these changes, you will need to restart your server for them to take effect. The easiest way to do this in a zero-downtime manner is by using the `medplum aws update-server` command. For more details on this command see the [Upgrade the Server docs](/docs/self-hosting/install-on-aws#upgrade-the-server).
:::
### AWS Secrets
diff --git a/packages/docs/package.json b/packages/docs/package.json
index 5d9cbb0187..77c775ee1c 100644
--- a/packages/docs/package.json
+++ b/packages/docs/package.json
@@ -1,6 +1,6 @@
{
"name": "@medplum/docs",
- "version": "3.1.2",
+ "version": "3.1.3",
"description": "Medplum Docs",
"homepage": "https://www.medplum.com/",
"bugs": {
@@ -47,9 +47,9 @@
"@docusaurus/tsconfig": "3.2.1",
"@docusaurus/types": "3.2.1",
"@mdx-js/react": "3.0.1",
- "@medplum/core": "3.1.2",
- "@medplum/fhirtypes": "3.1.2",
- "@medplum/mock": "3.1.2",
+ "@medplum/core": "3.1.3",
+ "@medplum/fhirtypes": "3.1.3",
+ "@medplum/mock": "3.1.3",
"@svgr/webpack": "8.1.0",
"clsx": "2.1.0",
"file-loader": "6.2.0",
@@ -57,9 +57,9 @@
"raw-loader": "4.0.2",
"react": "18.2.0",
"react-dom": "18.2.0",
- "react-intersection-observer": "9.8.1",
+ "react-intersection-observer": "9.8.2",
"react-router-dom": "6.22.3",
- "typescript": "5.4.4",
+ "typescript": "5.4.5",
"url-loader": "4.1.1"
},
"engines": {
diff --git a/packages/docs/sidebars.ts b/packages/docs/sidebars.ts
index a2e88a8e13..0ecc9ad338 100644
--- a/packages/docs/sidebars.ts
+++ b/packages/docs/sidebars.ts
@@ -234,6 +234,12 @@ const sidebars: SidebarsConfig = {
link: { type: 'doc', id: 'analytics/index' },
items: [{ type: 'autogenerated', dirName: 'analytics' }],
},
+ {
+ type: 'category',
+ label: 'Rate Limits',
+ link: { type: 'doc', id: 'rate-limits/index' },
+ items: [{ type: 'autogenerated', dirName: 'rate-limits' }],
+ },
{
type: 'category',
label: 'Self-Hosting',
diff --git a/packages/docs/src/pages/enterprise.tsx b/packages/docs/src/pages/enterprise.tsx
index b5ada72e37..fc3aed5ce0 100644
--- a/packages/docs/src/pages/enterprise.tsx
+++ b/packages/docs/src/pages/enterprise.tsx
@@ -91,10 +91,10 @@ export default function EnterprisePage(): JSX.Element {
-
Enterprise Integrations
+
Enterprise Identity Management
- Reliable integrations drive efficiency and safety. Medplum provides certificed enterprise integrations for
- diagnostics, billing, medications, legacy EHR platforms and more.
+ Connect multiple identity providers and provision identities programmatically across your health record
+ system. Use SCIM administration for robust and compliant identity administration.
+ Gain deep insights into system performance and health. Enables proactive issue detection, efficient
+ troubleshooting, and improved system reliability.
+
+
+
+
+
+
+
+
+
+
Enterprise Integrations
+
+ Enable reliable, compliant and auditable connectivity to service providers and partners.
+
@@ -352,12 +486,12 @@ export default function PricingPage(): JSX.Element {
Free: recommended for prototyping or learning.
- Developer: recommended for developer environments or test environments.
-
-
Production: recommended for production use, e.g. treatment of patients or conducting
research.
+
+ Premium: recommended for messaging-heavy and integration-heavy use cases.
+
Enterprise: recommended for institutions with complex workflow, integration or data
requirements. Read more details on our Enterprise offering page.
@@ -367,24 +501,41 @@ export default function PricingPage(): JSX.Element {
Medplum application.
- Enterprise Managed: recommended for those who must host the application on their own
- cloud infrastructure.
+ Enterprise Self-Hosted: recommended for those who must host the application on their
+ own cloud infrastructure.
- Data usage refers to the creation of{' '}
+ FHIR Resources Stored: Data usage refers to the creation of{' '}
FHIR Resources. This figure
- is cumulative.
+ is cumulative. For Premium, Communication resources that are generated as part of messaging are not
+ included in the resource cap shown.
- Bots and automation refer to custom logic written by customers to execute their workflow.{' '}
+ Bot Invocations: refers to custom logic written by customers to execute their workflow.{' '}
Automation documentation and{' '}
integration are a good place to learn more.
-
Organizations can require that all logins go through Google Authentication.
+
+ Required authentication methods: Organizations can require that all logins at their
+ domain go through their identity provider of choice.
+
- Many complex compliance scenarios can be supported with this infrastructure. You can read more on the{' '}
+ Compliance: Many complex compliance scenarios can be supported with this
+ infrastructure. You can read more on the{' '}
compliance page.
+
+ Websocket Subscriptions: maximum number of concurrent websocket{' '}
+ subscriptions available.
+
+
+ Audit Support: receive support during audits common in health system and payor
+ partnerships.
+
+
+ External Identity Providers: connect your Okta, Azure SSO, Auth0 or other OAuth-based
+ identity provider.
+
diff --git a/packages/docs/src/pages/solutions/index.md b/packages/docs/src/pages/solutions/index.md
index 93054fca3d..241c08a4c9 100644
--- a/packages/docs/src/pages/solutions/index.md
+++ b/packages/docs/src/pages/solutions/index.md
@@ -40,7 +40,7 @@ Run and maintain an EMPI including patient identification, data accuracy/risk sc
## Interoperability Service
-Highly customizable internal service that supports integrations that are common in healthcare such as FHIR and Smart-on-FHIR integrations, HL7 connections, SFTP, Lab data, home health integrations, logistics providers and more. Available hosted or self-hosted. [Learn More](/products/integration)
+Highly customizable internal service that supports integrations that are common in healthcare such as FHIR and Smart-on-FHIR integrations, HL7 connections, SFTP, Lab data, home health integrations, logistics providers and more. Can also be used as a system of record between multiple integrations. Available hosted or self-hosted. [Learn More](/products/integration)
## Remote Patient Monitoring
diff --git a/packages/eslint-config/index.cjs b/packages/eslint-config/index.cjs
index da18959d45..a786453c32 100644
--- a/packages/eslint-config/index.cjs
+++ b/packages/eslint-config/index.cjs
@@ -158,6 +158,7 @@ module.exports = {
'babel.config.cjs',
'jest.sequencer.js',
'package-lock.json',
+ 'postcss.config.cjs',
'rollup.config.mjs',
'webpack.config.js',
],
diff --git a/packages/eslint-config/package.json b/packages/eslint-config/package.json
index fd19c3a9c9..99418515bb 100644
--- a/packages/eslint-config/package.json
+++ b/packages/eslint-config/package.json
@@ -1,6 +1,6 @@
{
"name": "@medplum/eslint-config",
- "version": "3.1.2",
+ "version": "3.1.3",
"description": "Shared ESLint configuration for Medplum projects",
"keywords": [
"eslint",
@@ -19,8 +19,8 @@
"author": "Medplum ",
"main": "index.cjs",
"devDependencies": {
- "@typescript-eslint/eslint-plugin": "7.5.0",
- "@typescript-eslint/parser": "7.5.0",
+ "@typescript-eslint/eslint-plugin": "7.6.0",
+ "@typescript-eslint/parser": "7.6.0",
"eslint": "8.57.0",
"eslint-plugin-jsdoc": "48.2.3",
"eslint-plugin-json-files": "4.1.0",
diff --git a/packages/examples/package.json b/packages/examples/package.json
index f7ca166466..a4f556dab6 100644
--- a/packages/examples/package.json
+++ b/packages/examples/package.json
@@ -1,6 +1,6 @@
{
"name": "@medplum/examples",
- "version": "3.1.2",
+ "version": "3.1.3",
"description": "Medplum Code Examples",
"homepage": "https://www.medplum.com/",
"bugs": {
@@ -19,9 +19,9 @@
},
"devDependencies": {
"@jest/globals": "29.7.0",
- "@medplum/core": "3.1.2",
- "@medplum/fhirtypes": "3.1.2",
- "@medplum/mock": "3.1.2",
+ "@medplum/core": "3.1.3",
+ "@medplum/fhirtypes": "3.1.3",
+ "@medplum/mock": "3.1.3",
"jest": "29.7.0"
},
"engines": {
diff --git a/packages/examples/src/fhir-datastore/fhir-batch-requests.ts b/packages/examples/src/fhir-datastore/fhir-batch-requests.ts
index b1dbc6da3b..94dc756da5 100644
--- a/packages/examples/src/fhir-datastore/fhir-batch-requests.ts
+++ b/packages/examples/src/fhir-datastore/fhir-batch-requests.ts
@@ -158,7 +158,7 @@ const internalReference: Bundle =
// start-block internalReference
{
resourceType: 'Bundle',
- type: 'batch',
+ type: 'transaction',
entry: [
{
// highlight-next-line
@@ -216,7 +216,7 @@ const conditional: Bundle =
// start-block conditionalCreate
{
resourceType: 'Bundle',
- type: 'batch',
+ type: 'transaction',
entry: [
{
fullUrl: 'urn:uuid:4aac5fb6-c2ff-4851-b3cf-d66d63a82a17',
@@ -234,7 +234,7 @@ const conditional: Bundle =
method: 'POST',
url: 'Organization',
// highlight-next-line
- ifNoneExist: 'identifer=https://example-org.com/organizations|example-organization',
+ ifNoneExist: 'identifier=https://example-org.com/organizations|example-organization',
},
},
{
diff --git a/packages/examples/src/fhir-datastore/resource-history.ts b/packages/examples/src/fhir-datastore/resource-history.ts
index 1f25b34f9e..a483047006 100644
--- a/packages/examples/src/fhir-datastore/resource-history.ts
+++ b/packages/examples/src/fhir-datastore/resource-history.ts
@@ -1,5 +1,6 @@
// start-block imports
import { MedplumClient } from '@medplum/core';
+import { Bundle } from '@medplum/fhirtypes';
// end-block imports
const medplum = new MedplumClient();
@@ -19,3 +20,23 @@ curl 'https://api.medplum.com/fhir/R4/Patient/homer-simpson/_history' \
-H 'content-type: application/fhir+json' \
// end-block accessHistoryCurl
*/
+
+// start-block revertChanges
+// Read the history, returning a bundle of history entries
+const history = await medplum.readHistory('Patient', 'homer-simpson');
+
+// Implement your own logic to get the historic version of the resource you want.
+// You will need the versionId to use the readVersion function.
+const versionId = getVersionId(history);
+
+// readVersion will return the historic Patient resource
+const version = await medplum.readVersion('Patient', 'homer-simpson', versionId);
+
+// Pass the historic version to updateResource to revert to that version
+await medplum.updateResource(version);
+// end-block revertChanges
+
+function getVersionId(history: Bundle): string {
+ console.log(history);
+ return 'versionId';
+}
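
For readers wondering what `getVersionId` might look like beyond the stub above, here is a minimal sketch, assuming the history bundle is sorted newest-first (the FHIR `_history` convention), so `entry[1]` holds the immediately previous version:

```typescript
import { Bundle } from '@medplum/fhirtypes';

// Sketch only: pick the version immediately before the current one.
// Assumes the history bundle is sorted newest-first, per the FHIR _history convention.
function getPreviousVersionId(history: Bundle): string {
  const versionId = history.entry?.[1]?.resource?.meta?.versionId;
  if (!versionId) {
    throw new Error('No previous version found');
  }
  return versionId;
}
```
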
diff --git a/packages/expo-polyfills/package.json b/packages/expo-polyfills/package.json
index f0d58c40c6..eb51ffcfaf 100644
--- a/packages/expo-polyfills/package.json
+++ b/packages/expo-polyfills/package.json
@@ -1,6 +1,6 @@
{
"name": "@medplum/expo-polyfills",
- "version": "3.1.2",
+ "version": "3.1.3",
"description": "A module for polyfilling the minimum necessary web APIs for using the Medplum client on React Native",
"keywords": [
"react-native",
@@ -47,9 +47,9 @@
"text-encoding": "0.7.0"
},
"devDependencies": {
- "@medplum/core": "3.1.2",
+ "@medplum/core": "3.1.3",
"@types/base-64": "1.0.2",
- "@types/react": "18.2.74",
+ "@types/react": "18.2.78",
"@types/text-encoding": "0.0.39",
"esbuild": "0.20.2",
"esbuild-node-externals": "1.13.0",
@@ -59,7 +59,7 @@
"ts-jest": "29.1.2"
},
"peerDependencies": {
- "@medplum/core": "3.1.2",
+ "@medplum/core": "3.1.3",
"expo": "*",
"expo-crypto": "^12.6.0",
"expo-secure-store": "^12.3.1",
diff --git a/packages/expo-polyfills/src/index.test.ts b/packages/expo-polyfills/src/index.test.ts
index 5b2d137813..651ec99c56 100644
--- a/packages/expo-polyfills/src/index.test.ts
+++ b/packages/expo-polyfills/src/index.test.ts
@@ -5,20 +5,6 @@ import { Platform } from 'react-native';
import { TextDecoder, TextEncoder } from 'text-encoding';
import { ExpoClientStorage, cleanupMedplumWebAPIs, polyfillMedplumWebAPIs } from '.';
-const originalWindow = window;
-
-beforeAll(() => {
- Object.defineProperty(globalThis, 'window', {
- value: { ...originalWindow },
- });
-});
-
-afterAll(() => {
- Object.defineProperty(globalThis, 'window', {
- value: originalWindow,
- });
-});
-
jest.mock('expo-secure-store', () => {
const store = new Map();
let getKeysShouldThrow = false;
@@ -58,11 +44,19 @@ if (Platform.OS === 'web') {
}
describe('polyfillMedplumWebAPIs', () => {
+ const originalWindow = globalThis.window;
+
beforeAll(() => {
+ Object.defineProperty(globalThis, 'window', {
+ value: { ...originalWindow },
+ });
polyfillMedplumWebAPIs();
});
afterAll(() => {
+ Object.defineProperty(globalThis, 'window', {
+ value: originalWindow,
+ });
cleanupMedplumWebAPIs();
});
@@ -110,8 +104,6 @@ describe('polyfillMedplumWebAPIs', () => {
expect(window.crypto.subtle).toBeDefined();
expect(window.crypto.subtle.digest).toBeDefined();
});
-
- // TODO: Add a test for `digest`
});
describe('Location', () => {
diff --git a/packages/expo-polyfills/src/index.ts b/packages/expo-polyfills/src/index.ts
index 3b7cd4366a..562cfe13e8 100644
--- a/packages/expo-polyfills/src/index.ts
+++ b/packages/expo-polyfills/src/index.ts
@@ -6,6 +6,7 @@ import expoWebCrypto from 'expo-standard-web-crypto';
import { Platform } from 'react-native';
import { setupURLPolyfill } from 'react-native-url-polyfill';
import { TextDecoder, TextEncoder } from 'text-encoding';
+import { polyfillEvent } from './polyfills/event';
let polyfilled = false;
let originalCryptoIsSet = false;
@@ -23,6 +24,7 @@ export type PolyfillEnabledConfig = {
sessionStorage?: boolean;
textEncoder?: boolean;
btoa?: boolean;
+ event?: boolean;
};
export function cleanupMedplumWebAPIs(): void {
@@ -76,6 +78,10 @@ export function cleanupMedplumWebAPIs(): void {
Object.defineProperty(window, 'atob', { configurable: true, enumerable: true, value: undefined });
}
+ if (window.Event) {
+ Object.defineProperty(window, 'Event', { configurable: true, enumerable: true, value: undefined });
+ }
+
polyfilled = false;
}
@@ -166,6 +172,10 @@ export function polyfillMedplumWebAPIs(config?: PolyfillEnabledConfig): void {
});
}
+ if (config?.event !== false && typeof window.Event === 'undefined') {
+ polyfillEvent();
+ }
+
polyfilled = true;
}
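
For context, the new `event` flag follows the same opt-out pattern as the other polyfill flags; a minimal usage sketch, assuming only the package exports shown above:

```typescript
import { polyfillMedplumWebAPIs } from '@medplum/expo-polyfills';

// Polyfills are opt-out: omitting the config (or a flag) leaves everything enabled,
// so passing `event: false` skips only the new Event polyfill.
polyfillMedplumWebAPIs({ event: false });
```
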
diff --git a/packages/expo-polyfills/src/polyfills.test.ts b/packages/expo-polyfills/src/polyfills.test.ts
index 6e9e3ee04e..f6f2b894dc 100644
--- a/packages/expo-polyfills/src/polyfills.test.ts
+++ b/packages/expo-polyfills/src/polyfills.test.ts
@@ -1,23 +1,23 @@
import { Platform } from 'react-native';
import { cleanupMedplumWebAPIs, polyfillMedplumWebAPIs } from '.';
-const originalWindow = window;
+describe('Medplum polyfills', () => {
+ const originalWindow = window;
-beforeEach(() => {
- Object.defineProperty(globalThis, 'window', {
- value: { ...originalWindow },
+ beforeEach(() => {
+ cleanupMedplumWebAPIs();
});
-});
-afterAll(() => {
- Object.defineProperty(globalThis, 'window', {
- value: originalWindow,
+ beforeEach(() => {
+ Object.defineProperty(globalThis, 'window', {
+ value: { ...originalWindow },
+ });
});
-});
-describe('Medplum polyfills', () => {
- beforeEach(() => {
- cleanupMedplumWebAPIs();
+ afterAll(() => {
+ Object.defineProperty(globalThis, 'window', {
+ value: originalWindow,
+ });
});
if (Platform.OS !== 'web') {
@@ -46,6 +46,21 @@ describe('Medplum polyfills', () => {
// There was specifically trouble with this object when calling polyfill multiple times before
expect(window.crypto.subtle).toBeDefined();
});
+
+ test('Event should be constructable after polyfills', () => {
+ // @ts-expect-error Testing polyfill
+ globalThis.Event = undefined;
+ expect(() => new Event('foo')).toThrow();
+ polyfillMedplumWebAPIs();
+
+ const event1 = new Event('foo');
+ expect(event1).toBeInstanceOf(Event);
+ expect(event1.type).toEqual('foo');
+
+ const event2 = new Event('foo', { bubbles: true, cancelable: true, composed: true });
+ expect(event2).toBeInstanceOf(Event);
+ expect(event2.type).toEqual('foo');
+ });
});
describe('cleanupMedplumWebAPIs()', () => {
diff --git a/packages/expo-polyfills/src/polyfills/event.ts b/packages/expo-polyfills/src/polyfills/event.ts
new file mode 100644
index 0000000000..5628a357e6
--- /dev/null
+++ b/packages/expo-polyfills/src/polyfills/event.ts
@@ -0,0 +1,40 @@
+// Original source: https://github.com/benlesh/event-target-polyfill/blob/master/index.js
+// The package is no longer maintained, so we vendor the relevant code here
+
+export function polyfillEvent(): void {
+ const root = ((typeof globalThis !== 'undefined' && globalThis) ||
+ (typeof self !== 'undefined' && self) ||
+ (typeof global !== 'undefined' && global)) as typeof globalThis;
+
+ const shouldPolyfillEvent = (() => {
+ try {
+ // eslint-disable-next-line no-new
+ new root.Event('');
+ } catch (_error) {
+ return true;
+ }
+ return false;
+ })();
+
+ if (shouldPolyfillEvent) {
+ // @ts-expect-error Types don't quite match up but it should be mostly good enough
+ root.Event = (() => {
+ class Event {
+ readonly type: string;
+ readonly bubbles: boolean;
+ readonly cancelable: boolean;
+ readonly composed: boolean;
+ defaultPrevented = false;
+
+ constructor(type: string, options: EventInit) {
+ this.bubbles = !!options && !!options.bubbles;
+ this.cancelable = !!options && !!options.cancelable;
+ this.composed = !!options && !!options.composed;
+ this.type = type;
+ }
+ }
+
+ return Event;
+ })();
+ }
+}
diff --git a/packages/fhir-router/package.json b/packages/fhir-router/package.json
index 20bc458fde..559214e229 100644
--- a/packages/fhir-router/package.json
+++ b/packages/fhir-router/package.json
@@ -1,6 +1,6 @@
{
"name": "@medplum/fhir-router",
- "version": "3.1.2",
+ "version": "3.1.3",
"description": "Medplum FHIR Router",
"keywords": [
"medplum",
@@ -53,9 +53,9 @@
"test": "jest"
},
"dependencies": {
- "@medplum/core": "3.1.2",
- "@medplum/definitions": "3.1.2",
- "@medplum/fhirtypes": "3.1.2",
+ "@medplum/core": "3.1.3",
+ "@medplum/definitions": "3.1.3",
+ "@medplum/fhirtypes": "3.1.3",
"dataloader": "2.2.2",
"graphql": "16.8.1",
"rfc6902": "5.1.1"
diff --git a/packages/fhirtypes/dist/Project.d.ts b/packages/fhirtypes/dist/Project.d.ts
index 6b3e2fd52e..6b9d7ea392 100644
--- a/packages/fhirtypes/dist/Project.d.ts
+++ b/packages/fhirtypes/dist/Project.d.ts
@@ -94,10 +94,28 @@ export interface Project {
defaultPatientAccessPolicy?: Reference<AccessPolicy>;
/**
- * Secure environment variable that can be used to store secrets for
- * bots.
+ * Option or parameter that can be adjusted within the Medplum Project to
+ * customize its behavior.
*/
- secret?: ProjectSecret[];
+ setting?: ProjectSetting[];
+
+ /**
+ * Option or parameter that can be adjusted within the Medplum Project to
+ * customize its behavior, only visible to project administrators.
+ */
+ secret?: ProjectSetting[];
+
+ /**
+ * Option or parameter that can be adjusted within the Medplum Project to
+ * customize its behavior, only modifiable by system administrators.
+ */
+ systemSetting?: ProjectSetting[];
+
+ /**
+ * Option or parameter that can be adjusted within the Medplum Project to
+ * customize its behavior, only visible to system administrators.
+ */
+ systemSecret?: ProjectSetting[];
/**
* Web application or web site that is associated with the project.
@@ -122,10 +140,10 @@ export interface ProjectLink {
}
/**
- * Secure environment variable that can be used to store secrets for
- * bots.
+ * Option or parameter that can be adjusted within the Medplum Project to
+ * customize its behavior.
*/
-export interface ProjectSecret {
+export interface ProjectSetting {
/**
* The secret name.
@@ -200,3 +218,8 @@ export interface ProjectSite {
*/
recaptchaSecretKey?: string;
}
+
+/**
+ * @deprecated Use ProjectSetting instead
+ */
+export type ProjectSecret = ProjectSetting;
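
A brief, hypothetical sketch of the renamed fields and the deprecated alias (the field and type names come from the definitions above; the specific setting names and the `valueString` choice are illustrative assumptions):

```typescript
import { Project, ProjectSecret, ProjectSetting } from '@medplum/fhirtypes';

// Hypothetical values for illustration only
const settings: ProjectSetting[] = [{ name: 'theme', valueString: 'dark' }];

const project: Project = {
  resourceType: 'Project',
  name: 'Example Project',
  setting: settings, // general-purpose settings, per the new Project.setting element
  secret: [{ name: 'BOT_API_KEY', valueString: 'example-value' }], // secrets are now typed as ProjectSetting
};

// The deprecated alias still compiles during the transition
const legacySecret: ProjectSecret = { name: 'BOT_API_KEY', valueString: 'example-value' };

console.log(project, legacySecret);
```
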
diff --git a/packages/fhirtypes/package.json b/packages/fhirtypes/package.json
index e765b47b5c..1ccdff719f 100644
--- a/packages/fhirtypes/package.json
+++ b/packages/fhirtypes/package.json
@@ -1,6 +1,6 @@
{
"name": "@medplum/fhirtypes",
- "version": "3.1.2",
+ "version": "3.1.3",
"description": "Medplum FHIR Type Definitions",
"keywords": [
"medplum",
diff --git a/packages/generator/package.json b/packages/generator/package.json
index b40257b79e..19d8a69459 100644
--- a/packages/generator/package.json
+++ b/packages/generator/package.json
@@ -1,6 +1,6 @@
{
"name": "@medplum/generator",
- "version": "3.1.2",
+ "version": "3.1.3",
"description": "Medplum Code Generator",
"homepage": "https://www.medplum.com/",
"repository": {
@@ -24,19 +24,19 @@
"test": "jest"
},
"devDependencies": {
- "@medplum/core": "3.1.2",
- "@medplum/definitions": "3.1.2",
- "@medplum/fhirtypes": "3.1.2",
+ "@medplum/core": "3.1.3",
+ "@medplum/definitions": "3.1.3",
+ "@medplum/fhirtypes": "3.1.3",
"@types/json-schema": "7.0.15",
- "@types/pg": "8.11.4",
+ "@types/pg": "8.11.5",
"@types/unzipper": "0.10.9",
"fast-xml-parser": "4.3.6",
"fhirpath": "3.11.0",
"mkdirp": "3.0.1",
"node-stream-zip": "1.15.0",
"pg": "8.11.5",
- "tinybench": "2.6.0",
- "unzipper": "0.10.14"
+ "tinybench": "2.7.0",
+ "unzipper": "0.11.2"
},
"engines": {
"node": ">=18.0.0"
diff --git a/packages/generator/src/index.ts b/packages/generator/src/index.ts
index 41917b0224..2ca1223fcf 100644
--- a/packages/generator/src/index.ts
+++ b/packages/generator/src/index.ts
@@ -144,6 +144,13 @@ function writeInterface(b: FileBuilder, fhirType: InternalTypeSchema): void {
writeInterface(b, subType);
}
}
+
+ if (typeName === 'Project') {
+ // TODO: Remove this in Medplum v4
+ b.newLine();
+ generateJavadoc(b, '@deprecated Use ProjectSetting instead');
+ b.append('export type ProjectSecret = ProjectSetting;');
+ }
}
function writeInterfaceProperty(
diff --git a/packages/graphiql/package.json b/packages/graphiql/package.json
index c08acaa69b..a574cc63ce 100644
--- a/packages/graphiql/package.json
+++ b/packages/graphiql/package.json
@@ -1,6 +1,6 @@
{
"name": "@medplum/graphiql",
- "version": "3.1.2",
+ "version": "3.1.3",
"description": "Medplum GraphiQL",
"homepage": "https://www.medplum.com/",
"bugs": {
@@ -25,18 +25,18 @@
"devDependencies": {
"@graphiql/react": "0.21.0",
"@graphiql/toolkit": "0.9.1",
- "@mantine/core": "7.7.1",
- "@mantine/hooks": "7.7.1",
- "@medplum/core": "3.1.2",
- "@medplum/fhirtypes": "3.1.2",
- "@medplum/react": "3.1.2",
- "@types/react": "18.2.74",
- "@types/react-dom": "18.2.24",
+ "@mantine/core": "7.8.0",
+ "@mantine/hooks": "7.8.0",
+ "@medplum/core": "3.1.3",
+ "@medplum/fhirtypes": "3.1.3",
+ "@medplum/react": "3.1.3",
+ "@types/react": "18.2.78",
+ "@types/react-dom": "18.2.25",
"graphiql": "3.2.0",
"graphql": "16.8.1",
"graphql-ws": "5.16.0",
"postcss": "8.4.38",
- "postcss-preset-mantine": "1.13.0",
+ "postcss-preset-mantine": "1.14.4",
"react": "18.2.0",
"react-dom": "18.2.0",
"vite": "5.2.8"
diff --git a/packages/health-gorilla/package.json b/packages/health-gorilla/package.json
index 5fe12d8fa4..4f8953c488 100644
--- a/packages/health-gorilla/package.json
+++ b/packages/health-gorilla/package.json
@@ -1,6 +1,6 @@
{
"name": "@medplum/health-gorilla",
- "version": "3.1.2",
+ "version": "3.1.3",
"description": "Medplum Health Gorilla SDK",
"homepage": "https://www.medplum.com/",
"bugs": {
@@ -39,11 +39,11 @@
"test": "jest"
},
"dependencies": {
- "@medplum/core": "3.1.2",
- "@medplum/fhirtypes": "3.1.2"
+ "@medplum/core": "3.1.3",
+ "@medplum/fhirtypes": "3.1.3"
},
"devDependencies": {
- "@medplum/mock": "3.1.2"
+ "@medplum/mock": "3.1.3"
},
"engines": {
"node": ">=18.0.0"
diff --git a/packages/hl7/package.json b/packages/hl7/package.json
index 151410d4c6..7e3ff747cf 100644
--- a/packages/hl7/package.json
+++ b/packages/hl7/package.json
@@ -1,6 +1,6 @@
{
"name": "@medplum/hl7",
- "version": "3.1.2",
+ "version": "3.1.3",
"description": "Medplum HL7 Utilities",
"keywords": [
"medplum",
@@ -53,10 +53,10 @@
"test": "jest"
},
"dependencies": {
- "@medplum/core": "3.1.2"
+ "@medplum/core": "3.1.3"
},
"devDependencies": {
- "@medplum/fhirtypes": "3.1.2"
+ "@medplum/fhirtypes": "3.1.3"
},
"engines": {
"node": ">=18.0.0"
diff --git a/packages/mock/package.json b/packages/mock/package.json
index 579f182644..98f3fb6bbc 100644
--- a/packages/mock/package.json
+++ b/packages/mock/package.json
@@ -1,6 +1,6 @@
{
"name": "@medplum/mock",
- "version": "3.1.2",
+ "version": "3.1.3",
"description": "Medplum Mock Client",
"keywords": [
"medplum",
@@ -53,10 +53,10 @@
"test": "jest"
},
"dependencies": {
- "@medplum/core": "3.1.2",
- "@medplum/definitions": "3.1.2",
- "@medplum/fhir-router": "3.1.2",
- "@medplum/fhirtypes": "3.1.2",
+ "@medplum/core": "3.1.3",
+ "@medplum/definitions": "3.1.3",
+ "@medplum/fhir-router": "3.1.3",
+ "@medplum/fhirtypes": "3.1.3",
"dataloader": "2.2.2",
"jest-websocket-mock": "2.5.0",
"rfc6902": "5.1.1"
diff --git a/packages/mock/src/client.ts b/packages/mock/src/client.ts
index 9976b41b6e..4348d15d90 100644
--- a/packages/mock/src/client.ts
+++ b/packages/mock/src/client.ts
@@ -222,20 +222,23 @@ export class MockClient extends MedplumClient {
if (!this.agentAvailable) {
throw new OperationOutcomeError(badRequest('Timeout'));
}
- if (typeof destination === 'string' && destination !== '8.8.8.8') {
+ if (typeof destination !== 'string' || (destination !== '8.8.8.8' && destination !== 'localhost')) {
// Exception for test case
if (destination !== 'abc123') {
- console.warn('IPs other than 8.8.8.8 will always throw an error in MockClient');
+ console.warn(
+ 'IPs other than 8.8.8.8 and hostnames other than `localhost` will always throw an error in MockClient'
+ );
}
throw new OperationOutcomeError(badRequest('Destination device not found'));
}
- return `PING 8.8.8.8 (8.8.8.8): 56 data bytes
-64 bytes from 8.8.8.8: icmp_seq=0 ttl=115 time=10.977 ms
-64 bytes from 8.8.8.8: icmp_seq=1 ttl=115 time=13.037 ms
-64 bytes from 8.8.8.8: icmp_seq=2 ttl=115 time=23.159 ms
-64 bytes from 8.8.8.8: icmp_seq=3 ttl=115 time=12.725 ms
-
---- 8.8.8.8 ping statistics ---
+ const ip = destination === 'localhost' ? '127.0.0.1' : destination;
+ return `PING ${destination} (${ip}): 56 data bytes
+64 bytes from ${ip}: icmp_seq=0 ttl=115 time=10.977 ms
+64 bytes from ${ip}: icmp_seq=1 ttl=115 time=13.037 ms
+64 bytes from ${ip}: icmp_seq=2 ttl=115 time=23.159 ms
+64 bytes from ${ip}: icmp_seq=3 ttl=115 time=12.725 ms
+
+--- ${destination} ping statistics ---
4 packets transmitted, 4 packets received, 0.0% packet loss
round-trip min/avg/max/stddev = 10.977/14.975/23.159/4.790 ms
`;
diff --git a/packages/mock/src/mocks/structuredefinitions.json b/packages/mock/src/mocks/structuredefinitions.json
index 9aec46297f..b1aa9e099a 100644
--- a/packages/mock/src/mocks/structuredefinitions.json
+++ b/packages/mock/src/mocks/structuredefinitions.json
@@ -12065,8 +12065,8 @@
]
},
{
- "id": "Project.secret",
- "path": "Project.secret",
+ "id": "Project.setting",
+ "path": "Project.setting",
"min": 0,
"max": "*",
"type": [
@@ -12076,8 +12076,8 @@
]
},
{
- "id": "Project.secret.name",
- "path": "Project.secret.name",
+ "id": "Project.setting.name",
+ "path": "Project.setting.name",
"min": 1,
"max": "1",
"type": [
@@ -12087,8 +12087,8 @@
]
},
{
- "id": "Project.secret.value[x]",
- "path": "Project.secret.value[x]",
+ "id": "Project.setting.value[x]",
+ "path": "Project.setting.value[x]",
"min": 1,
"max": "1",
"type": [
@@ -12106,6 +12106,27 @@
}
]
},
+ {
+ "id": "Project.secret",
+ "path": "Project.secret",
+ "min": 0,
+ "max": "*",
+ "contentReference": "#Project.setting"
+ },
+ {
+ "id": "Project.systemSetting",
+ "path": "Project.systemSetting",
+ "min": 0,
+ "max": "*",
+ "contentReference": "#Project.setting"
+ },
+ {
+ "id": "Project.systemSecret",
+ "path": "Project.systemSecret",
+ "min": 0,
+ "max": "*",
+ "contentReference": "#Project.setting"
+ },
{
"id": "Project.site",
"path": "Project.site",
diff --git a/packages/react-hooks/package.json b/packages/react-hooks/package.json
index 00af3c31ae..f2e11f00df 100644
--- a/packages/react-hooks/package.json
+++ b/packages/react-hooks/package.json
@@ -1,6 +1,6 @@
{
"name": "@medplum/react-hooks",
- "version": "3.1.2",
+ "version": "3.1.3",
"description": "Medplum React Hooks Library",
"keywords": [
"medplum",
@@ -57,27 +57,27 @@
"test": "jest"
},
"devDependencies": {
- "@medplum/core": "3.1.2",
- "@medplum/definitions": "3.1.2",
- "@medplum/fhirtypes": "3.1.2",
- "@medplum/mock": "3.1.2",
+ "@medplum/core": "3.1.3",
+ "@medplum/definitions": "3.1.3",
+ "@medplum/fhirtypes": "3.1.3",
+ "@medplum/mock": "3.1.3",
"@testing-library/dom": "10.0.0",
"@testing-library/jest-dom": "6.4.2",
- "@testing-library/react": "14.3.0",
+ "@testing-library/react": "15.0.2",
"@types/jest": "29.5.12",
- "@types/node": "20.12.5",
- "@types/react": "18.2.74",
- "@types/react-dom": "18.2.24",
+ "@types/node": "20.12.7",
+ "@types/react": "18.2.78",
+ "@types/react-dom": "18.2.25",
"jest": "29.7.0",
"jest-each": "29.7.0",
"jest-websocket-mock": "2.5.0",
"react": "18.2.0",
"react-dom": "18.2.0",
"rimraf": "5.0.5",
- "typescript": "5.4.4"
+ "typescript": "5.4.5"
},
"peerDependencies": {
- "@medplum/core": "3.1.2",
+ "@medplum/core": "3.1.3",
"react": "^17.0.2 || ^18.0.0",
"react-dom": "^17.0.2 || ^18.0.0"
},
diff --git a/packages/react/.storybook/main.ts b/packages/react/.storybook/main.ts
index 32dc913395..0e88894b41 100644
--- a/packages/react/.storybook/main.ts
+++ b/packages/react/.storybook/main.ts
@@ -16,6 +16,7 @@ const config: StorybookConfig = {
},
},
},
+ 'storybook-addon-mantine',
],
staticDirs: ['../public'],
framework: {
diff --git a/packages/react/.storybook/preview-head.html b/packages/react/.storybook/preview-head.html
new file mode 100644
index 0000000000..314bf801a1
--- /dev/null
+++ b/packages/react/.storybook/preview-head.html
@@ -0,0 +1,6 @@
+
+
+
+
+
+
\ No newline at end of file
diff --git a/packages/react/.storybook/preview.tsx b/packages/react/.storybook/preview.tsx
index 44092fa60e..03bc56098a 100644
--- a/packages/react/.storybook/preview.tsx
+++ b/packages/react/.storybook/preview.tsx
@@ -1,9 +1,9 @@
-import { MantineProvider, MantineThemeOverride } from '@mantine/core';
import '@mantine/core/styles.css';
import { MockClient } from '@medplum/mock';
import { MedplumProvider } from '@medplum/react-hooks';
import { BrowserRouter } from 'react-router-dom';
import { createGlobalTimer } from '../src/stories/MockDateWrapper.utils';
+import { themes } from './themes';
export const parameters = {
layout: 'fullscreen',
@@ -26,32 +26,12 @@ medplum.get('/').then(() => {
clock.restore();
});
-const theme: MantineThemeOverride = {
- headings: {
- sizes: {
- h1: {
- fontSize: '1.125rem',
- fontWeight: '500',
- lineHeight: '2.0',
- },
- },
- },
- fontSizes: {
- xs: '0.6875rem',
- sm: '0.875rem',
- md: '0.875rem',
- lg: '1.0rem',
- xl: '1.125rem',
- },
-};
-
export const decorators = [
+ themes,
(Story) => (
-
-
-
+
),
diff --git a/packages/react/.storybook/themes.ts b/packages/react/.storybook/themes.ts
new file mode 100644
index 0000000000..c6dca1c83a
--- /dev/null
+++ b/packages/react/.storybook/themes.ts
@@ -0,0 +1,481 @@
+import { createTheme } from '@mantine/core';
+import { withMantineThemes } from 'storybook-addon-mantine';
+
+const medplumDefault = createTheme({
+ headings: {
+ sizes: {
+ h1: {
+ fontSize: '1.125rem',
+ fontWeight: '500',
+ lineHeight: '2.0',
+ },
+ },
+ },
+ fontSizes: {
+ xs: '0.6875rem',
+ sm: '0.875rem',
+ md: '0.875rem',
+ lg: '1.0rem',
+ xl: '1.125rem',
+ },
+});
+
+const fooMedical = createTheme({
+ colors: {
+ // Replace or adjust with the exact colors used for your design
+ primary: [
+ '#f7f7f7', // primary[0]
+ '#eef6f4', // primary[1]
+ '#e3eff2', // primary[2]
+ '#d5ebec', // primary[3]
+ '#cfe7e9', // primary[4]
+ '#b0d7db', // primary[5]
+ '#39acbc', // primary[6]
+ '#005450', // primary[7]
+ '#004d49', // primary[8]
+ '#00353a', // primary[9] (adjusted for a darker shade)
+ ],
+ secondary: [
+ '#fff7eb', // secondary[0]
+ '#ffedce', // secondary[1]
+ '#fae3c3', // secondary[2]
+ '#e9d1b9', // secondary[3]
+ '#e8c9a6', // secondary[4]
+ '#f1dfca', // secondary[5]
+ '#ffc776', // secondary[6]
+ '#fa645a', // secondary[7]
+ '#b57931', // secondary[8]
+ '#935923', // secondary[9] (adjusted for a darker shade)
+ ],
+ },
+ primaryColor: 'primary',
+ fontFamily: 'Ginto, helvetica',
+ radius: {
+ xs: '.5rem',
+ sm: '.75rem',
+ md: '1rem',
+ lg: '1.5rem',
+ xl: '2.5rem',
+ },
+ spacing: {
+ xs: '.25rem',
+ sm: '.33rem',
+ md: '.5rem',
+ lg: '.66rem',
+ xl: '1rem',
+ },
+ defaultRadius: 'xl',
+ shadows: {
+ xs: '0px 0px 0px rgba(0, 0, 0, 0)',
+ md: '2px 2px 1.5px rgba(0, 0, 0, .25)',
+ xl: '5px 5px 3px rgba(0, 0, 0, .25)',
+ },
+ headings: {
+ fontFamily: 'GT Super Display, serif',
+ sizes: {
+ h1: { fontSize: '30px', lineHeight: '1.4' },
+ h2: { fontSize: '24px', lineHeight: '1.35' },
+ h3: { fontSize: '20px', lineHeight: '1.3' },
+ h4: { fontSize: '18px', lineHeight: '1.25' },
+ h5: { fontSize: '16px', lineHeight: '1.2' },
+ h6: { fontSize: '14px', lineHeight: '1.15' },
+ },
+ },
+});
+
+const bonFoo = createTheme({
+ components: {
+ Paper: {
+ defaultProps: {
+ p: 'sm',
+ shadow: 'xs',
+ },
+ },
+ Table: {
+ defaultProps: {
+ striped: false,
+ // m: '16px',
+ },
+ },
+ },
+ fontFamily:
+ '-apple-system,BlinkMacSystemFont,Segoe UI,Roboto,Helvetica,Arial,sans-serif,Apple Color Emoji,Segoe UI Emoji',
+ shadows: {
+ xs: 'none',
+ sm: 'none',
+ md: 'none',
+ lg: 'none',
+ xl: 'none',
+ },
+ spacing: {
+ xs: '8px',
+ sm: '10px',
+ md: '12px',
+ lg: '14px',
+ xl: '16px',
+ },
+ colors: {
+ destructive: [
+ '#FFF5F5',
+ '#FFE3E3',
+ '#FFC9C9',
+ '#FFA8A8',
+ '#FF8787',
+ '#FF6B6B',
+ '#FA5252',
+ '#F03E3E',
+ '#E03131',
+ '#C92A2A',
+ ],
+ dark: [
+ '#C1C2C5',
+ '#A6A7AB',
+ '#909296',
+ '#5C5F66',
+ '#373A40',
+ '#2C2E33',
+ '#25262B',
+ '#1A1B1E',
+ '#141517',
+ '#101113',
+ ],
+ primary: [
+ '#E7F5FF',
+ '#D0EBFF',
+ '#A5D8FF',
+ '#74C0FC',
+ '#4DABF7',
+ '#339AF0',
+ '#228BE6',
+ '#1C7ED6',
+ '#1971C2',
+ '#1864AB',
+ ],
+ neutral: [
+ '#F8F9FA',
+ '#F1F3F5',
+ '#E9ECEF',
+ '#DEE2E6',
+ '#CED4DA',
+ '#ADB5BD',
+ '#868E96',
+ '#495057',
+ '#343A40',
+ '#212529',
+ ],
+ },
+});
+
+const plumMedical = createTheme({
+ components: {
+ Divider: {
+ defaultProps: {
+ my: '0',
+ },
+ },
+ },
+ colors: {
+ primary: [
+ '#eef6f4',
+ '#00D7AB',
+ '#00B395',
+ '#00907E',
+ '#008062',
+ '#005450',
+ '#004D49',
+ '#003F3B',
+ '#003231',
+ '#002824',
+ ],
+ destructive: [
+ '#e8d3cf',
+ '#ddb3b0',
+ '#d59091',
+ '#cf6e77',
+ '#ca4956',
+ '#bc2f3d',
+ '#a0222f',
+ '#821722',
+ '#620e16',
+ '#400707',
+ ],
+ },
+ primaryColor: 'primary',
+ primaryShade: 6,
+ shadows: {
+ xs: '0px 0px 0px rgba(0, 0, 0, 0)',
+ sm: '0px 0px 0px rgba(0, 0, 0, 0)',
+ md: '0px 0px 0px rgba(0, 0, 0, 0)',
+ lg: '0px 0px 0px rgba(0, 0, 0, 0)',
+ xl: '0px 0px 0px rgba(0, 0, 0, 0)',
+ },
+ fontFamily: '"Ginto", helvetica, "sans-serif"',
+ headings: { fontFamily: 'GT Super Display, Times New Roman, "serif"' },
+ fontSizes: {
+ xs: '12px',
+ sm: '14px',
+ md: '18px',
+ lg: '22px',
+ xl: '30px',
+ },
+ radius: {
+ xs: '5px',
+ sm: '7px',
+ md: '9px',
+ lg: '11px',
+ xl: '13px',
+ },
+ defaultRadius: 'lg',
+ spacing: {
+ xs: '.5rem',
+ sm: '1rem',
+ md: '1.5rem',
+ lg: '2rem',
+ xl: '2.5rem',
+ },
+});
+
+const materialUi = createTheme({
+ fontFamily: 'Roboto, Helvetica, Arial, "sans-serif"',
+ fontSizes: {
+ xs: '.7rem',
+ sm: '.85rem',
+ md: '1rem',
+ lg: '1.2rem',
+ xl: '1.4rem',
+ },
+ colors: {
+ primary: [
+ '#cce5ff',
+ '#99ccff',
+ '#66b2ff',
+ '#3399ff',
+ '#0073e6',
+ '#0288d1',
+ '#006bd6',
+ '#0061c2',
+ '#004c99',
+ '#0037a5',
+ ],
+ },
+ primaryColor: 'primary',
+ primaryShade: 4,
+ shadows: {
+ xs: '0px 2px 1px -1px rgba(0, 0, 0, 0.2)',
+ sm: '0px 2px 1px -1px rgba(0, 0, 0, 0.2)',
+ md: '0px 1px 1px 0px rgba(0, 0, 0, 0.14)',
+ lg: '0px 1px 1px 0px rgba(0, 0, 0, 0.14)',
+ xl: '0px 1px 3px 0px rgba(0, 0, 0, 0.12)',
+ },
+ radius: {
+ xs: '0px',
+ sm: '2px',
+ md: '4px',
+ lg: '6px',
+ xl: '8px',
+ },
+ spacing: {
+ xs: '4px 8px',
+ sm: '6px 12px',
+ md: '8px 16px',
+ lg: '10px 20px',
+ xl: '12px 24px',
+ },
+});
+
+const sciFi = createTheme({
+ fontFamily: '"Gill Sans", arial, "sans-serif"',
+ colors: {
+ primary: [
+ '#FFFBB7',
+ '#FFF891',
+ '#FFF56A',
+ '#FFF244',
+ '#FFE81F',
+ '#FFD900',
+ '#ffb300',
+ '#f59b00',
+ '#eb8500',
+ '#e07000',
+ ],
+ },
+ primaryColor: 'primary',
+ primaryShade: 6,
+ black: '#412538',
+ radius: {
+ xs: '20px 10px',
+ sm: '30px 15px',
+ md: '40px 20px',
+ lg: '50px 25px',
+ xl: '60px 30px',
+ },
+ shadows: {
+ xs: '2px 1px 1px -1px #939393',
+ md: '3px 2px 1px -1px #939393',
+ xl: '4px 3px 1px -1px #939393',
+ },
+ spacing: {
+ xs: '1px',
+ sm: '3px',
+ md: '5px',
+ lg: '7px',
+ xl: '9px',
+ },
+ lineHeights: {
+ xs: '12px',
+ sm: '16px',
+ md: '20px',
+ lg: '24px',
+ xl: '30px',
+ },
+});
+
+const cursive = createTheme({
+ fontFamily: '"Brush Script MT", serif',
+ colors: {
+ primary: [
+ '#fce8e8',
+ '#f7cfd5',
+ '#f1bcc9',
+ '#e7a6c0',
+ '#d987b0',
+ '#c770a4',
+ '#b65d9c',
+ '#a85d9a',
+ '#845282',
+ '#604965',
+ ],
+ },
+ primaryColor: 'primary',
+ primaryShade: 5,
+ radius: {
+ xs: '0',
+ sm: '0',
+ md: '0',
+ lg: '0',
+ xl: '0',
+ },
+ shadows: {
+ xs: '4px 4px 3px grey',
+ sm: '8px 8px 3px grey',
+ md: '12px 12px 3px grey',
+ lg: '16px 16px 3px grey',
+ xl: '20px 20px 3px grey',
+ },
+});
+
+const caesar = createTheme({
+ fontFamily: '"Caesar Dressing", serif',
+ fontSizes: {
+ xs: '.8rem',
+ sm: '.9rem',
+ md: '1rem',
+ lg: '1.1rem',
+ xl: '1.2rem',
+ },
+ colors: {
+ primary: [
+ '#fd5d6b',
+ '#fb3737',
+ '#f81b1b',
+ '#d70909',
+ '#a00808',
+ '#810e0e',
+ '#601410',
+ '#4b1711',
+ '#34150f',
+ '#25120e',
+ ],
+ },
+ primaryColor: 'primary',
+ primaryShade: 4,
+ shadows: {
+ xs: '3px 3px 2px grey',
+ xl: '5px 5px 2px grey',
+ },
+});
+
+const wordArt = createTheme({
+ fontFamily: '"Bungee Spice", "sans-serif"',
+ defaultRadius: '0px',
+ shadows: {
+ xs: '0px 0px 0px',
+ sm: '0px 0px 0px',
+ md: '0px 0px 0px',
+ lg: '0px 0px 0px',
+ xl: '0px 0px 0px',
+ },
+ colors: {
+ primary: [
+ '#bcfeae',
+ '#90fa85',
+ '#64f55c',
+ '#34ed31',
+ '#1acf17',
+ '#1da21a',
+ '#1c7e1b',
+ '#1d5e20',
+ '#183f1c',
+ '#122b17',
+ ],
+ },
+ primaryColor: 'primary',
+ primaryShade: 4,
+ spacing: {
+ xs: '12px',
+ sm: '16px',
+ md: '20px',
+ lg: '24px',
+ xl: '30px',
+ },
+});
+
+export const themes = withMantineThemes({
+ themes: [
+ {
+ id: 'medplumDefault',
+ name: 'Medplum Default',
+ ...medplumDefault,
+ },
+ {
+ id: 'foomedical',
+ name: 'Foo Medical',
+ ...fooMedical,
+ },
+ {
+ id: 'bonfoo',
+ name: 'Bon Foo',
+ ...bonFoo,
+ },
+ {
+ id: 'plumMedical',
+ name: 'PlumMedical',
+ ...plumMedical,
+ },
+ {
+ id: 'materialUi',
+ name: 'Material UI',
+ ...materialUi,
+ },
+ {
+ id: 'sci-fi',
+ name: 'SciFi',
+ ...sciFi,
+ },
+ {
+ id: 'cursive',
+ name: 'Cursive',
+ ...cursive,
+ },
+ {
+ id: 'caesar',
+ name: 'Caesar',
+ ...caesar,
+ },
+ {
+ id: 'word-art',
+ name: 'Word Art',
+ ...wordArt,
+ },
+ ],
+});
diff --git a/packages/react/package.json b/packages/react/package.json
index 89d71cdab7..3e8089231f 100644
--- a/packages/react/package.json
+++ b/packages/react/package.json
@@ -1,6 +1,6 @@
{
"name": "@medplum/react",
- "version": "3.1.2",
+ "version": "3.1.3",
"description": "Medplum React Component Library",
"keywords": [
"medplum",
@@ -67,51 +67,52 @@
"test": "jest"
},
"devDependencies": {
- "@mantine/core": "7.7.1",
- "@mantine/hooks": "7.7.1",
- "@mantine/notifications": "7.7.1",
- "@medplum/core": "3.1.2",
- "@medplum/definitions": "3.1.2",
- "@medplum/fhirtypes": "3.1.2",
- "@medplum/mock": "3.1.2",
- "@medplum/react-hooks": "3.1.2",
- "@storybook/addon-actions": "8.0.6",
- "@storybook/addon-essentials": "8.0.6",
- "@storybook/addon-links": "8.0.6",
- "@storybook/addon-storysource": "8.0.6",
- "@storybook/blocks": "^8.0.6",
- "@storybook/builder-vite": "8.0.6",
- "@storybook/react": "8.0.6",
- "@storybook/react-vite": "8.0.6",
- "@tabler/icons-react": "3.1.0",
+ "@mantine/core": "7.8.0",
+ "@mantine/hooks": "7.8.0",
+ "@mantine/notifications": "7.8.0",
+ "@medplum/core": "3.1.3",
+ "@medplum/definitions": "3.1.3",
+ "@medplum/fhirtypes": "3.1.3",
+ "@medplum/mock": "3.1.3",
+ "@medplum/react-hooks": "3.1.3",
+ "@storybook/addon-actions": "8.0.8",
+ "@storybook/addon-essentials": "8.0.8",
+ "@storybook/addon-links": "8.0.8",
+ "@storybook/addon-storysource": "8.0.8",
+ "@storybook/blocks": "^8.0.8",
+ "@storybook/builder-vite": "8.0.8",
+ "@storybook/react": "8.0.8",
+ "@storybook/react-vite": "8.0.8",
+ "@tabler/icons-react": "3.2.0",
"@testing-library/dom": "10.0.0",
"@testing-library/jest-dom": "6.4.2",
- "@testing-library/react": "14.3.0",
+ "@testing-library/react": "15.0.2",
"@testing-library/user-event": "14.5.2",
"@types/jest": "29.5.12",
- "@types/node": "20.12.5",
- "@types/react": "18.2.74",
- "@types/react-dom": "18.2.24",
+ "@types/node": "20.12.7",
+ "@types/react": "18.2.78",
+ "@types/react-dom": "18.2.25",
"@vitejs/plugin-react": "4.2.1",
"chromatic": "11.0.0",
"jest": "29.7.0",
"jest-each": "29.7.0",
"postcss": "8.4.38",
- "postcss-preset-mantine": "1.13.0",
+ "postcss-preset-mantine": "1.14.4",
"react": "18.2.0",
"react-dom": "18.2.0",
"rfc6902": "5.1.1",
"rimraf": "5.0.5",
"sinon": "17.0.1",
- "storybook": "8.0.6",
- "typescript": "5.4.4",
+ "storybook": "8.0.8",
+ "typescript": "5.4.5",
+ "storybook-addon-mantine": "4.0.2",
"vite-plugin-turbosnap": "^1.0.3"
},
"peerDependencies": {
"@mantine/core": "^7.0.0",
"@mantine/hooks": "^7.0.0",
"@mantine/notifications": "^7.0.0",
- "@medplum/core": "3.1.2",
+ "@medplum/core": "3.1.3",
"react": "^17.0.2 || ^18.0.0",
"react-dom": "^17.0.2 || ^18.0.0",
"rfc6902": "^5.0.1"
diff --git a/packages/react/src/AsyncAutocomplete/AsyncAutocomplete.tsx b/packages/react/src/AsyncAutocomplete/AsyncAutocomplete.tsx
index 277ae18f37..fac146d6dd 100644
--- a/packages/react/src/AsyncAutocomplete/AsyncAutocomplete.tsx
+++ b/packages/react/src/AsyncAutocomplete/AsyncAutocomplete.tsx
@@ -11,7 +11,7 @@ import {
} from '@mantine/core';
import { showNotification } from '@mantine/notifications';
import { normalizeErrorString } from '@medplum/core';
-import { KeyboardEvent, ReactNode, useCallback, useEffect, useRef, useState } from 'react';
+import { KeyboardEvent, ReactNode, SyntheticEvent, useCallback, useEffect, useRef, useState } from 'react';
import { killEvent } from '../utils/dom';
export interface AsyncAutocompleteOption extends ComboboxItem {
@@ -129,7 +129,7 @@ export function AsyncAutocomplete<T extends AsyncAutocompleteOption>(props: AsyncAutocompleteProps<T>): JSX.Element {
}, [combobox, loadOptions, onChange, toOption]);
const handleSearchChange = useCallback(
- (e: React.SyntheticEvent): void => {
+ (e: SyntheticEvent): void => {
if ((options && options.length > 0) || creatable) {
combobox.openDropdown();
}
diff --git a/packages/react/src/CheckboxFormSection/CheckboxFormSection.tsx b/packages/react/src/CheckboxFormSection/CheckboxFormSection.tsx
index 6d985e7eee..0629914702 100644
--- a/packages/react/src/CheckboxFormSection/CheckboxFormSection.tsx
+++ b/packages/react/src/CheckboxFormSection/CheckboxFormSection.tsx
@@ -15,7 +15,7 @@ export interface CheckboxFormSectionProps {
export function CheckboxFormSection(props: CheckboxFormSectionProps): JSX.Element {
const { debugMode } = useContext(ElementsContext);
- let label: React.ReactNode;
+ let label: ReactNode;
if (debugMode && props.fhirPath) {
label = `${props.title} - ${props.fhirPath}`;
} else {
diff --git a/packages/react/src/ElementsInput/ElementsInput.utils.ts b/packages/react/src/ElementsInput/ElementsInput.utils.ts
index 67ff6a3e11..70e7da1a78 100644
--- a/packages/react/src/ElementsInput/ElementsInput.utils.ts
+++ b/packages/react/src/ElementsInput/ElementsInput.utils.ts
@@ -1,8 +1,8 @@
import { ElementsContextType, InternalSchemaElement, isPopulated } from '@medplum/core';
-import React from 'react';
+import { createContext } from 'react';
import { DEFAULT_IGNORED_NON_NESTED_PROPERTIES, DEFAULT_IGNORED_PROPERTIES } from '../constants';
-export const ElementsContext = React.createContext<ElementsContextType>({
+export const ElementsContext = createContext<ElementsContextType>({
path: '',
profileUrl: undefined,
elements: Object.create(null),
diff --git a/packages/react/src/FormSection/FormSection.tsx b/packages/react/src/FormSection/FormSection.tsx
index ef1e1c8707..06219fe1aa 100644
--- a/packages/react/src/FormSection/FormSection.tsx
+++ b/packages/react/src/FormSection/FormSection.tsx
@@ -1,8 +1,8 @@
import { Input } from '@mantine/core';
import { OperationOutcome } from '@medplum/fhirtypes';
import { ReactNode, useContext } from 'react';
-import { getErrorsForInput } from '../utils/outcomes';
import { ElementsContext } from '../ElementsInput/ElementsInput.utils';
+import { getErrorsForInput } from '../utils/outcomes';
export interface FormSectionProps {
readonly title?: string;
@@ -19,7 +19,7 @@ export interface FormSectionProps {
export function FormSection(props: FormSectionProps): JSX.Element {
const { debugMode } = useContext(ElementsContext);
- let label: React.ReactNode;
+ let label: ReactNode;
if (debugMode && props.fhirPath) {
label = `${props.title} - ${props.fhirPath}`;
} else {
diff --git a/packages/react/src/Panel/Panel.tsx b/packages/react/src/Panel/Panel.tsx
index 87ff3ec529..2d6aae14ac 100644
--- a/packages/react/src/Panel/Panel.tsx
+++ b/packages/react/src/Panel/Panel.tsx
@@ -1,11 +1,12 @@
import { Paper, PaperProps } from '@mantine/core';
import cx from 'clsx';
+import { ReactNode } from 'react';
import classes from './Panel.module.css';
export interface PanelProps extends PaperProps {
readonly width?: number;
readonly fill?: boolean;
- readonly children?: React.ReactNode;
+ readonly children?: ReactNode;
}
export function Panel(props: PanelProps): JSX.Element {
diff --git a/packages/react/src/ReferenceRangeEditor/ReferenceRangeEditor.tsx b/packages/react/src/ReferenceRangeEditor/ReferenceRangeEditor.tsx
index 78d5b8a124..941c052234 100644
--- a/packages/react/src/ReferenceRangeEditor/ReferenceRangeEditor.tsx
+++ b/packages/react/src/ReferenceRangeEditor/ReferenceRangeEditor.tsx
@@ -179,7 +179,7 @@ export function ReferenceRangeGroupEditor(props: ReferenceRangeGroupEditorProps)
data-testid={`remove-group-button-${intervalGroup.id}`}
key={`remove-group-button-${intervalGroup.id}`}
size="sm"
- onClick={(e: React.MouseEvent) => {
+ onClick={(e: MouseEvent) => {
killEvent(e);
props.onRemoveGroup(intervalGroup);
}}
@@ -209,7 +209,7 @@ export function ReferenceRangeGroupEditor(props: ReferenceRangeGroupEditorProps)
size="sm"
key={`remove-interval-${interval.id}`}
data-testid={`remove-interval-${interval.id}`}
- onClick={(e: React.MouseEvent) => {
+ onClick={(e: MouseEvent) => {
killEvent(e);
props.onRemove(intervalGroup.id, interval);
}}
@@ -232,7 +232,7 @@ export function ReferenceRangeGroupEditor(props: ReferenceRangeGroupEditorProps)
title="Add Interval"
variant="subtle"
size="sm"
- onClick={(e: React.MouseEvent) => {
+ onClick={(e: MouseEvent) => {
killEvent(e);
props.onAdd(intervalGroup.id, {
range: {
diff --git a/packages/react/src/SliceInput/SliceInput.tsx b/packages/react/src/SliceInput/SliceInput.tsx
index 1603a6a84f..6c953c6883 100644
--- a/packages/react/src/SliceInput/SliceInput.tsx
+++ b/packages/react/src/SliceInput/SliceInput.tsx
@@ -8,16 +8,16 @@ import {
isEmpty,
isPopulated,
} from '@medplum/core';
-import { useContext, useMemo, useState } from 'react';
+import { MouseEvent, useContext, useMemo, useState } from 'react';
import { ElementsContext } from '../ElementsInput/ElementsInput.utils';
import { FormSection } from '../FormSection/FormSection';
+import classes from '../ResourceArrayInput/ResourceArrayInput.module.css';
import { ElementDefinitionTypeInput } from '../ResourcePropertyInput/ResourcePropertyInput';
+import { BaseInputProps } from '../ResourcePropertyInput/ResourcePropertyInput.utils';
import { ArrayAddButton } from '../buttons/ArrayAddButton';
import { ArrayRemoveButton } from '../buttons/ArrayRemoveButton';
import { killEvent } from '../utils/dom';
-import classes from '../ResourceArrayInput/ResourceArrayInput.module.css';
import { maybeWrapWithContext } from '../utils/maybeWrapWithContext';
-import { BaseInputProps } from '../ResourcePropertyInput/ResourcePropertyInput.utils';
export interface SliceInputProps extends BaseInputProps {
readonly slice: SliceDefinitionWithTypes;
@@ -96,7 +96,7 @@ export function SliceInput(props: SliceInputProps): JSX.Element | null {
- onClick={(e: React.MouseEvent) => {
+ onClick={(e: MouseEvent) => {
killEvent(e);
const newValues = [...values];
newValues.splice(valueIndex, 1);
@@ -111,7 +111,7 @@ export function SliceInput(props: SliceInputProps): JSX.Element | null {
- onClick={(e: React.MouseEvent) => {
+ onClick={(e: MouseEvent) => {
killEvent(e);
const newValues = [...values, undefined];
setValuesWrapper(newValues);
diff --git a/packages/react/src/buttons/ArrayAddButton.tsx b/packages/react/src/buttons/ArrayAddButton.tsx
index 254818d9b6..874429ce1d 100644
--- a/packages/react/src/buttons/ArrayAddButton.tsx
+++ b/packages/react/src/buttons/ArrayAddButton.tsx
@@ -1,9 +1,10 @@
-import { Button, ActionIcon } from '@mantine/core';
+import { ActionIcon, Button } from '@mantine/core';
import { IconCirclePlus } from '@tabler/icons-react';
+import { MouseEventHandler } from 'react';
export interface ArrayAddButtonProps {
readonly propertyDisplayName?: string;
- readonly onClick: React.MouseEventHandler;
+ readonly onClick: MouseEventHandler;
readonly testId?: string;
}
diff --git a/packages/react/src/buttons/ArrayRemoveButton.tsx b/packages/react/src/buttons/ArrayRemoveButton.tsx
index bc78478711..40970a2b18 100644
--- a/packages/react/src/buttons/ArrayRemoveButton.tsx
+++ b/packages/react/src/buttons/ArrayRemoveButton.tsx
@@ -1,9 +1,10 @@
import { ActionIcon } from '@mantine/core';
import { IconCircleMinus } from '@tabler/icons-react';
+import { MouseEventHandler } from 'react';
export interface ArrayRemoveButtonProps {
readonly propertyDisplayName?: string;
- readonly onClick: React.MouseEventHandler;
+ readonly onClick: MouseEventHandler;
readonly testId?: string;
}
diff --git a/packages/react/src/chat/ChatModal/ChatModal.tsx b/packages/react/src/chat/ChatModal/ChatModal.tsx
index 4654078519..50dab73bdd 100644
--- a/packages/react/src/chat/ChatModal/ChatModal.tsx
+++ b/packages/react/src/chat/ChatModal/ChatModal.tsx
@@ -1,12 +1,12 @@
import { ActionIcon } from '@mantine/core';
import { useMedplumProfile } from '@medplum/react-hooks';
import { IconChevronDown, IconChevronUp } from '@tabler/icons-react';
-import { useEffect, useState } from 'react';
+import { ReactNode, useEffect, useState } from 'react';
import classes from './ChatModal.module.css';
export interface ChatModalProps {
readonly open?: boolean;
- readonly children: React.ReactNode;
+ readonly children: ReactNode;
}
export function ChatModal(props: ChatModalProps): JSX.Element | null {
diff --git a/packages/react/src/test-utils/render.tsx b/packages/react/src/test-utils/render.tsx
index 9a7e8639e7..8fcf582122 100644
--- a/packages/react/src/test-utils/render.tsx
+++ b/packages/react/src/test-utils/render.tsx
@@ -12,17 +12,15 @@ import {
within,
} from '@testing-library/react';
import userEvent from '@testing-library/user-event';
+import { ReactNode } from 'react';
export { act, fireEvent, screen, userEvent, waitFor, within };
const theme = {};
-export function render(
- ui: React.ReactNode,
- wrapper?: ({ children }: { children: React.ReactNode }) => JSX.Element
-): RenderResult {
+export function render(ui: ReactNode, wrapper?: ({ children }: { children: ReactNode }) => JSX.Element): RenderResult {
return testingLibraryRender(ui, {
- wrapper: ({ children }: { children: React.ReactNode }) => (
+ wrapper: ({ children }: { children: ReactNode }) => (
{wrapper ? wrapper({ children }) : children}
),
});
diff --git a/packages/react/src/utils/dom.ts b/packages/react/src/utils/dom.ts
index 551b089fef..7b94e7cef4 100644
--- a/packages/react/src/utils/dom.ts
+++ b/packages/react/src/utils/dom.ts
@@ -1,10 +1,12 @@
+import { SyntheticEvent } from 'react';
+
/**
* Kills a browser event.
* Prevents default behavior.
* Stops event propagation.
* @param e - The event.
*/
-export function killEvent(e: Event | React.SyntheticEvent): void {
+export function killEvent(e: Event | SyntheticEvent): void {
e.preventDefault();
e.stopPropagation();
}
diff --git a/packages/react/src/utils/maybeWrapWithContext.tsx b/packages/react/src/utils/maybeWrapWithContext.tsx
index c21d70acbb..08bfd8c3a1 100644
--- a/packages/react/src/utils/maybeWrapWithContext.tsx
+++ b/packages/react/src/utils/maybeWrapWithContext.tsx
@@ -1,5 +1,7 @@
+import { Context } from 'react';
+
export function maybeWrapWithContext<T>(
- ContextProvider: React.Context<T>['Provider'],
+ ContextProvider: Context<T>['Provider'],
contextValue: T | undefined,
contents: JSX.Element
): JSX.Element {
diff --git a/packages/server/jest.config.json b/packages/server/jest.config.json
deleted file mode 100644
index 8e2a0d7497..0000000000
--- a/packages/server/jest.config.json
+++ /dev/null
@@ -1,13 +0,0 @@
-{
- "testEnvironment": "node",
- "testTimeout": 600000,
- "testSequencer": "/jest.sequencer.js",
- "transform": {
- "^.+\\.(js|jsx|ts|tsx)$": "babel-jest"
- },
- "moduleFileExtensions": ["ts", "js", "json", "node"],
- "testMatch": ["**/src/**/*.test.ts"],
- "coverageDirectory": "coverage",
- "coverageReporters": ["json", "text"],
- "collectCoverageFrom": ["**/src/**/*", "!**/src/__mocks__/**/*.ts", "!**/src/migrations/**/*.ts"]
-}
diff --git a/packages/server/jest.config.ts b/packages/server/jest.config.ts
new file mode 100644
index 0000000000..9c24d5533e
--- /dev/null
+++ b/packages/server/jest.config.ts
@@ -0,0 +1,15 @@
+import type { Config } from 'jest';
+
+export default {
+ testEnvironment: 'node',
+ testTimeout: 600000,
+ testSequencer: '<rootDir>/jest.sequencer.js',
+ transform: {
+ '^.+\\.(js|jsx|ts|tsx)$': 'babel-jest',
+ },
+ testMatch: ['<rootDir>/src/**/*.test.ts'],
+ moduleFileExtensions: ['ts', 'js', 'json', 'node'],
+ coverageDirectory: 'coverage',
+ coverageReporters: ['json', 'text'],
+ collectCoverageFrom: ['**/src/**/*', '!**/src/__mocks__/**/*.ts', '!**/src/migrations/**/*.ts'],
+} satisfies Config;
diff --git a/packages/server/jest.seed.config.ts b/packages/server/jest.seed.config.ts
new file mode 100644
index 0000000000..b428e8b3d5
--- /dev/null
+++ b/packages/server/jest.seed.config.ts
@@ -0,0 +1,8 @@
+import type { Config } from 'jest';
+import defaultConfig from './jest.config';
+
+export default {
+ ...defaultConfig,
+ testMatch: ['<rootDir>/seed-tests/**/*.test.ts'],
+ collectCoverageFrom: ['<rootDir>/seed-tests/**/*'],
+} satisfies Config;
diff --git a/packages/server/package.json b/packages/server/package.json
index a9993bf9a6..4abc16f616 100644
--- a/packages/server/package.json
+++ b/packages/server/package.json
@@ -1,6 +1,6 @@
{
"name": "@medplum/server",
- "version": "3.1.2",
+ "version": "3.1.3",
"description": "Medplum Server",
"homepage": "https://www.medplum.com/",
"bugs": {
@@ -18,21 +18,23 @@
"clean": "rimraf dist",
"dev": "ts-node-dev --poll --respawn --transpile-only --require ./src/otel/instrumentation.ts src/index.ts",
"start": "node --require ./dist/otel/instrumentation.js dist/index.js",
- "test": "jest --runInBand"
+ "test:seed:serial": "jest seed-serial.test.ts --config jest.seed.config.ts --coverageDirectory \"/coverage-seed/serial\"",
+ "test:seed:parallel": "jest seed.test.ts --config jest.seed.config.ts --coverageDirectory \"/coverage-seed/parallel\"",
+ "test": "docker-compose -f ../../docker-compose.seed.yml up -d && npm run test:seed:parallel && jest"
},
"dependencies": {
- "@aws-sdk/client-cloudwatch-logs": "3.549.0",
- "@aws-sdk/client-lambda": "3.549.0",
- "@aws-sdk/client-s3": "3.550.0",
- "@aws-sdk/client-secrets-manager": "3.549.0",
- "@aws-sdk/client-sesv2": "3.549.0",
- "@aws-sdk/client-ssm": "3.549.0",
+ "@aws-sdk/client-cloudwatch-logs": "3.554.0",
+ "@aws-sdk/client-lambda": "3.554.0",
+ "@aws-sdk/client-s3": "3.554.0",
+ "@aws-sdk/client-secrets-manager": "3.554.0",
+ "@aws-sdk/client-sesv2": "3.554.0",
+ "@aws-sdk/client-ssm": "3.554.0",
"@aws-sdk/cloudfront-signer": "3.541.0",
- "@aws-sdk/lib-storage": "3.550.0",
+ "@aws-sdk/lib-storage": "3.554.0",
"@aws-sdk/types": "3.535.0",
- "@medplum/core": "3.1.2",
- "@medplum/definitions": "3.1.2",
- "@medplum/fhir-router": "3.1.2",
+ "@medplum/core": "3.1.3",
+ "@medplum/definitions": "3.1.3",
+ "@medplum/fhir-router": "3.1.3",
"@opentelemetry/auto-instrumentations-node": "0.44.0",
"@opentelemetry/exporter-metrics-otlp-proto": "0.50.0",
"@opentelemetry/exporter-trace-otlp-proto": "0.50.0",
@@ -42,7 +44,7 @@
"@smithy/util-stream": "2.2.0",
"bcryptjs": "2.4.3",
"body-parser": "1.20.2",
- "bullmq": "5.5.4",
+ "bullmq": "5.7.1",
"bytes": "3.1.2",
"compression": "1.7.4",
"cookie-parser": "1.4.6",
@@ -71,7 +73,7 @@
},
"devDependencies": {
"@jest/test-sequencer": "29.7.0",
- "@medplum/fhirtypes": "3.1.2",
+ "@medplum/fhirtypes": "3.1.3",
"@types/bcryptjs": "2.4.6",
"@types/body-parser": "1.19.5",
"@types/bytes": "3.1.4",
@@ -82,10 +84,10 @@
"@types/express-rate-limit": "5.1.3",
"@types/json-schema": "7.0.15",
"@types/mailparser": "3.4.4",
- "@types/node": "20.12.5",
+ "@types/node": "20.12.7",
"@types/node-fetch": "2.6.11",
"@types/nodemailer": "6.4.14",
- "@types/pg": "8.11.2",
+ "@types/pg": "8.11.5",
"@types/set-cookie-parser": "2.4.7",
"@types/supertest": "6.0.2",
"@types/ua-parser-js": "0.7.39",
@@ -94,7 +96,7 @@
"@types/ws": "8.5.10",
"aws-sdk-client-mock": "4.0.0",
"aws-sdk-client-mock-jest": "4.0.0",
- "mailparser": "3.6.9",
+ "mailparser": "3.7.0",
"openapi3-ts": "4.3.1",
"set-cookie-parser": "2.6.0",
"supertest": "6.3.4",
diff --git a/packages/server/src/seed.test.ts b/packages/server/seed-tests/seed-serial.test.ts
similarity index 56%
rename from packages/server/src/seed.test.ts
rename to packages/server/seed-tests/seed-serial.test.ts
index 2d72346245..1e8500b6c2 100644
--- a/packages/server/src/seed.test.ts
+++ b/packages/server/seed-tests/seed-serial.test.ts
@@ -1,16 +1,19 @@
import { Project } from '@medplum/fhirtypes';
-import { initAppServices, shutdownApp } from './app';
-import { loadTestConfig } from './config';
-import { getDatabasePool } from './database';
-import { SelectQuery } from './fhir/sql';
-import { seedDatabase } from './seed';
-import { withTestContext } from './test.setup';
+import { initAppServices, shutdownApp } from '../src/app';
+import { loadTestConfig } from '../src/config';
+import { getDatabasePool } from '../src/database';
+import { SelectQuery } from '../src/fhir/sql';
+import { seedDatabase } from '../src/seed';
+import { withTestContext } from '../src/test.setup';
describe('Seed', () => {
beforeAll(async () => {
console.log = jest.fn();
const config = await loadTestConfig();
+ config.database.port = process.env['POSTGRES_SEED_PORT']
+ ? Number.parseInt(process.env['POSTGRES_SEED_PORT'], 10)
+ : 5433;
return withTestContext(() => initAppServices(config));
});
@@ -18,9 +21,9 @@ describe('Seed', () => {
await shutdownApp();
});
- test('Seeder completes successfully', async () => {
+ test('Seeder completes successfully -- serial version', async () => {
// First time, seeder should run
- await seedDatabase();
+ await seedDatabase({ parallel: false });
// Make sure the first project is a super admin
const rows = await new SelectQuery('Project')
@@ -34,6 +37,6 @@ describe('Seed', () => {
expect(project.strictMode).toBe(true);
// Second time, seeder should silently ignore
- await seedDatabase();
+ await seedDatabase({ parallel: false });
}, 240000);
});
diff --git a/packages/server/seed-tests/seed.test.ts b/packages/server/seed-tests/seed.test.ts
new file mode 100644
index 0000000000..129bd383d2
--- /dev/null
+++ b/packages/server/seed-tests/seed.test.ts
@@ -0,0 +1,42 @@
+import { Project } from '@medplum/fhirtypes';
+import { initAppServices, shutdownApp } from '../src/app';
+import { loadTestConfig } from '../src/config';
+import { getDatabasePool } from '../src/database';
+import { SelectQuery } from '../src/fhir/sql';
+import { seedDatabase } from '../src/seed';
+import { withTestContext } from '../src/test.setup';
+
+describe('Seed', () => {
+ beforeAll(async () => {
+ console.log = jest.fn();
+
+ const config = await loadTestConfig();
+ return withTestContext(() => initAppServices(config));
+ });
+
+ afterAll(async () => {
+ await shutdownApp();
+ });
+
+ test('Seeder completes successfully', async () => {
+ // First time, seeder should run
+ await seedDatabase();
+
+ // Make sure all database migrations have run
+ const pool = getDatabasePool();
+ const result = await pool.query('SELECT "version" FROM "DatabaseMigration"');
+ const version = result.rows[0]?.version ?? -1;
+ expect(version).toBeGreaterThanOrEqual(67);
+
+ // Make sure the first project is a super admin
+ const rows = await new SelectQuery('Project').column('content').where('name', '=', 'Super Admin').execute(pool);
+ expect(rows.length).toBe(1);
+
+ const project = JSON.parse(rows[0].content) as Project;
+ expect(project.superAdmin).toBe(true);
+ expect(project.strictMode).toBe(true);
+
+ // Second time, seeder should silently ignore
+ await seedDatabase();
+ }, 240000);
+});
diff --git a/packages/server/src/admin/invite.test.ts b/packages/server/src/admin/invite.test.ts
index 061f2bdae0..0487054490 100644
--- a/packages/server/src/admin/invite.test.ts
+++ b/packages/server/src/admin/invite.test.ts
@@ -10,7 +10,6 @@ import { simpleParser } from 'mailparser';
import fetch from 'node-fetch';
import { Readable } from 'stream';
import request from 'supertest';
-
import { initApp, shutdownApp } from '../app';
import { registerNew } from '../auth/register';
import { loadTestConfig } from '../config';
@@ -26,6 +25,7 @@ describe('Admin Invite', () => {
beforeAll(async () => {
const config = await loadTestConfig();
+ config.emailProvider = 'awsses';
await withTestContext(() => initApp(app, config));
});
diff --git a/packages/server/src/admin/project.test.ts b/packages/server/src/admin/project.test.ts
index b44cf7d887..5f10e047c9 100644
--- a/packages/server/src/admin/project.test.ts
+++ b/packages/server/src/admin/project.test.ts
@@ -1,4 +1,3 @@
-import { SendEmailCommand, SESv2Client } from '@aws-sdk/client-sesv2';
import { createReference } from '@medplum/core';
import { ProjectMembership } from '@medplum/fhirtypes';
import { randomUUID } from 'crypto';
@@ -7,12 +6,11 @@ import { pwnedPassword } from 'hibp';
import fetch from 'node-fetch';
import request from 'supertest';
import { initApp, shutdownApp } from '../app';
-import { registerNew, RegisterResponse } from '../auth/register';
+import { RegisterResponse, registerNew } from '../auth/register';
import { loadTestConfig } from '../config';
import { addTestUser, setupPwnedPasswordMock, setupRecaptchaMock, withTestContext } from '../test.setup';
import { inviteUser } from './invite';
-jest.mock('@aws-sdk/client-sesv2');
jest.mock('hibp');
jest.mock('node-fetch');
@@ -43,8 +41,6 @@ describe('Project Admin routes', () => {
});
beforeEach(() => {
- (SESv2Client as unknown as jest.Mock).mockClear();
- (SendEmailCommand as unknown as jest.Mock).mockClear();
(fetch as unknown as jest.Mock).mockClear();
(pwnedPassword as unknown as jest.Mock).mockClear();
setupPwnedPasswordMock(pwnedPassword as unknown as jest.Mock, 0);
diff --git a/packages/server/src/agent/utils.ts b/packages/server/src/agent/utils.ts
new file mode 100644
index 0000000000..a5b4263552
--- /dev/null
+++ b/packages/server/src/agent/utils.ts
@@ -0,0 +1,11 @@
+export enum AgentConnectionState {
+ UNKNOWN = 'unknown',
+ CONNECTED = 'connected',
+ DISCONNECTED = 'disconnected',
+}
+
+export type AgentInfo = {
+ status: AgentConnectionState;
+ version: string;
+ lastUpdated?: string;
+};
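
A small sketch of how this info record can be read back from Redis; the `medplum:agent:<id>:info` key format and JSON encoding are assumptions based on the WebSocket test and handler changes later in this diff:

```typescript
import { AgentConnectionState, AgentInfo } from './utils';

// Sketch: read the cached agent info written by the WebSocket handler.
// Falls back to an "unknown" record when nothing has been stored yet.
async function readAgentInfo(
  redis: { get(key: string): Promise<string | null> },
  agentId: string
): Promise<AgentInfo> {
  const raw = await redis.get(`medplum:agent:${agentId}:info`);
  return raw ? (JSON.parse(raw) as AgentInfo) : { status: AgentConnectionState.UNKNOWN, version: 'unknown' };
}
```
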
diff --git a/packages/server/src/agent/websockets.test.ts b/packages/server/src/agent/websockets.test.ts
index eec0385fc0..d0fa8abb0f 100644
--- a/packages/server/src/agent/websockets.test.ts
+++ b/packages/server/src/agent/websockets.test.ts
@@ -1,11 +1,13 @@
-import { allOk, ContentType, getReferenceString, Hl7Message } from '@medplum/core';
+import { allOk, ContentType, getReferenceString, Hl7Message, MEDPLUM_VERSION, sleep } from '@medplum/core';
import { Agent, Bot, Device } from '@medplum/fhirtypes';
import express from 'express';
import { Server } from 'http';
import request from 'superwstest';
import { initApp, shutdownApp } from '../app';
import { loadTestConfig, MedplumServerConfig } from '../config';
+import { getRedis } from '../redis';
import { initTestAuth } from '../test.setup';
+import { AgentConnectionState, AgentInfo } from './utils';
const app = express();
let config: MedplumServerConfig;
@@ -19,6 +21,7 @@ describe('Agent WebSockets', () => {
beforeAll(async () => {
config = await loadTestConfig();
config.vmContextBotsEnabled = true;
+ config.heartbeatMilliseconds = 5000;
server = await initApp(app, config);
accessToken = await initTestAuth({ membership: { admin: true } });
@@ -325,6 +328,7 @@ describe('Agent WebSockets', () => {
.set('Authorization', 'Bearer ' + accessToken)
.send({
waitForResponse: true,
+ waitTimeout: 500,
destination: getReferenceString(device),
contentType: ContentType.HL7_V2,
body:
@@ -407,14 +411,30 @@ describe('Agent WebSockets', () => {
agentId: agent.id,
})
)
- .expectText('{"type":"agent:connect:response"}')
+ .expectJson({ type: 'agent:connect:response' })
+ .expectJson({ type: 'agent:heartbeat:request' })
// Send a ping
- .sendText(JSON.stringify({ type: 'agent:heartbeat:request' }))
- .expectText('{"type":"agent:heartbeat:response"}')
+ .sendJson({ type: 'agent:heartbeat:request' })
+ .expectJson({ type: 'agent:heartbeat:response', version: MEDPLUM_VERSION })
// Simulate a ping response
- .sendText(JSON.stringify({ type: 'agent:heartbeat:response' }))
+ .sendJson({ type: 'agent:heartbeat:response', version: MEDPLUM_VERSION })
.close()
.expectClosed();
+
+ let info: AgentInfo = { status: AgentConnectionState.UNKNOWN, version: 'unknown' };
+ for (let i = 0; i < 5; i++) {
+ await sleep(50);
+ const infoStr = (await getRedis().get(`medplum:agent:${agent.id as string}:info`)) as string;
+ info = JSON.parse(infoStr) as AgentInfo;
+ if (info.status === AgentConnectionState.DISCONNECTED) {
+ break;
+ }
+ }
+ expect(info).toMatchObject({
+ status: AgentConnectionState.DISCONNECTED,
+ version: MEDPLUM_VERSION,
+ lastUpdated: expect.any(String),
+ });
});
test('Ping IP', async () => {
diff --git a/packages/server/src/agent/websockets.ts b/packages/server/src/agent/websockets.ts
index d6cfba398e..feb5885c94 100644
--- a/packages/server/src/agent/websockets.ts
+++ b/packages/server/src/agent/websockets.ts
@@ -4,21 +4,24 @@ import {
AgentTransmitRequest,
ContentType,
Hl7Message,
+ MEDPLUM_VERSION,
getReferenceString,
normalizeErrorString,
} from '@medplum/core';
import { Agent, Bot, Reference } from '@medplum/fhirtypes';
-import { AsyncLocalStorage } from 'async_hooks';
-import { IncomingMessage } from 'http';
import { Redis } from 'ioredis';
+import { AsyncLocalStorage } from 'node:async_hooks';
+import { IncomingMessage } from 'node:http';
import ws from 'ws';
import { getRepoForLogin } from '../fhir/accesspolicy';
import { executeBot } from '../fhir/operations/execute';
import { heartbeat } from '../heartbeat';
+import { globalLogger } from '../logger';
import { getLoginForAccessToken } from '../oauth/utils';
-import { getRedis } from '../redis';
+import { getRedis, getRedisSubscriber } from '../redis';
+import { AgentConnectionState, AgentInfo } from './utils';
-const STATUS_EX_SECONDS = 24 * 60 * 60; // 24 hours in seconds
+const INFO_EX_SECONDS = 24 * 60 * 60; // 24 hours in seconds
/**
* Handles a new WebSocket connection to the agent service.
@@ -51,11 +54,11 @@ export async function handleAgentConnection(socket: ws.WebSocket, request: Incom
break;
case 'agent:heartbeat:request':
- sendMessage({ type: 'agent:heartbeat:response' });
+ sendMessage({ type: 'agent:heartbeat:response', version: MEDPLUM_VERSION });
break;
case 'agent:heartbeat:response':
- await updateStatus('connected');
+ await updateAgentInfo({ status: AgentConnectionState.CONNECTED, version: command.version });
break;
// @ts-expect-error - Deprecated message type
@@ -83,7 +86,7 @@ export async function handleAgentConnection(socket: ws.WebSocket, request: Incom
socket.on(
'close',
AsyncLocalStorage.bind(async () => {
- await updateStatus('disconnected');
+ await updateAgentStatus(AgentConnectionState.DISCONNECTED);
heartbeat.removeEventListener('heartbeat', heartbeatHandler);
redisSubscriber?.disconnect();
redisSubscriber = undefined;
@@ -110,12 +113,18 @@ export async function handleAgentConnection(socket: ws.WebSocket, request: Incom
agentId = command.agentId;
- const { login, project, membership } = await getLoginForAccessToken(command.accessToken);
+ const authState = await getLoginForAccessToken(command.accessToken);
+ if (!authState) {
+ sendError('Invalid access token');
+ return;
+ }
+
+ const { login, project, membership } = authState;
const repo = await getRepoForLogin(login, membership, project, true);
const agent = await repo.readResource('Agent', agentId);
// Connect to Redis
- redisSubscriber = getRedis().duplicate();
+ redisSubscriber = getRedisSubscriber();
await redisSubscriber.subscribe(getReferenceString(agent));
redisSubscriber.on('message', (_channel: string, message: string) => {
// When a message is received, send it to the agent
@@ -129,7 +138,7 @@ export async function handleAgentConnection(socket: ws.WebSocket, request: Incom
sendMessage({ type: 'agent:connect:response' });
// Update the agent status in Redis
- await updateStatus('connected');
+ await updateAgentStatus(AgentConnectionState.CONNECTED);
}
/**
@@ -158,7 +167,13 @@ export async function handleAgentConnection(socket: ws.WebSocket, request: Incom
return;
}
- const { login, project, membership } = await getLoginForAccessToken(command.accessToken);
+ const authState = await getLoginForAccessToken(command.accessToken);
+ if (!authState) {
+ sendError('Invalid access token');
+ return;
+ }
+
+ const { login, project, membership } = authState;
const repo = await getRepoForLogin(login, membership, project, true);
const agent = await repo.readResource('Agent', agentId);
const channel = agent?.channel?.find((c) => c.name === command.channel);
@@ -204,23 +219,53 @@ export async function handleAgentConnection(socket: ws.WebSocket, request: Incom
}
/**
- * Updates the agent status in Redis.
- * This is used by the Agent "$status" operation to monitor agent status.
+ * Updates the agent info in Redis.
+ * This is used by the Agent "$status" operation to monitor agent status and other info.
* See packages/server/src/fhir/operations/agentstatus.ts for more details.
- * @param status - The new status.
+ * @param info - The latest info received from the Agent.
*/
- async function updateStatus(status: string): Promise<void> {
+ async function updateAgentInfo(info: AgentInfo): Promise<void> {
if (!agentId) {
// Not connected
return;
}
- await getRedis().set(
- `medplum:agent:${agentId}:status`,
+
+ let redis: Redis;
+ try {
+ redis = getRedis();
+ } catch (err) {
+ globalLogger.warn(`[Agent]: Attempted to update agent info after server closed. ${normalizeErrorString(err)}`);
+ return;
+ }
+
+ await redis.set(
+ `medplum:agent:${agentId}:info`,
JSON.stringify({
- status,
+ ...info,
lastUpdated: new Date().toISOString(),
- }),
+ } satisfies AgentInfo),
'EX',
- STATUS_EX_SECONDS
+ INFO_EX_SECONDS
);
}
+
+ async function updateAgentStatus(status: AgentConnectionState): Promise<void> {
+ if (!agentId) {
+ // Not connected
+ return;
+ }
+
+ let redis: Redis;
+ try {
+ redis = getRedis();
+ } catch (err) {
+ globalLogger.warn(`[Agent]: Attempted to update agent status after server closed. ${normalizeErrorString(err)}`);
+ return;
+ }
+
+ const lastInfo = await redis.get(`medplum:agent:${agentId}:info`);
+ if (!lastInfo) {
+ await updateAgentInfo({ status, version: 'unknown', lastUpdated: new Date().toISOString() });
+ return;
+ }
+ await updateAgentInfo({ ...(JSON.parse(lastInfo) as AgentInfo), status });
+ }
}
diff --git a/packages/server/src/app.test.ts b/packages/server/src/app.test.ts
index 2251926ba6..f134b5ab3e 100644
--- a/packages/server/src/app.test.ts
+++ b/packages/server/src/app.test.ts
@@ -63,6 +63,28 @@ describe('App', () => {
expect(await shutdownApp()).toBeUndefined();
});
+ test('X-Forwarded-For spoofing', async () => {
+ const app = express();
+ const config = await loadTestConfig();
+ config.logLevel = 'info';
+ config.logRequests = true;
+
+ const originalWrite = process.stdout.write;
+ process.stdout.write = jest.fn();
+
+ await initApp(app, config);
+ const res = await request(app).get('/').set('X-Forwarded-For', '1.1.1.1, 2.2.2.2');
+ expect(res.status).toBe(200);
+ expect(process.stdout.write).toHaveBeenCalledTimes(1);
+
+ const logLine = (process.stdout.write as jest.Mock).mock.calls[0][0];
+ const logObj = JSON.parse(logLine);
+ expect(logObj.ip).toBe('2.2.2.2');
+
+ expect(await shutdownApp()).toBeUndefined();
+ process.stdout.write = originalWrite;
+ });
+
test('Internal Server Error', async () => {
const app = express();
app.get('/throw', () => {
diff --git a/packages/server/src/app.ts b/packages/server/src/app.ts
index 510113a0b5..ee51a467b7 100644
--- a/packages/server/src/app.ts
+++ b/packages/server/src/app.ts
@@ -36,7 +36,7 @@ import { keyValueRouter } from './keyvalue/routes';
import { initKeys } from './oauth/keys';
import { oauthRouter } from './oauth/routes';
import { openApiHandler } from './openapi';
-import { closeRateLimiter } from './ratelimit';
+import { closeRateLimiter, getRateLimiter } from './ratelimit';
import { closeRedis, initRedis } from './redis';
import { scimRouter } from './scim/routes';
import { seedDatabase } from './seed';
@@ -135,12 +135,13 @@ export async function initApp(app: Express, config: MedplumServerConfig): Promis
initWebSockets(server);
app.set('etag', false);
- app.set('trust proxy', true);
+ app.set('trust proxy', 1);
app.set('x-powered-by', false);
app.use(standardHeaders);
app.use(cors(corsOptions));
app.use(compression());
app.use(attachRequestContext);
+ app.use(getRateLimiter());
app.use('/fhir/R4/Binary', binaryRouter);
app.use(
urlencoded({
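
Two related changes meet in this hunk: the rate limiter now runs for every route (it is removed from `auth/routes.ts` further down), and `trust proxy` is narrowed from `true` to `1`, so only the immediate reverse proxy is trusted when resolving the client IP. A rough illustration of the resulting `X-Forwarded-For` handling, matching the new spoofing test above (a sketch, not the Medplum middleware itself):

import express from 'express';

const app = express();
app.set('trust proxy', 1); // trust exactly one proxy hop

app.get('/', (req, res) => {
  // With "X-Forwarded-For: 1.1.1.1, 2.2.2.2" and one trusted hop, Express
  // resolves req.ip to the right-most entry (2.2.2.2), i.e. the value supplied
  // by the trusted proxy, so earlier spoofed entries are ignored.
  res.json({ ip: req.ip });
});
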
diff --git a/packages/server/src/auth/changepassword.test.ts b/packages/server/src/auth/changepassword.test.ts
index b3fda1c1b7..61d3f8f71b 100644
--- a/packages/server/src/auth/changepassword.test.ts
+++ b/packages/server/src/auth/changepassword.test.ts
@@ -1,4 +1,3 @@
-import { SendEmailCommand, SESv2Client } from '@aws-sdk/client-sesv2';
import { badRequest } from '@medplum/core';
import { randomUUID } from 'crypto';
import express from 'express';
@@ -10,7 +9,6 @@ import { loadTestConfig } from '../config';
import { setupPwnedPasswordMock, setupRecaptchaMock, withTestContext } from '../test.setup';
import { registerNew } from './register';
-jest.mock('@aws-sdk/client-sesv2');
jest.mock('hibp');
jest.mock('node-fetch');
@@ -27,8 +25,6 @@ describe('Change Password', () => {
});
beforeEach(() => {
- (SESv2Client as unknown as jest.Mock).mockClear();
- (SendEmailCommand as unknown as jest.Mock).mockClear();
(fetch as unknown as jest.Mock).mockClear();
(pwnedPassword as unknown as jest.Mock).mockClear();
setupPwnedPasswordMock(pwnedPassword as unknown as jest.Mock, 0);
diff --git a/packages/server/src/auth/external.test.ts b/packages/server/src/auth/external.test.ts
index 3b7d193604..f12ce7e175 100644
--- a/packages/server/src/auth/external.test.ts
+++ b/packages/server/src/auth/external.test.ts
@@ -106,6 +106,12 @@ describe('External', () => {
expect(res.body.issue[0].details.text).toBe('Missing state');
});
+ test('Invalid JSON state', async () => {
+ const res = await request(app).get('/auth/external?code=xyz&state=xyz');
+ expect(res.status).toBe(400);
+ expect(res.body.issue[0].details.text).toBe('Invalid state');
+ });
+
test('Unknown domain', async () => {
// Build the external callback URL with an unrecognized domain
const url = appendQueryParams('/auth/external', {
diff --git a/packages/server/src/auth/external.ts b/packages/server/src/auth/external.ts
index 82ad68d16c..2174ca2bd4 100644
--- a/packages/server/src/auth/external.ts
+++ b/packages/server/src/auth/external.ts
@@ -47,7 +47,13 @@ export const externalCallbackHandler = async (req: Request, res: Response): Prom
return;
}
- const body = JSON.parse(state) as ExternalAuthState;
+ let body: ExternalAuthState;
+ try {
+ body = JSON.parse(state);
+ } catch (err) {
+ sendOutcome(res, badRequest('Invalid state'));
+ return;
+ }
const { idp, client } = await getIdentityProvider(body);
if (!idp) {
diff --git a/packages/server/src/auth/login.test.ts b/packages/server/src/auth/login.test.ts
index c3a5f01b88..75719c9bfa 100644
--- a/packages/server/src/auth/login.test.ts
+++ b/packages/server/src/auth/login.test.ts
@@ -29,6 +29,7 @@ describe('Login', () => {
beforeAll(() =>
withTestContext(async () => {
const config = await loadTestConfig();
+ config.emailProvider = 'awsses';
await initApp(app, config);
// Create a test project
diff --git a/packages/server/src/auth/profile.test.ts b/packages/server/src/auth/profile.test.ts
index f85275bbcd..6787de46af 100644
--- a/packages/server/src/auth/profile.test.ts
+++ b/packages/server/src/auth/profile.test.ts
@@ -9,8 +9,6 @@ import { getSystemRepo } from '../fhir/repo';
import { withTestContext } from '../test.setup';
import { registerNew } from './register';
-jest.mock('@aws-sdk/client-sesv2');
-
const app = express();
const email = `multi${randomUUID()}@example.com`;
const password = randomUUID();
diff --git a/packages/server/src/auth/resetpassword.test.ts b/packages/server/src/auth/resetpassword.test.ts
index 42490c3748..72fbd8d5b6 100644
--- a/packages/server/src/auth/resetpassword.test.ts
+++ b/packages/server/src/auth/resetpassword.test.ts
@@ -24,6 +24,7 @@ describe('Reset Password', () => {
beforeAll(async () => {
const config = await loadTestConfig();
+ config.emailProvider = 'awsses';
await initApp(app, config);
});
diff --git a/packages/server/src/auth/revoke.test.ts b/packages/server/src/auth/revoke.test.ts
index 0686ed1317..aa91a9cc2d 100644
--- a/packages/server/src/auth/revoke.test.ts
+++ b/packages/server/src/auth/revoke.test.ts
@@ -9,8 +9,6 @@ import { withTestContext } from '../test.setup';
import { registerNew } from './register';
import { setPassword } from './setpassword';
-jest.mock('@aws-sdk/client-sesv2');
-
const app = express();
describe('Revoke', () => {
diff --git a/packages/server/src/auth/routes.ts b/packages/server/src/auth/routes.ts
index ed21829ef7..ee02e35db3 100644
--- a/packages/server/src/auth/routes.ts
+++ b/packages/server/src/auth/routes.ts
@@ -1,7 +1,8 @@
+import { badRequest } from '@medplum/core';
+import { OperationOutcome, Project } from '@medplum/fhirtypes';
import { Router } from 'express';
import { asyncWrap } from '../async';
import { authenticateRequest } from '../oauth/middleware';
-import { getRateLimiter } from '../ratelimit';
import { changePasswordHandler, changePasswordValidator } from './changepassword';
import { exchangeHandler, exchangeValidator } from './exchange';
import { externalCallbackHandler } from './external';
@@ -18,14 +19,11 @@ import { resetPasswordHandler, resetPasswordValidator } from './resetpassword';
import { revokeHandler, revokeValidator } from './revoke';
import { scopeHandler, scopeValidator } from './scope';
import { setPasswordHandler, setPasswordValidator } from './setpassword';
-import { verifyEmailHandler, verifyEmailValidator } from './verifyemail';
import { statusHandler, statusValidator } from './status';
-import { badRequest } from '@medplum/core';
-import { OperationOutcome, Project } from '@medplum/fhirtypes';
import { validateRecaptcha } from './utils';
+import { verifyEmailHandler, verifyEmailValidator } from './verifyemail';
export const authRouter = Router();
-authRouter.use(getRateLimiter());
authRouter.use('/mfa', mfaRouter);
authRouter.post('/method', methodValidator, asyncWrap(methodHandler));
authRouter.get('/external', asyncWrap(externalCallbackHandler));
diff --git a/packages/server/src/auth/setpassword.test.ts b/packages/server/src/auth/setpassword.test.ts
index d0a230bad5..ffa3ee9b2a 100644
--- a/packages/server/src/auth/setpassword.test.ts
+++ b/packages/server/src/auth/setpassword.test.ts
@@ -23,6 +23,7 @@ const app = express();
describe('Set Password', () => {
beforeAll(async () => {
const config = await loadTestConfig();
+ config.emailProvider = 'awsses';
await initApp(app, config);
});
diff --git a/packages/server/src/cloud/aws/config.test.ts b/packages/server/src/cloud/aws/config.test.ts
new file mode 100644
index 0000000000..dd77a4a498
--- /dev/null
+++ b/packages/server/src/cloud/aws/config.test.ts
@@ -0,0 +1,71 @@
+import { GetSecretValueCommand, SecretsManagerClient } from '@aws-sdk/client-secrets-manager';
+import { GetParametersByPathCommand, SSMClient } from '@aws-sdk/client-ssm';
+import { AwsClientStub, mockClient } from 'aws-sdk-client-mock';
+import 'aws-sdk-client-mock-jest';
+import { getConfig, loadConfig } from '../../config';
+
+describe('Config', () => {
+ let mockSSMClient: AwsClientStub<SSMClient>;
+ let mockSecretsManagerClient: AwsClientStub<SecretsManagerClient>;
+
+ beforeEach(() => {
+ mockSSMClient = mockClient(SSMClient);
+ mockSecretsManagerClient = mockClient(SecretsManagerClient);
+
+ mockSecretsManagerClient.on(GetSecretValueCommand).resolves({
+ SecretString: JSON.stringify({ host: 'host', port: 123 }),
+ });
+
+ mockSSMClient.on(GetParametersByPathCommand).resolves({
+ Parameters: [
+ { Name: 'baseUrl', Value: 'https://www.example.com/' },
+ { Name: 'database.ssl.require', Value: 'true' },
+ { Name: 'database.ssl.rejectUnauthorized', Value: 'true' },
+ { Name: 'database.ssl.ca', Value: 'DatabaseSslCa' },
+ { Name: 'DatabaseSecrets', Value: 'DatabaseSecretsArn' },
+ { Name: 'RedisSecrets', Value: 'RedisSecretsArn' },
+ { Name: 'port', Value: '8080' },
+ { Name: 'botCustomFunctionsEnabled', Value: 'true' },
+ { Name: 'logAuditEvents', Value: 'true' },
+ { Name: 'registerEnabled', Value: 'false' },
+ ],
+ });
+ });
+
+ afterEach(() => {
+ mockSSMClient.restore();
+ mockSecretsManagerClient.restore();
+ });
+
+ test('Load AWS config', async () => {
+ const config = await loadConfig('aws:test');
+ expect(config).toBeDefined();
+ expect(config.baseUrl).toBeDefined();
+ expect(config.port).toEqual(8080);
+ expect(config.botCustomFunctionsEnabled).toEqual(true);
+ expect(config.logAuditEvents).toEqual(true);
+ expect(config.registerEnabled).toEqual(false);
+ expect(config.database).toBeDefined();
+ expect(config.database.ssl).toBeDefined();
+ expect(config.database.ssl?.require).toEqual(true);
+ expect(config.database.ssl?.rejectUnauthorized).toEqual(true);
+ expect(config.database.ssl?.ca).toEqual('DatabaseSslCa');
+ expect(getConfig()).toBe(config);
+ expect(mockSSMClient).toReceiveCommand(GetParametersByPathCommand);
+ });
+
+ test('Load region AWS config', async () => {
+ const config = await loadConfig('aws:ap-southeast-2:test');
+ expect(config).toBeDefined();
+ expect(config.baseUrl).toBeDefined();
+ expect(config.port).toEqual(8080);
+ expect(getConfig()).toBe(config);
+ expect(mockSecretsManagerClient).toReceiveCommand(GetSecretValueCommand);
+ expect(mockSecretsManagerClient).toReceiveCommandWith(GetSecretValueCommand, {
+ SecretId: 'DatabaseSecretsArn',
+ });
+ expect(mockSecretsManagerClient).toReceiveCommandWith(GetSecretValueCommand, {
+ SecretId: 'RedisSecretsArn',
+ });
+ });
+});
diff --git a/packages/server/src/cloud/aws/config.ts b/packages/server/src/cloud/aws/config.ts
new file mode 100644
index 0000000000..9ca28b8550
--- /dev/null
+++ b/packages/server/src/cloud/aws/config.ts
@@ -0,0 +1,118 @@
+import { GetSecretValueCommand, SecretsManagerClient } from '@aws-sdk/client-secrets-manager';
+import { GetParametersByPathCommand, Parameter, SSMClient } from '@aws-sdk/client-ssm';
+import { splitN } from '@medplum/core';
+import { MedplumServerConfig } from '../../config';
+
+const DEFAULT_AWS_REGION = 'us-east-1';
+
+/**
+ * Loads configuration settings from AWS SSM Parameter Store.
+ * @param path - The AWS SSM Parameter Store path prefix.
+ * @returns The loaded configuration.
+ */
+export async function loadAwsConfig(path: string): Promise<MedplumServerConfig> {
+ let region = DEFAULT_AWS_REGION;
+ if (path.includes(':')) {
+ [region, path] = splitN(path, ':', 2);
+ }
+
+ const client = new SSMClient({ region });
+ const config: Record<string, any> = {};
+ const parameters = [] as Parameter[];
+ let nextToken: string | undefined;
+ do {
+ const response = await client.send(
+ new GetParametersByPathCommand({
+ Path: path,
+ NextToken: nextToken,
+ WithDecryption: true,
+ })
+ );
+ if (response.Parameters) {
+ parameters.push(...response.Parameters);
+ }
+ nextToken = response.NextToken;
+ } while (nextToken);
+
+ // Load special AWS Secrets Manager secrets first
+ for (const param of parameters) {
+ const key = (param.Name as string).replace(path, '');
+ const value = param.Value as string;
+ if (key === 'DatabaseSecrets') {
+ config['database'] = await loadAwsSecrets(region, value);
+ } else if (key === 'RedisSecrets') {
+ config['redis'] = await loadAwsSecrets(region, value);
+ }
+ }
+
+ // Then load other parameters, which may override the secrets
+ for (const param of parameters) {
+ const key = (param.Name as string).replace(path, '');
+ const value = param.Value as string;
+ setValue(config, key, value);
+ }
+
+ return config as MedplumServerConfig;
+}
+
+/**
+ * Returns the AWS Database Secret data as a JSON map.
+ * @param region - The AWS region.
+ * @param secretId - Secret ARN
+ * @returns The secret data as a JSON map.
+ */
+async function loadAwsSecrets(region: string, secretId: string): Promise<Record<string, any> | undefined> {
+ const client = new SecretsManagerClient({ region });
+ const result = await client.send(new GetSecretValueCommand({ SecretId: secretId }));
+
+ if (!result.SecretString) {
+ return undefined;
+ }
+
+ return JSON.parse(result.SecretString);
+}
+
+function setValue(config: Record<string, any>, key: string, value: string): void {
+ const keySegments = key.split('.');
+ let obj = config;
+
+ while (keySegments.length > 1) {
+ const segment = keySegments.shift() as string;
+ if (!obj[segment]) {
+ obj[segment] = {};
+ }
+ obj = obj[segment] as Record<string, any>;
+ }
+
+ let parsedValue: any = value;
+ if (isIntegerConfig(key)) {
+ parsedValue = parseInt(value, 10);
+ } else if (isBooleanConfig(key)) {
+ parsedValue = value === 'true';
+ } else if (isObjectConfig(key)) {
+ parsedValue = JSON.parse(value);
+ }
+
+ obj[keySegments[0]] = parsedValue;
+}
+
+function isIntegerConfig(key: string): boolean {
+ return key === 'port' || key === 'accurateCountThreshold';
+}
+
+function isBooleanConfig(key: string): boolean {
+ return (
+ key === 'botCustomFunctionsEnabled' ||
+ key === 'database.ssl.rejectUnauthorized' ||
+ key === 'database.ssl.require' ||
+ key === 'logRequests' ||
+ key === 'logAuditEvents' ||
+ key === 'registerEnabled' ||
+ key === 'require' ||
+ key === 'rejectUnauthorized'
+ );
+}
+
+function isObjectConfig(key: string): boolean {
+ return key === 'tls';
+}
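
`setValue` is what turns the flat SSM parameter names into the nested server config: dotted keys become nested objects, and the keys recognized by `isIntegerConfig`, `isBooleanConfig`, and `isObjectConfig` are coerced from strings. Inside this module, the helper behaves roughly like this:

const example: Record<string, any> = {};
setValue(example, 'port', '8080');                 // adds { port: 8080 }
setValue(example, 'registerEnabled', 'false');     // adds { registerEnabled: false }
setValue(example, 'database.ssl.require', 'true'); // adds { database: { ssl: { require: true } } }
setValue(example, 'baseUrl', 'https://www.example.com/'); // left as a plain string
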
diff --git a/packages/server/src/cloud/aws/deploy.test.ts b/packages/server/src/cloud/aws/deploy.test.ts
new file mode 100644
index 0000000000..2482d5f102
--- /dev/null
+++ b/packages/server/src/cloud/aws/deploy.test.ts
@@ -0,0 +1,255 @@
+import {
+ CreateFunctionCommand,
+ GetFunctionCommand,
+ GetFunctionConfigurationCommand,
+ LambdaClient,
+ ListLayerVersionsCommand,
+ UpdateFunctionCodeCommand,
+ UpdateFunctionConfigurationCommand,
+} from '@aws-sdk/client-lambda';
+import { ContentType } from '@medplum/core';
+import { Bot } from '@medplum/fhirtypes';
+import { AwsClientStub, mockClient } from 'aws-sdk-client-mock';
+import 'aws-sdk-client-mock-jest';
+import express from 'express';
+import request from 'supertest';
+import { initApp, shutdownApp } from '../../app';
+import { loadTestConfig } from '../../config';
+import { initTestAuth } from '../../test.setup';
+
+const app = express();
+let accessToken: string;
+let mockLambdaClient: AwsClientStub<LambdaClient>;
+
+describe('Deploy', () => {
+ beforeAll(async () => {
+ const config = await loadTestConfig();
+ await initApp(app, config);
+ accessToken = await initTestAuth();
+ });
+
+ afterAll(async () => {
+ await shutdownApp();
+ });
+
+ beforeEach(() => {
+ let created = false;
+
+ mockLambdaClient = mockClient(LambdaClient);
+
+ mockLambdaClient.on(CreateFunctionCommand).callsFake(({ FunctionName }) => {
+ created = true;
+
+ return {
+ Configuration: {
+ FunctionName,
+ },
+ };
+ });
+
+ mockLambdaClient.on(GetFunctionCommand).callsFake(({ FunctionName }) => {
+ if (created) {
+ return {
+ Configuration: {
+ FunctionName,
+ },
+ };
+ }
+
+ return {
+ Configuration: {},
+ };
+ });
+
+ mockLambdaClient.on(GetFunctionConfigurationCommand).callsFake(({ FunctionName }) => {
+ return {
+ FunctionName,
+ Runtime: 'nodejs18.x',
+ Handler: 'index.handler',
+ State: 'Active',
+ Layers: [
+ {
+ Arn: 'arn:aws:lambda:us-east-1:123456789012:layer:test-layer:1',
+ },
+ ],
+ };
+ });
+
+ mockLambdaClient.on(ListLayerVersionsCommand).resolves({
+ LayerVersions: [
+ {
+ LayerVersionArn: 'arn:aws:lambda:us-east-1:123456789012:layer:test-layer:1',
+ },
+ ],
+ });
+
+ mockLambdaClient.on(UpdateFunctionCodeCommand).callsFake(({ FunctionName }) => ({
+ Configuration: {
+ FunctionName,
+ },
+ }));
+ });
+
+ afterEach(() => {
+ mockLambdaClient.restore();
+ });
+
+ test('Happy path', async () => {
+ // Step 1: Create a bot
+ const res1 = await request(app)
+ .post(`/fhir/R4/Bot`)
+ .set('Content-Type', ContentType.FHIR_JSON)
+ .set('Authorization', 'Bearer ' + accessToken)
+ .send({
+ resourceType: 'Bot',
+ name: 'Test Bot',
+ runtimeVersion: 'awslambda',
+ code: `
+ export async function handler() {
+ console.log('input', input);
+ return input;
+ }
+ `,
+ });
+ expect(res1.status).toBe(201);
+
+ const bot = res1.body as Bot;
+ const name = `medplum-bot-lambda-${bot.id}`;
+
+ // Step 2: Deploy the bot
+ const res2 = await request(app)
+ .post(`/fhir/R4/Bot/${bot.id}/$deploy`)
+ .set('Content-Type', ContentType.FHIR_JSON)
+ .set('Authorization', 'Bearer ' + accessToken)
+ .send({
+ code: `
+ export async function handler() {
+ console.log('input', input);
+ return input;
+ }
+ `,
+ });
+ expect(res2.status).toBe(200);
+
+ expect(mockLambdaClient).toHaveReceivedCommandTimes(GetFunctionCommand, 1);
+ expect(mockLambdaClient).toHaveReceivedCommandTimes(ListLayerVersionsCommand, 1);
+ expect(mockLambdaClient).toHaveReceivedCommandTimes(CreateFunctionCommand, 1);
+ expect(mockLambdaClient).toHaveReceivedCommandWith(GetFunctionCommand, {
+ FunctionName: name,
+ });
+ expect(mockLambdaClient).toHaveReceivedCommandWith(CreateFunctionCommand, {
+ FunctionName: name,
+ });
+ mockLambdaClient.resetHistory();
+
+ // Step 3: Deploy again to trigger the update path
+ const res3 = await request(app)
+ .post(`/fhir/R4/Bot/${bot.id}/$deploy`)
+ .set('Content-Type', ContentType.FHIR_JSON)
+ .set('Authorization', 'Bearer ' + accessToken)
+ .send({
+ code: `
+ export async function handler() {
+ console.log('input', input);
+ return input;
+ }
+ `,
+ filename: 'updated.js',
+ });
+ expect(res3.status).toBe(200);
+
+ expect(mockLambdaClient).toHaveReceivedCommandTimes(GetFunctionCommand, 1);
+ expect(mockLambdaClient).toHaveReceivedCommandTimes(ListLayerVersionsCommand, 1);
+ expect(mockLambdaClient).toHaveReceivedCommandTimes(GetFunctionConfigurationCommand, 1);
+ expect(mockLambdaClient).toHaveReceivedCommandTimes(UpdateFunctionConfigurationCommand, 0);
+ expect(mockLambdaClient).toHaveReceivedCommandTimes(UpdateFunctionCodeCommand, 1);
+ expect(mockLambdaClient).toHaveReceivedCommandWith(GetFunctionCommand, {
+ FunctionName: name,
+ });
+ });
+
+ test('Deploy bot with lambda layer update', async () => {
+ // When deploying a bot, we check if we need to update the bot configuration.
+ // This test verifies that we correctly update the bot configuration when the lambda layer changes.
+ // Step 1: Create a bot
+ const res1 = await request(app)
+ .post(`/fhir/R4/Bot`)
+ .set('Content-Type', ContentType.FHIR_JSON)
+ .set('Authorization', 'Bearer ' + accessToken)
+ .send({
+ resourceType: 'Bot',
+ name: 'Test Bot',
+ runtimeVersion: 'awslambda',
+ code: `
+ export async function handler() {
+ console.log('input', input);
+ return input;
+ }
+ `,
+ });
+ expect(res1.status).toBe(201);
+
+ const bot = res1.body as Bot;
+ const name = `medplum-bot-lambda-${bot.id}`;
+
+ // Step 2: Deploy the bot
+ const res2 = await request(app)
+ .post(`/fhir/R4/Bot/${bot.id}/$deploy`)
+ .set('Content-Type', ContentType.FHIR_JSON)
+ .set('Authorization', 'Bearer ' + accessToken)
+ .send({
+ code: `
+ export async function handler() {
+ console.log('input', input);
+ return input;
+ }
+ `,
+ });
+ expect(res2.status).toBe(200);
+
+ expect(mockLambdaClient).toHaveReceivedCommandTimes(GetFunctionCommand, 1);
+ expect(mockLambdaClient).toHaveReceivedCommandTimes(ListLayerVersionsCommand, 1);
+ expect(mockLambdaClient).toHaveReceivedCommandTimes(CreateFunctionCommand, 1);
+ expect(mockLambdaClient).toHaveReceivedCommandWith(GetFunctionCommand, {
+ FunctionName: name,
+ });
+ expect(mockLambdaClient).toHaveReceivedCommandWith(CreateFunctionCommand, {
+ FunctionName: name,
+ });
+ mockLambdaClient.resetHistory();
+
+ // Step 3: Simulate releasing a new version of the lambda layer
+ mockLambdaClient.on(ListLayerVersionsCommand).resolves({
+ LayerVersions: [
+ {
+ LayerVersionArn: 'new-layer-version-arn',
+ },
+ ],
+ });
+
+ // Step 4: Deploy again to trigger the update path
+ const res3 = await request(app)
+ .post(`/fhir/R4/Bot/${bot.id}/$deploy`)
+ .set('Content-Type', ContentType.FHIR_JSON)
+ .set('Authorization', 'Bearer ' + accessToken)
+ .send({
+ code: `
+ export async function handler() {
+ console.log('input', input);
+ return input;
+ }
+ `,
+ filename: 'updated.js',
+ });
+ expect(res3.status).toBe(200);
+
+ expect(mockLambdaClient).toHaveReceivedCommandTimes(GetFunctionCommand, 1);
+ expect(mockLambdaClient).toHaveReceivedCommandTimes(ListLayerVersionsCommand, 1);
+ expect(mockLambdaClient).toHaveReceivedCommandTimes(GetFunctionConfigurationCommand, 2);
+ expect(mockLambdaClient).toHaveReceivedCommandTimes(UpdateFunctionConfigurationCommand, 1);
+ expect(mockLambdaClient).toHaveReceivedCommandTimes(UpdateFunctionCodeCommand, 1);
+ expect(mockLambdaClient).toHaveReceivedCommandWith(GetFunctionCommand, {
+ FunctionName: name,
+ });
+ });
+});
diff --git a/packages/server/src/cloud/aws/deploy.ts b/packages/server/src/cloud/aws/deploy.ts
new file mode 100644
index 0000000000..50dc4477dd
--- /dev/null
+++ b/packages/server/src/cloud/aws/deploy.ts
@@ -0,0 +1,260 @@
+import {
+ CreateFunctionCommand,
+ GetFunctionCommand,
+ GetFunctionConfigurationCommand,
+ GetFunctionConfigurationCommandOutput,
+ LambdaClient,
+ ListLayerVersionsCommand,
+ PackageType,
+ UpdateFunctionCodeCommand,
+ UpdateFunctionConfigurationCommand,
+} from '@aws-sdk/client-lambda';
+import { sleep } from '@medplum/core';
+import { Bot } from '@medplum/fhirtypes';
+import { ConfiguredRetryStrategy } from '@smithy/util-retry';
+import JSZip from 'jszip';
+import { getConfig } from '../../config';
+import { getRequestContext } from '../../context';
+
+const LAMBDA_RUNTIME = 'nodejs18.x';
+
+const LAMBDA_HANDLER = 'index.handler';
+
+const LAMBDA_MEMORY = 1024;
+
+const WRAPPER_CODE = `const { ContentType, Hl7Message, MedplumClient } = require("@medplum/core");
+const fetch = require("node-fetch");
+const PdfPrinter = require("pdfmake");
+const userCode = require("./user.js");
+
+exports.handler = async (event, context) => {
+ const { bot, baseUrl, accessToken, contentType, secrets, traceId } = event;
+ const medplum = new MedplumClient({
+ baseUrl,
+ fetch: function(url, options = {}) {
+ options.headers ||= {};
+ options.headers['X-Trace-Id'] = traceId;
+ options.headers['traceparent'] = traceId;
+ return fetch(url, options);
+ },
+ createPdf,
+ });
+ medplum.setAccessToken(accessToken);
+ try {
+ let input = event.input;
+ if (contentType === ContentType.HL7_V2 && input) {
+ input = Hl7Message.parse(input);
+ }
+ let result = await userCode.handler(medplum, { bot, input, contentType, secrets, traceId });
+ if (contentType === ContentType.HL7_V2 && result) {
+ result = result.toString();
+ }
+ return result;
+ } catch (err) {
+ if (err instanceof Error) {
+ console.log("Unhandled error: " + err.message + "\\n" + err.stack);
+ } else if (typeof err === "object") {
+ console.log("Unhandled error: " + JSON.stringify(err, undefined, 2));
+ } else {
+ console.log("Unhandled error: " + err);
+ }
+ throw err;
+ }
+};
+
+function createPdf(docDefinition, tableLayouts, fonts) {
+ if (!fonts) {
+ fonts = {
+ Helvetica: {
+ normal: 'Helvetica',
+ bold: 'Helvetica-Bold',
+ italics: 'Helvetica-Oblique',
+ bolditalics: 'Helvetica-BoldOblique',
+ },
+ Roboto: {
+ normal: '/opt/fonts/Roboto/Roboto-Regular.ttf',
+ bold: '/opt/fonts/Roboto/Roboto-Medium.ttf',
+ italics: '/opt/fonts/Roboto/Roboto-Italic.ttf',
+ bolditalics: '/opt/fonts/Roboto/Roboto-MediumItalic.ttf'
+ },
+ Avenir: {
+ normal: '/opt/fonts/Avenir/Avenir.ttf'
+ }
+ };
+ }
+ return new Promise((resolve, reject) => {
+ const printer = new PdfPrinter(fonts);
+ const pdfDoc = printer.createPdfKitDocument(docDefinition, { tableLayouts });
+ const chunks = [];
+ pdfDoc.on('data', (chunk) => chunks.push(chunk));
+ pdfDoc.on('end', () => resolve(Buffer.concat(chunks)));
+ pdfDoc.on('error', reject);
+ pdfDoc.end();
+ });
+}
+`;
+
+export async function deployLambda(bot: Bot, code: string): Promise<void> {
+ const ctx = getRequestContext();
+
+ // Create a new AWS Lambda client
+ // Use a custom retry strategy to avoid throttling errors
+ // This is especially important when updating lambdas which also
+ // involve upgrading the layer version.
+ const client = new LambdaClient({
+ region: getConfig().awsRegion,
+ retryStrategy: new ConfiguredRetryStrategy(
+ 5, // max attempts
+ (attempt: number) => 500 * 2 ** attempt // Exponential backoff
+ ),
+ });
+
+ const name = `medplum-bot-lambda-${bot.id}`;
+ ctx.logger.info('Deploying lambda function for bot', { name });
+ const zipFile = await createZipFile(code);
+ ctx.logger.debug('Lambda function zip size', { bytes: zipFile.byteLength });
+
+ const exists = await lambdaExists(client, name);
+ if (!exists) {
+ await createLambda(client, name, zipFile);
+ } else {
+ await updateLambda(client, name, zipFile);
+ }
+}
+
+async function createZipFile(code: string): Promise<Uint8Array> {
+ const zip = new JSZip();
+ zip.file('user.js', code);
+ zip.file('index.js', WRAPPER_CODE);
+ return zip.generateAsync({ type: 'uint8array' });
+}
+
+/**
+ * Returns true if the AWS Lambda exists for the bot name.
+ * @param client - The AWS Lambda client.
+ * @param name - The bot name.
+ * @returns True if the bot exists.
+ */
+async function lambdaExists(client: LambdaClient, name: string): Promise<boolean> {
+ try {
+ const command = new GetFunctionCommand({ FunctionName: name });
+ const response = await client.send(command);
+ return response.Configuration?.FunctionName === name;
+ } catch (err) {
+ return false;
+ }
+}
+
+/**
+ * Creates a new AWS Lambda for the bot name.
+ * @param client - The AWS Lambda client.
+ * @param name - The bot name.
+ * @param zipFile - The zip file with the bot code.
+ */
+async function createLambda(client: LambdaClient, name: string, zipFile: Uint8Array): Promise<void> {
+ const layerVersion = await getLayerVersion(client);
+
+ await client.send(
+ new CreateFunctionCommand({
+ FunctionName: name,
+ Role: getConfig().botLambdaRoleArn,
+ Runtime: LAMBDA_RUNTIME,
+ Handler: LAMBDA_HANDLER,
+ MemorySize: LAMBDA_MEMORY,
+ PackageType: PackageType.Zip,
+ Layers: [layerVersion],
+ Code: {
+ ZipFile: zipFile,
+ },
+ Publish: true,
+ Timeout: 10, // seconds
+ })
+ );
+}
+
+/**
+ * Updates an existing AWS Lambda for the bot name.
+ * @param client - The AWS Lambda client.
+ * @param name - The bot name.
+ * @param zipFile - The zip file with the bot code.
+ */
+async function updateLambda(client: LambdaClient, name: string, zipFile: Uint8Array): Promise<void> {
+ // First, make sure the lambda configuration is up to date
+ await updateLambdaConfig(client, name);
+
+ // Then update the code
+ await client.send(
+ new UpdateFunctionCodeCommand({
+ FunctionName: name,
+ ZipFile: zipFile,
+ Publish: true,
+ })
+ );
+}
+
+/**
+ * Updates the lambda configuration.
+ * @param client - The AWS Lambda client.
+ * @param name - The lambda name.
+ */
+async function updateLambdaConfig(client: LambdaClient, name: string): Promise<void> {
+ const layerVersion = await getLayerVersion(client);
+ const functionConfig = await getLambdaConfig(client, name);
+ if (
+ functionConfig.Runtime === LAMBDA_RUNTIME &&
+ functionConfig.Handler === LAMBDA_HANDLER &&
+ functionConfig.Layers?.[0].Arn === layerVersion
+ ) {
+ // Everything is up-to-date
+ return;
+ }
+
+ // Need to update
+ await client.send(
+ new UpdateFunctionConfigurationCommand({
+ FunctionName: name,
+ Role: getConfig().botLambdaRoleArn,
+ Runtime: LAMBDA_RUNTIME,
+ Handler: LAMBDA_HANDLER,
+ Layers: [layerVersion],
+ })
+ );
+
+ // Wait for the update to complete before returning
+ // Wait up to 5 seconds
+ // See: https://github.com/aws/aws-toolkit-visual-studio/issues/197
+ // See: https://aws.amazon.com/blogs/compute/coming-soon-expansion-of-aws-lambda-states-to-all-functions/
+ for (let i = 0; i < 5; i++) {
+ const config = await getLambdaConfig(client, name);
+ // Valid Values: Pending | Active | Inactive | Failed
+ // See: https://docs.aws.amazon.com/lambda/latest/dg/API_GetFunctionConfiguration.html
+ if (config.State === 'Active') {
+ return;
+ }
+ await sleep(1000);
+ }
+}
+
+async function getLambdaConfig(client: LambdaClient, name: string): Promise<GetFunctionConfigurationCommandOutput> {
+ return client.send(
+ new GetFunctionConfigurationCommand({
+ FunctionName: name,
+ })
+ );
+}
+
+/**
+ * Returns the latest layer version for the Medplum bot layer.
+ * The first result is the latest version.
+ * See: https://stackoverflow.com/a/55752188
+ * @param client - The AWS Lambda client.
+ * @returns The most recent layer version ARN.
+ */
+async function getLayerVersion(client: LambdaClient): Promise<string> {
+ const command = new ListLayerVersionsCommand({
+ LayerName: getConfig().botLambdaLayerName,
+ MaxItems: 1,
+ });
+ const response = await client.send(command);
+ return response.LayerVersions?.[0].LayerVersionArn as string;
+}
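
A note on the retry strategy used above: `ConfiguredRetryStrategy` is given a delay function of `500 * 2 ** attempt`, so successive retries back off roughly as shown below (illustrative only; the exact attempt numbering is up to the SDK):

// Approximate delays produced by the backoff function above:
const delayMs = (attempt: number): number => 500 * 2 ** attempt;
console.log([0, 1, 2, 3].map(delayMs)); // [ 500, 1000, 2000, 4000 ]
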
diff --git a/packages/server/src/cloud/aws/email.ts b/packages/server/src/cloud/aws/email.ts
new file mode 100644
index 0000000000..d495c0a507
--- /dev/null
+++ b/packages/server/src/cloud/aws/email.ts
@@ -0,0 +1,41 @@
+import { SendEmailCommand, SESv2Client } from '@aws-sdk/client-sesv2';
+import { badRequest, normalizeErrorString, OperationOutcomeError } from '@medplum/core';
+import Mail from 'nodemailer/lib/mailer';
+import { getConfig } from '../../config';
+import { addressToString, buildAddresses, buildRawMessage } from '../../email/utils';
+
+/**
+ * Sends an email via AWS SES.
+ * @param options - The nodemailer options.
+ */
+export async function sendEmailViaSes(options: Mail.Options): Promise<void> {
+ const config = getConfig();
+ const fromAddress = addressToString(options.from);
+ const toAddresses = buildAddresses(options.to);
+ const ccAddresses = buildAddresses(options.cc);
+ const bccAddresses = buildAddresses(options.bcc);
+
+ let msg: Uint8Array;
+ try {
+ msg = await buildRawMessage(options);
+ } catch (err) {
+ throw new OperationOutcomeError(badRequest('Invalid email options: ' + normalizeErrorString(err)), err);
+ }
+
+ const sesClient = new SESv2Client({ region: config.awsRegion });
+ await sesClient.send(
+ new SendEmailCommand({
+ FromEmailAddress: fromAddress,
+ Destination: {
+ ToAddresses: toAddresses,
+ CcAddresses: ccAddresses,
+ BccAddresses: bccAddresses,
+ },
+ Content: {
+ Raw: {
+ Data: msg,
+ },
+ },
+ })
+ );
+}
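
`sendEmailViaSes` takes ordinary nodemailer `Mail.Options`; the `email/utils` helpers flatten the address fields and build the raw MIME message before handing it to SES. A hedged usage sketch (addresses and content are placeholders):

import { sendEmailViaSes } from './email';

async function sendWelcomeEmail(): Promise<void> {
  await sendEmailViaSes({
    from: 'no-reply@example.com',
    to: 'alice@example.com',
    subject: 'Welcome',
    text: 'Hello from the Medplum server',
  });
}
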
diff --git a/packages/server/src/cloud/aws/execute.test.ts b/packages/server/src/cloud/aws/execute.test.ts
new file mode 100644
index 0000000000..912321a1f3
--- /dev/null
+++ b/packages/server/src/cloud/aws/execute.test.ts
@@ -0,0 +1,231 @@
+import { InvokeCommand, LambdaClient, ListLayerVersionsCommand } from '@aws-sdk/client-lambda';
+import { ContentType } from '@medplum/core';
+import { Bot } from '@medplum/fhirtypes';
+import { AwsClientStub, mockClient } from 'aws-sdk-client-mock';
+import { randomUUID } from 'crypto';
+import express from 'express';
+import request from 'supertest';
+import { initApp, shutdownApp } from '../../app';
+import { getConfig, loadTestConfig } from '../../config';
+import { getBinaryStorage } from '../../fhir/storage';
+import { initTestAuth } from '../../test.setup';
+import { getLambdaFunctionName } from './execute';
+
+const app = express();
+let accessToken: string;
+let bot: Bot;
+
+describe('Execute', () => {
+ let mockLambdaClient: AwsClientStub<LambdaClient>;
+
+ beforeEach(() => {
+ mockLambdaClient = mockClient(LambdaClient);
+
+ mockLambdaClient.on(ListLayerVersionsCommand).resolves({
+ LayerVersions: [
+ {
+ LayerVersionArn: 'xyz',
+ },
+ ],
+ });
+
+ mockLambdaClient.on(InvokeCommand).callsFake(({ Payload }) => {
+ const decoder = new TextDecoder();
+ const event = JSON.parse(decoder.decode(Payload));
+ const output = JSON.stringify(event.input);
+ const encoder = new TextEncoder();
+
+ return {
+ LogResult: `U1RBUlQgUmVxdWVzdElkOiAxNDZmY2ZjZi1jMzJiLTQzZjUtODJhNi1lZTBmMzEzMmQ4NzMgVmVyc2lvbjogJExBVEVTVAoyMDIyLTA1LTMwVDE2OjEyOjIyLjY4NVoJMTQ2ZmNmY2YtYzMyYi00M2Y1LTgyYTYtZWUwZjMxMzJkODczCUlORk8gdGVzdApFTkQgUmVxdWVzdElkOiAxNDZmY2ZjZi1jMzJiLTQzZjUtODJhNi1lZTBmMzEzMmQ4NzMKUkVQT1JUIFJlcXVlc3RJZDogMTQ2ZmNmY2YtYzMyYi00M2Y1LTgyYTYtZWUwZjMxMzJkODcz`,
+ Payload: encoder.encode(output),
+ };
+ });
+ });
+
+ afterEach(() => {
+ mockLambdaClient.restore();
+ });
+
+ beforeAll(async () => {
+ const config = await loadTestConfig();
+ await initApp(app, config);
+ accessToken = await initTestAuth();
+
+ const res = await request(app)
+ .post(`/fhir/R4/Bot`)
+ .set('Content-Type', ContentType.FHIR_JSON)
+ .set('Authorization', 'Bearer ' + accessToken)
+ .send({
+ resourceType: 'Bot',
+ identifier: [{ system: 'https://example.com/bot', value: randomUUID() }],
+ name: 'Test Bot',
+ runtimeVersion: 'awslambda',
+ code: `
+ export async function handler(medplum, event) {
+ console.log('input', event.input);
+ return event.input;
+ }
+ `,
+ });
+ expect(res.status).toBe(201);
+ bot = res.body as Bot;
+ });
+
+ afterAll(async () => {
+ await shutdownApp();
+ });
+
+ test('Submit plain text', async () => {
+ const res = await request(app)
+ .post(`/fhir/R4/Bot/${bot.id}/$execute`)
+ .set('Content-Type', ContentType.TEXT)
+ .set('Authorization', 'Bearer ' + accessToken)
+ .send('input');
+ expect(res.status).toBe(200);
+ expect(res.headers['content-type']).toBe('text/plain; charset=utf-8');
+ expect(res.text).toEqual('input');
+ });
+
+ test('Submit FHIR with content type', async () => {
+ const res = await request(app)
+ .post(`/fhir/R4/Bot/${bot.id}/$execute`)
+ .set('Content-Type', ContentType.FHIR_JSON)
+ .set('Authorization', 'Bearer ' + accessToken)
+ .send({
+ resourceType: 'Patient',
+ name: [{ given: ['John'], family: ['Doe'] }],
+ });
+ expect(res.status).toBe(200);
+ expect(res.headers['content-type']).toBe('application/fhir+json; charset=utf-8');
+ });
+
+ test('Submit FHIR without content type', async () => {
+ const res = await request(app)
+ .post(`/fhir/R4/Bot/${bot.id}/$execute`)
+ .set('Authorization', 'Bearer ' + accessToken)
+ .send({
+ resourceType: 'Patient',
+ name: [{ given: ['John'], family: ['Doe'] }],
+ });
+ expect(res.status).toBe(200);
+ expect(res.headers['content-type']).toBe('application/json; charset=utf-8');
+ });
+
+ test('Submit HL7', async () => {
+ const binaryStorage = getBinaryStorage();
+ const writeFileSpy = jest.spyOn(binaryStorage, 'writeFile');
+
+ const text =
+ 'MSH|^~\\&|Main_HIS|XYZ_HOSPITAL|iFW|ABC_Lab|20160915003015||ACK|9B38584D|P|2.6.1|\r' +
+ 'MSA|AA|9B38584D|Everything was okay dokay!|';
+
+ const res = await request(app)
+ .post(`/fhir/R4/Bot/${bot.id}/$execute`)
+ .set('Content-Type', ContentType.HL7_V2)
+ .set('Authorization', 'Bearer ' + accessToken)
+ .send(text);
+ expect(res.status).toBe(200);
+ expect(res.headers['content-type']).toBe('x-application/hl7-v2+er7; charset=utf-8');
+ expect(writeFileSpy).toHaveBeenCalledTimes(1);
+
+ const args = writeFileSpy.mock.calls[0];
+ expect(args.length).toBe(3);
+ expect(args[0]).toMatch(/^bot\//);
+ expect(args[1]).toBe(ContentType.JSON);
+
+ const row = JSON.parse(args[2] as string);
+ expect(row.botId).toEqual(bot.id);
+ expect(row.hl7MessageType).toEqual('ACK');
+ expect(row.hl7Version).toEqual('2.6.1');
+ });
+
+ test('Execute without code', async () => {
+ // Create a bot with empty code
+ const res1 = await request(app)
+ .post(`/fhir/R4/Bot`)
+ .set('Content-Type', ContentType.FHIR_JSON)
+ .set('Authorization', 'Bearer ' + accessToken)
+ .send({
+ resourceType: 'Bot',
+ name: 'Test Bot',
+ code: '',
+ });
+ expect(res1.status).toBe(201);
+ const bot = res1.body as Bot;
+
+ // Execute the bot
+ const res2 = await request(app)
+ .post(`/fhir/R4/Bot/${bot.id}/$execute`)
+ .set('Content-Type', ContentType.FHIR_JSON)
+ .set('Authorization', 'Bearer ' + accessToken)
+ .send({});
+ expect(res2.status).toBe(400);
+ });
+
+ test('Unsupported runtime version', async () => {
+ const res1 = await request(app)
+ .post(`/fhir/R4/Bot`)
+ .set('Content-Type', ContentType.FHIR_JSON)
+ .set('Authorization', 'Bearer ' + accessToken)
+ .send({
+ resourceType: 'Bot',
+ name: 'Test Bot',
+ runtimeVersion: 'unsupported',
+ });
+ expect(res1.status).toBe(201);
+ const bot = res1.body as Bot;
+
+ // Step 2: Publish the bot
+ const res2 = await request(app)
+ .post(`/fhir/R4/Bot/${bot.id}/$deploy`)
+ .set('Content-Type', ContentType.FHIR_JSON)
+ .set('Authorization', 'Bearer ' + accessToken)
+ .send({
+ code: `
+ export async function handler() {
+ console.log('input', input);
+ return input;
+ }
+ `,
+ });
+ expect(res2.status).toBe(200);
+
+ // Step 3: Execute the bot
+ const res3 = await request(app)
+ .post(`/fhir/R4/Bot/${bot.id}/$execute`)
+ .set('Content-Type', ContentType.FHIR_JSON)
+ .set('Authorization', 'Bearer ' + accessToken)
+ .send({});
+ expect(res3.status).toBe(400);
+ });
+
+ test('Get function name', async () => {
+ const config = getConfig();
+ const normalBot: Bot = { resourceType: 'Bot', id: '123' };
+ const customBot: Bot = {
+ resourceType: 'Bot',
+ id: '456',
+ identifier: [{ system: 'https://medplum.com/bot-external-function-id', value: 'custom' }],
+ };
+
+ expect(getLambdaFunctionName(normalBot)).toEqual('medplum-bot-lambda-123');
+ expect(getLambdaFunctionName(customBot)).toEqual('medplum-bot-lambda-456');
+
+ // Temporarily enable custom bot support
+ config.botCustomFunctionsEnabled = true;
+ expect(getLambdaFunctionName(normalBot)).toEqual('medplum-bot-lambda-123');
+ expect(getLambdaFunctionName(customBot)).toEqual('custom');
+ config.botCustomFunctionsEnabled = false;
+ });
+
+ test('Execute by identifier', async () => {
+ const res = await request(app)
+ .post(`/fhir/R4/Bot/$execute?identifier=${bot.identifier?.[0]?.system}|${bot.identifier?.[0]?.value}`)
+ .set('Content-Type', ContentType.TEXT)
+ .set('Authorization', 'Bearer ' + accessToken)
+ .send('input');
+ expect(res.status).toBe(200);
+ expect(res.headers['content-type']).toBe('text/plain; charset=utf-8');
+ expect(res.text).toEqual('input');
+ });
+});
diff --git a/packages/server/src/cloud/aws/execute.ts b/packages/server/src/cloud/aws/execute.ts
new file mode 100644
index 0000000000..50e28d0dd7
--- /dev/null
+++ b/packages/server/src/cloud/aws/execute.ts
@@ -0,0 +1,108 @@
+import { InvokeCommand, LambdaClient } from '@aws-sdk/client-lambda';
+import { Hl7Message, createReference, getIdentifier, normalizeErrorString } from '@medplum/core';
+import { Bot } from '@medplum/fhirtypes';
+import { TextDecoder, TextEncoder } from 'util';
+import { getConfig } from '../../config';
+import { BotExecutionContext, BotExecutionResult } from '../../fhir/operations/execute';
+
+/**
+ * Executes a Bot in an AWS Lambda.
+ * @param request - The bot request.
+ * @returns The bot execution result.
+ */
+export async function runInLambda(request: BotExecutionContext): Promise<BotExecutionResult> {
+ const { bot, accessToken, secrets, input, contentType, traceId } = request;
+ const config = getConfig();
+ const client = new LambdaClient({ region: config.awsRegion });
+ const name = getLambdaFunctionName(bot);
+ const payload = {
+ bot: createReference(bot),
+ baseUrl: config.baseUrl,
+ accessToken,
+ input: input instanceof Hl7Message ? input.toString() : input,
+ contentType,
+ secrets,
+ traceId,
+ };
+
+ // Build the command
+ const encoder = new TextEncoder();
+ const command = new InvokeCommand({
+ FunctionName: name,
+ InvocationType: 'RequestResponse',
+ LogType: 'Tail',
+ Payload: encoder.encode(JSON.stringify(payload)),
+ });
+
+ // Execute the command
+ try {
+ const response = await client.send(command);
+ const responseStr = response.Payload ? new TextDecoder().decode(response.Payload) : undefined;
+
+ // The response from AWS Lambda is always JSON, even if the function returns a string
+ // Therefore we always use JSON.parse to get the return value
+ // See: https://stackoverflow.com/a/49951946/2051724
+ const returnValue = responseStr ? JSON.parse(responseStr) : undefined;
+
+ return {
+ success: !response.FunctionError,
+ logResult: parseLambdaLog(response.LogResult as string),
+ returnValue,
+ };
+ } catch (err) {
+ return {
+ success: false,
+ logResult: normalizeErrorString(err),
+ };
+ }
+}
+
+/**
+ * Returns the AWS Lambda function name for the given bot.
+ * By default, the function name is based on the bot ID.
+ * If the bot has a custom function, and the server allows it, then that is used instead.
+ * @param bot - The Bot resource.
+ * @returns The AWS Lambda function name.
+ */
+export function getLambdaFunctionName(bot: Bot): string {
+ if (getConfig().botCustomFunctionsEnabled) {
+ const customFunction = getIdentifier(bot, 'https://medplum.com/bot-external-function-id');
+ if (customFunction) {
+ return customFunction;
+ }
+ }
+
+ // By default, use the bot ID as the Lambda function name
+ return `medplum-bot-lambda-${bot.id}`;
+}
+
+/**
+ * Parses the AWS Lambda log result.
+ *
+ * The raw logs include markup metadata such as timestamps and billing information.
+ *
+ * We only want to include the actual log contents in the AuditEvent,
+ * so we attempt to scrub away all of that extra metadata.
+ *
+ * See: https://docs.aws.amazon.com/lambda/latest/dg/nodejs-logging.html
+ * @param logResult - The raw log result from the AWS lambda event.
+ * @returns The parsed log result.
+ */
+function parseLambdaLog(logResult: string): string {
+ const logBuffer = Buffer.from(logResult, 'base64');
+ const log = logBuffer.toString('ascii');
+ const lines = log.split('\n');
+ const result = [];
+ for (const line of lines) {
+ if (line.startsWith('START RequestId: ')) {
+ // Ignore start line
+ continue;
+ }
+ if (line.startsWith('END RequestId: ') || line.startsWith('REPORT RequestId: ')) {
+ // Stop at end lines
+ break;
+ }
+ result.push(line);
+ }
+ return result.join('\n').trim();
+}
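
For reference, the `LogResult` mocked in the execute test above decodes to something like the block below, and `parseLambdaLog` keeps only the application output between the START and END/REPORT markers (shown as a sketch, since the function is module-private):

// What the base64 LogResult in the test above decodes to (roughly):
const decodedLogResult = [
  'START RequestId: 146fcfcf-c32b-43f5-82a6-ee0f3132d873 Version: $LATEST',
  '2022-05-30T16:12:22.685Z\t146fcfcf-c32b-43f5-82a6-ee0f3132d873\tINFO test',
  'END RequestId: 146fcfcf-c32b-43f5-82a6-ee0f3132d873',
  'REPORT RequestId: 146fcfcf-c32b-43f5-82a6-ee0f3132d873',
].join('\n');
// parseLambdaLog(Buffer.from(decodedLogResult, 'ascii').toString('base64'))
// returns only the application output line:
//   '2022-05-30T16:12:22.685Z\t146fcfcf-c32b-43f5-82a6-ee0f3132d873\tINFO test'
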
diff --git a/packages/server/src/fhir/signer.md b/packages/server/src/cloud/aws/signer.md
similarity index 100%
rename from packages/server/src/fhir/signer.md
rename to packages/server/src/cloud/aws/signer.md
diff --git a/packages/server/src/fhir/signer.test.ts b/packages/server/src/cloud/aws/signer.test.ts
similarity index 93%
rename from packages/server/src/fhir/signer.test.ts
rename to packages/server/src/cloud/aws/signer.test.ts
index f86ec8ddda..c4b4f698c4 100644
--- a/packages/server/src/fhir/signer.test.ts
+++ b/packages/server/src/cloud/aws/signer.test.ts
@@ -1,6 +1,6 @@
import { Binary } from '@medplum/fhirtypes';
import { randomUUID } from 'crypto';
-import { loadTestConfig } from '../config';
+import { loadTestConfig } from '../../config';
import { getPresignedUrl } from './signer';
describe('Signer', () => {
diff --git a/packages/server/src/fhir/signer.ts b/packages/server/src/cloud/aws/signer.ts
similarity index 95%
rename from packages/server/src/fhir/signer.ts
rename to packages/server/src/cloud/aws/signer.ts
index 0bcb896eaf..e33001b78f 100644
--- a/packages/server/src/fhir/signer.ts
+++ b/packages/server/src/cloud/aws/signer.ts
@@ -1,6 +1,6 @@
import { getSignedUrl } from '@aws-sdk/cloudfront-signer';
import { Binary } from '@medplum/fhirtypes';
-import { getConfig } from '../config';
+import { getConfig } from '../../config';
/**
* Returns a presigned URL for the Binary resource content.
diff --git a/packages/server/src/cloud/aws/storage.test.ts b/packages/server/src/cloud/aws/storage.test.ts
new file mode 100644
index 0000000000..050ebb705f
--- /dev/null
+++ b/packages/server/src/cloud/aws/storage.test.ts
@@ -0,0 +1,188 @@
+import { CopyObjectCommand, GetObjectCommand, PutObjectCommand, S3Client } from '@aws-sdk/client-s3';
+import { ContentType } from '@medplum/core';
+import { Binary } from '@medplum/fhirtypes';
+import { sdkStreamMixin } from '@smithy/util-stream';
+import { AwsClientStub, mockClient } from 'aws-sdk-client-mock';
+import 'aws-sdk-client-mock-jest';
+import { Request } from 'express';
+import internal, { Readable } from 'stream';
+import { loadTestConfig } from '../../config';
+import { getBinaryStorage, initBinaryStorage } from '../../fhir/storage';
+
+describe('Storage', () => {
+ let mockS3Client: AwsClientStub<S3Client>;
+
+ beforeAll(async () => {
+ await loadTestConfig();
+ });
+
+ beforeEach(() => {
+ mockS3Client = mockClient(S3Client);
+ });
+
+ afterEach(() => {
+ mockS3Client.restore();
+ });
+
+ test('Undefined binary storage', () => {
+ initBinaryStorage('binary');
+ expect(() => getBinaryStorage()).toThrow();
+ });
+
+ test('S3 storage', async () => {
+ initBinaryStorage('s3:foo');
+
+ const storage = getBinaryStorage();
+ expect(storage).toBeDefined();
+
+ // Write a file
+ const binary = {
+ resourceType: 'Binary',
+ id: '123',
+ meta: {
+ versionId: '456',
+ },
+ } as Binary;
+ const req = new Readable();
+ req.push('foo');
+ req.push(null);
+ (req as any).headers = {};
+
+ const sdkStream = sdkStreamMixin(req);
+ mockS3Client.on(GetObjectCommand).resolves({ Body: sdkStream });
+
+ await storage.writeBinary(binary, 'test.txt', ContentType.TEXT, req as Request);
+
+ expect(mockS3Client.send.callCount).toBe(1);
+ expect(mockS3Client).toReceiveCommandWith(PutObjectCommand, {
+ Bucket: 'foo',
+ Key: 'binary/123/456',
+ ContentType: ContentType.TEXT,
+ });
+
+ // Read a file
+ const stream = await storage.readBinary(binary);
+ expect(stream).toBeDefined();
+ expect(mockS3Client).toHaveReceivedCommand(GetObjectCommand);
+ });
+
+ test('Missing metadata', async () => {
+ initBinaryStorage('s3:foo');
+
+ const storage = getBinaryStorage();
+ expect(storage).toBeDefined();
+
+ // Write a file
+ const binary = {
+ resourceType: 'Binary',
+ id: '123',
+ meta: {
+ versionId: '456',
+ },
+ } as Binary;
+ const req = new Readable();
+ req.push('foo');
+ req.push(null);
+ (req as any).headers = {};
+
+ const sdkStream = sdkStreamMixin(req);
+ mockS3Client.on(GetObjectCommand).resolves({ Body: sdkStream });
+
+ await storage.writeBinary(binary, undefined, undefined, req as Request);
+ expect(mockS3Client.send.callCount).toBe(1);
+ expect(mockS3Client).toReceiveCommandWith(PutObjectCommand, {
+ Bucket: 'foo',
+ Key: 'binary/123/456',
+ ContentType: 'application/octet-stream',
+ });
+
+ // Read a file
+ const stream = await storage.readBinary(binary);
+ expect(stream).toBeDefined();
+ expect(mockS3Client).toHaveReceivedCommand(GetObjectCommand);
+ });
+
+ test('Invalid file extension', async () => {
+ initBinaryStorage('s3:foo');
+
+ const storage = getBinaryStorage();
+ expect(storage).toBeDefined();
+
+ const binary = null as unknown as Binary;
+ const stream = null as unknown as internal.Readable;
+ try {
+ await storage.writeBinary(binary, 'test.exe', ContentType.TEXT, stream);
+ fail('Expected error');
+ } catch (err) {
+ expect((err as Error).message).toEqual('Invalid file extension');
+ }
+ expect(mockS3Client).not.toHaveReceivedCommand(PutObjectCommand);
+ });
+
+ test('Invalid content type', async () => {
+ initBinaryStorage('s3:foo');
+
+ const storage = getBinaryStorage();
+ expect(storage).toBeDefined();
+
+ const binary = null as unknown as Binary;
+ const stream = null as unknown as internal.Readable;
+ try {
+ await storage.writeBinary(binary, 'test.sh', 'application/x-sh', stream);
+ fail('Expected error');
+ } catch (err) {
+ expect((err as Error).message).toEqual('Invalid content type');
+ }
+ expect(mockS3Client).not.toHaveReceivedCommand(PutObjectCommand);
+ });
+
+ test('Copy S3 object', async () => {
+ initBinaryStorage('s3:foo');
+
+ const storage = getBinaryStorage();
+ expect(storage).toBeDefined();
+
+ // Write a file
+ const binary = {
+ resourceType: 'Binary',
+ id: '123',
+ meta: {
+ versionId: '456',
+ },
+ } as Binary;
+ const req = new Readable();
+ req.push('foo');
+ req.push(null);
+ (req as any).headers = {};
+
+ const sdkStream = sdkStreamMixin(req);
+ mockS3Client.on(GetObjectCommand).resolves({ Body: sdkStream });
+
+ await storage.writeBinary(binary, 'test.txt', ContentType.TEXT, req as Request);
+
+ expect(mockS3Client.send.callCount).toBe(1);
+ expect(mockS3Client).toReceiveCommandWith(PutObjectCommand, {
+ Bucket: 'foo',
+ Key: 'binary/123/456',
+ ContentType: ContentType.TEXT,
+ });
+ mockS3Client.reset();
+
+ // Copy the object
+ const destinationBinary = {
+ resourceType: 'Binary',
+ id: '789',
+ meta: {
+ versionId: '012',
+ },
+ } as Binary;
+ await storage.copyBinary(binary, destinationBinary);
+
+ expect(mockS3Client.send.callCount).toBe(1);
+ expect(mockS3Client).toReceiveCommandWith(CopyObjectCommand, {
+ CopySource: 'foo/binary/123/456',
+ Bucket: 'foo',
+ Key: 'binary/789/012',
+ });
+ });
+});
diff --git a/packages/server/src/cloud/aws/storage.ts b/packages/server/src/cloud/aws/storage.ts
new file mode 100644
index 0000000000..a237889ac9
--- /dev/null
+++ b/packages/server/src/cloud/aws/storage.ts
@@ -0,0 +1,133 @@
+import { CopyObjectCommand, GetObjectCommand, S3Client } from '@aws-sdk/client-s3';
+import { getSignedUrl } from '@aws-sdk/cloudfront-signer';
+import { Upload } from '@aws-sdk/lib-storage';
+import { Binary } from '@medplum/fhirtypes';
+import { Readable } from 'stream';
+import { getConfig } from '../../config';
+import { BinarySource, BinaryStorage, checkFileMetadata } from '../../fhir/storage';
+
+/**
+ * The S3Storage class stores binary data in an AWS S3 bucket.
+ * Files are stored in bucket/binary/binary.id/binary.meta.versionId.
+ */
+export class S3Storage implements BinaryStorage {
+ private readonly client: S3Client;
+ private readonly bucket: string;
+
+ constructor(bucket: string) {
+ this.client = new S3Client({ region: getConfig().awsRegion });
+ this.bucket = bucket;
+ }
+
+ /**
+ * Writes a binary blob to S3.
+ * @param binary - The binary resource destination.
+ * @param filename - Optional binary filename.
+ * @param contentType - Optional binary content type.
+ * @param stream - The Node.js stream of readable content.
+ * @returns Promise that resolves when the write is complete.
+ */
+ writeBinary(
+ binary: Binary,
+ filename: string | undefined,
+ contentType: string | undefined,
+ stream: BinarySource
+ ): Promise<void> {
+ checkFileMetadata(filename, contentType);
+ return this.writeFile(this.getKey(binary), contentType, stream);
+ }
+
+ /**
+ * Writes a file to S3.
+ *
+ * Early implementations used the simple "PutObjectCommand" to write the blob to S3.
+ * However, PutObjectCommand does not support streaming.
+ *
+ * We now use the @aws-sdk/lib-storage package.
+ *
+ * Learn more:
+ * https://github.com/aws/aws-sdk-js-v3/blob/main/UPGRADING.md#s3-multipart-upload
+ * https://github.com/aws/aws-sdk-js-v3/tree/main/lib/lib-storage
+ *
+ * Be mindful of Cache-Control settings.
+ *
+ * Because we use signed URLs intended for one hour use,
+ * we set "max-age" to 1 hour = 3600 seconds.
+ *
+ * But we want CloudFront to cache the response for 1 day,
+ * so we set "s-maxage" to 1 day = 86400 seconds.
+ *
+ * Learn more:
+ * https://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/Expiration.html
+ * @param key - The S3 key.
+ * @param contentType - Optional binary content type.
+ * @param stream - The Node.js stream of readable content.
+ */
+ async writeFile(key: string, contentType: string | undefined, stream: BinarySource): Promise<void> {
+ const upload = new Upload({
+ params: {
+ Bucket: this.bucket,
+ Key: key,
+ CacheControl: 'max-age=3600, s-maxage=86400',
+ ContentType: contentType ?? 'application/octet-stream',
+ Body: stream,
+ },
+ client: this.client,
+ queueSize: 3,
+ });
+
+ await upload.done();
+ }
+
+ async readBinary(binary: Binary): Promise<Readable> {
+ const output = await this.client.send(
+ new GetObjectCommand({
+ Bucket: this.bucket,
+ Key: this.getKey(binary),
+ })
+ );
+ return output.Body as Readable;
+ }
+
+ async copyBinary(sourceBinary: Binary, destinationBinary: Binary): Promise<void> {
+ await this.copyFile(this.getKey(sourceBinary), this.getKey(destinationBinary));
+ }
+
+ async copyFile(sourceKey: string, destinationKey: string): Promise<void> {
+ await this.client.send(
+ new CopyObjectCommand({
+ CopySource: `${this.bucket}/${sourceKey}`,
+ Bucket: this.bucket,
+ Key: destinationKey,
+ })
+ );
+ }
+
+ /**
+ * Returns a presigned URL for the Binary resource content.
+ *
+ * Reference:
+ * https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/modules/_aws_sdk_cloudfront_signer.html
+ *
+ * @param binary - Binary resource.
+ * @returns Presigned URL to access the binary data.
+ */
+ getPresignedUrl(binary: Binary): string {
+ const config = getConfig();
+ const storageBaseUrl = config.storageBaseUrl;
+ const unsignedUrl = `${storageBaseUrl}${binary.id}/${binary.meta?.versionId}`;
+ const dateLessThan = new Date();
+ dateLessThan.setHours(dateLessThan.getHours() + 1);
+ return getSignedUrl({
+ url: unsignedUrl,
+ keyPairId: config.signingKeyId,
+ dateLessThan: dateLessThan.toISOString(),
+ privateKey: config.signingKey,
+ passphrase: config.signingKeyPassphrase,
+ });
+ }
+
+ private getKey(binary: Binary): string {
+ return 'binary/' + binary.id + '/' + binary.meta?.versionId;
+ }
+}
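For reference, a minimal standalone sketch (not part of this patch) of how the one-hour signed-URL expiry above lines up with the `max-age=3600, s-maxage=86400` Cache-Control header. The key pair ID, private key, and URL below are placeholders, not real configuration values:

```ts
import { getSignedUrl } from '@aws-sdk/cloudfront-signer';

// Placeholder values -- not taken from any real Medplum configuration.
const keyPairId = 'EXAMPLE_KEY_PAIR_ID';
const privateKey = '-----BEGIN PRIVATE KEY-----\n...\n-----END PRIVATE KEY-----';

// Signed URLs expire after 1 hour, matching "max-age=3600" for the client,
// while CloudFront itself may keep the object cached for 1 day ("s-maxage=86400").
const dateLessThan = new Date(Date.now() + 60 * 60 * 1000).toISOString();

const signedUrl = getSignedUrl({
  url: 'https://storage.example.com/binary/123/456',
  keyPairId,
  dateLessThan,
  privateKey,
});

console.log(signedUrl); // ...?Expires=...&Signature=...&Key-Pair-Id=...
```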
diff --git a/packages/server/src/config.test.ts b/packages/server/src/config.test.ts
index 158816b8bc..05c383f522 100644
--- a/packages/server/src/config.test.ts
+++ b/packages/server/src/config.test.ts
@@ -1,43 +1,7 @@
-import { GetSecretValueCommand, SecretsManagerClient } from '@aws-sdk/client-secrets-manager';
-import { GetParametersByPathCommand, SSMClient } from '@aws-sdk/client-ssm';
-import { AwsClientStub, mockClient } from 'aws-sdk-client-mock';
-import 'aws-sdk-client-mock-jest';
import fs from 'fs';
import { getConfig, loadConfig } from './config';
describe('Config', () => {
- let mockSSMClient: AwsClientStub<SSMClient>;
- let mockSecretsManagerClient: AwsClientStub<SecretsManagerClient>;
-
- beforeEach(() => {
- mockSSMClient = mockClient(SSMClient);
- mockSecretsManagerClient = mockClient(SecretsManagerClient);
-
- mockSecretsManagerClient.on(GetSecretValueCommand).resolves({
- SecretString: JSON.stringify({ host: 'host', port: 123 }),
- });
-
- mockSSMClient.on(GetParametersByPathCommand).resolves({
- Parameters: [
- { Name: 'baseUrl', Value: 'https://www.example.com/' },
- { Name: 'database.ssl.require', Value: 'true' },
- { Name: 'database.ssl.rejectUnauthorized', Value: 'true' },
- { Name: 'database.ssl.ca', Value: 'DatabaseSslCa' },
- { Name: 'DatabaseSecrets', Value: 'DatabaseSecretsArn' },
- { Name: 'RedisSecrets', Value: 'RedisSecretsArn' },
- { Name: 'port', Value: '8080' },
- { Name: 'botCustomFunctionsEnabled', Value: 'true' },
- { Name: 'logAuditEvents', Value: 'true' },
- { Name: 'registerEnabled', Value: 'false' },
- ],
- });
- });
-
- afterEach(() => {
- mockSSMClient.restore();
- mockSecretsManagerClient.restore();
- });
-
test('Unrecognized config', async () => {
await expect(loadConfig('unrecognized')).rejects.toThrow();
});
@@ -53,38 +17,6 @@ describe('Config', () => {
expect(getConfig()).toBe(config);
});
- test('Load AWS config', async () => {
- const config = await loadConfig('aws:test');
- expect(config).toBeDefined();
- expect(config.baseUrl).toBeDefined();
- expect(config.port).toEqual(8080);
- expect(config.botCustomFunctionsEnabled).toEqual(true);
- expect(config.logAuditEvents).toEqual(true);
- expect(config.registerEnabled).toEqual(false);
- expect(config.database).toBeDefined();
- expect(config.database.ssl).toBeDefined();
- expect(config.database.ssl?.require).toEqual(true);
- expect(config.database.ssl?.rejectUnauthorized).toEqual(true);
- expect(config.database.ssl?.ca).toEqual('DatabaseSslCa');
- expect(getConfig()).toBe(config);
- expect(mockSSMClient).toReceiveCommand(GetParametersByPathCommand);
- });
-
- test('Load region AWS config', async () => {
- const config = await loadConfig('aws:ap-southeast-2:test');
- expect(config).toBeDefined();
- expect(config.baseUrl).toBeDefined();
- expect(config.port).toEqual(8080);
- expect(getConfig()).toBe(config);
- expect(mockSecretsManagerClient).toReceiveCommand(GetSecretValueCommand);
- expect(mockSecretsManagerClient).toReceiveCommandWith(GetSecretValueCommand, {
- SecretId: 'DatabaseSecretsArn',
- });
- expect(mockSecretsManagerClient).toReceiveCommandWith(GetSecretValueCommand, {
- SecretId: 'RedisSecretsArn',
- });
- });
-
test('Load env config', async () => {
process.env.MEDPLUM_BASE_URL = 'http://localhost:3000';
process.env.MEDPLUM_PORT = '3000';
diff --git a/packages/server/src/config.ts b/packages/server/src/config.ts
index 0bc5f9d5b9..76d5a43556 100644
--- a/packages/server/src/config.ts
+++ b/packages/server/src/config.ts
@@ -1,10 +1,9 @@
-import { GetSecretValueCommand, SecretsManagerClient } from '@aws-sdk/client-secrets-manager';
-import { GetParametersByPathCommand, Parameter, SSMClient } from '@aws-sdk/client-ssm';
import { splitN } from '@medplum/core';
import { KeepJobs } from 'bullmq';
import { mkdtempSync, readFileSync } from 'fs';
import { tmpdir } from 'os';
import { join, resolve } from 'path';
+import { loadAwsConfig } from './cloud/aws/config';
const DEFAULT_AWS_REGION = 'us-east-1';
@@ -24,9 +23,11 @@ export interface MedplumServerConfig {
signingKeyId: string;
signingKeyPassphrase: string;
supportEmail: string;
+ approvedSenderEmails?: string;
database: MedplumDatabaseConfig;
databaseProxyEndpoint?: string;
redis: MedplumRedisConfig;
+ emailProvider?: 'none' | 'awsses' | 'smtp';
smtp?: MedplumSmtpConfig;
bullmq?: MedplumBullmqConfig;
googleClientId?: string;
@@ -52,6 +53,17 @@ export interface MedplumServerConfig {
heartbeatEnabled?: boolean;
accurateCountThreshold: number;
defaultBotRuntimeVersion: 'awslambda' | 'vmcontext';
+ defaultProjectFeatures?:
+ | (
+ | 'email'
+ | 'bots'
+ | 'cron'
+ | 'google-auth-required'
+ | 'graphql-introspection'
+ | 'terminology'
+ | 'websocket-subscriptions'
+ )[]
+ | undefined;
/** Temporary feature flag, to be removed */
chainedSearchWithReferenceTables?: boolean;
@@ -163,10 +175,13 @@ export async function loadTestConfig(): Promise<MedplumServerConfig> {
config.binaryStorage = 'file:' + mkdtempSync(join(tmpdir(), 'medplum-temp-storage'));
config.allowedOrigins = undefined;
config.database.host = process.env['POSTGRES_HOST'] ?? 'localhost';
- config.database.port = process.env['POSTGRES_PORT'] ? parseInt(process.env['POSTGRES_PORT'], 10) : 5432;
+ config.database.port = process.env['POSTGRES_PORT'] ? Number.parseInt(process.env['POSTGRES_PORT'], 10) : 5432;
config.database.dbname = 'medplum_test';
config.redis.db = 7; // Select logical DB `7` so we don't collide with existing dev Redis cache.
config.redis.password = process.env['REDIS_PASSWORD_DISABLED_IN_TESTS'] ? undefined : config.redis.password;
+ config.approvedSenderEmails = 'no-reply@example.com';
+ config.emailProvider = 'none';
+ config.logLevel = 'error';
return config;
}
@@ -222,97 +237,6 @@ async function loadFileConfig(path: string): Promise<MedplumServerConfig> {
return JSON.parse(readFileSync(resolve(__dirname, '../', path), { encoding: 'utf8' }));
}
-/**
- * Loads configuration settings from AWS SSM Parameter Store.
- * @param path - The AWS SSM Parameter Store path prefix.
- * @returns The loaded configuration.
- */
-async function loadAwsConfig(path: string): Promise<MedplumServerConfig> {
- let region = DEFAULT_AWS_REGION;
- if (path.includes(':')) {
- [region, path] = splitN(path, ':', 2);
- }
-
- const client = new SSMClient({ region });
- const config: Record<string, any> = {};
- const parameters = [] as Parameter[];
- let nextToken: string | undefined;
- do {
- const response = await client.send(
- new GetParametersByPathCommand({
- Path: path,
- NextToken: nextToken,
- WithDecryption: true,
- })
- );
- if (response.Parameters) {
- parameters.push(...response.Parameters);
- }
- nextToken = response.NextToken;
- } while (nextToken);
-
- // Load special AWS Secrets Manager secrets first
- for (const param of parameters) {
- const key = (param.Name as string).replace(path, '');
- const value = param.Value as string;
- if (key === 'DatabaseSecrets') {
- config['database'] = await loadAwsSecrets(region, value);
- } else if (key === 'RedisSecrets') {
- config['redis'] = await loadAwsSecrets(region, value);
- }
- }
-
- // Then load other parameters, which may override the secrets
- for (const param of parameters) {
- const key = (param.Name as string).replace(path, '');
- const value = param.Value as string;
- setValue(config, key, value);
- }
-
- return config as MedplumServerConfig;
-}
-
-/**
- * Returns the AWS Database Secret data as a JSON map.
- * @param region - The AWS region.
- * @param secretId - Secret ARN
- * @returns The secret data as a JSON map.
- */
-async function loadAwsSecrets(region: string, secretId: string): Promise<Record<string, any> | undefined> {
- const client = new SecretsManagerClient({ region });
- const result = await client.send(new GetSecretValueCommand({ SecretId: secretId }));
-
- if (!result.SecretString) {
- return undefined;
- }
-
- return JSON.parse(result.SecretString);
-}
-
-function setValue(config: MedplumDatabaseConfig, key: string, value: string): void {
- const keySegments = key.split('.');
- let obj = config as Record<string, any>;
-
- while (keySegments.length > 1) {
- const segment = keySegments.shift() as string;
- if (!obj[segment]) {
- obj[segment] = {};
- }
- obj = obj[segment] as Record<string, any>;
- }
-
- let parsedValue: any = value;
- if (isIntegerConfig(key)) {
- parsedValue = parseInt(value, 10);
- } else if (isBooleanConfig(key)) {
- parsedValue = value === 'true';
- } else if (isObjectConfig(key)) {
- parsedValue = JSON.parse(value);
- }
-
- obj[keySegments[0]] = parsedValue;
-}
-
/**
* Adds default values to the config.
* @param config - The input config as loaded from the config file.
@@ -334,6 +258,8 @@ function addDefaults(config: MedplumServerConfig): MedplumServerConfig {
config.shutdownTimeoutMilliseconds = config.shutdownTimeoutMilliseconds ?? 30000;
config.accurateCountThreshold = config.accurateCountThreshold ?? 1000000;
config.defaultBotRuntimeVersion = config.defaultBotRuntimeVersion ?? 'awslambda';
+ config.defaultProjectFeatures = config.defaultProjectFeatures ?? [];
+ config.emailProvider = config.emailProvider || (config.smtp ? 'smtp' : 'awsses');
return config;
}
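A small self-contained sketch of the `emailProvider` defaulting added above, with the config shape trimmed to just the fields involved (it is not the full MedplumServerConfig):

```ts
// Sketch only: an explicit setting wins; otherwise SMTP config implies 'smtp',
// and everything else falls back to AWS SES, matching the addDefaults() change above.
interface EmailConfigSketch {
  emailProvider?: 'none' | 'awsses' | 'smtp';
  smtp?: { host: string };
}

function resolveEmailProvider(config: EmailConfigSketch): 'none' | 'awsses' | 'smtp' {
  return config.emailProvider || (config.smtp ? 'smtp' : 'awsses');
}

console.log(resolveEmailProvider({}));                          // 'awsses'
console.log(resolveEmailProvider({ smtp: { host: 'mail' } }));  // 'smtp'
console.log(resolveEmailProvider({ emailProvider: 'none' }));   // 'none'
```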
diff --git a/packages/server/src/context.test.ts b/packages/server/src/context.test.ts
index fd139a7da2..35df84789b 100644
--- a/packages/server/src/context.test.ts
+++ b/packages/server/src/context.test.ts
@@ -1,4 +1,5 @@
import { Request } from 'express';
+import { loadTestConfig } from './config';
import {
RequestContext,
buildTracingExtension,
@@ -12,6 +13,10 @@ import {
import { withTestContext } from './test.setup';
describe('RequestContext', () => {
+ beforeAll(async () => {
+ await loadTestConfig();
+ });
+
test('tryGetRequestContext', async () => {
expect(tryGetRequestContext()).toBeUndefined();
withTestContext(() => expect(tryGetRequestContext()).toBeDefined());
diff --git a/packages/server/src/context.ts b/packages/server/src/context.ts
index 2717e8f577..bfeb604736 100644
--- a/packages/server/src/context.ts
+++ b/packages/server/src/context.ts
@@ -1,9 +1,12 @@
-import { LogLevel, Logger, ProfileResource, isUUID } from '@medplum/core';
+import { LogLevel, Logger, ProfileResource, isUUID, parseLogLevel } from '@medplum/core';
import { Extension, Login, Project, ProjectMembership, Reference } from '@medplum/fhirtypes';
import { AsyncLocalStorage } from 'async_hooks';
import { randomUUID } from 'crypto';
import { NextFunction, Request, Response } from 'express';
+import { getConfig } from './config';
+import { getRepoForLogin } from './fhir/accesspolicy';
import { Repository, getSystemRepo } from './fhir/repo';
+import { authenticateTokenImpl, isExtendedMode } from './oauth/middleware';
import { parseTraceparent } from './traceparent';
export class RequestContext {
@@ -14,9 +17,7 @@ export class RequestContext {
constructor(requestId: string, traceId: string, logger?: Logger) {
this.requestId = requestId;
this.traceId = traceId;
- this.logger =
- logger ??
- new Logger(write, { requestId, traceId }, process.env.NODE_ENV === 'test' ? LogLevel.ERROR : LogLevel.INFO);
+ this.logger = logger ?? new Logger(write, { requestId, traceId }, parseLogLevel(getConfig().logLevel ?? 'info'));
}
close(): void {
@@ -44,10 +45,9 @@ export class AuthenticatedRequestContext extends RequestContext {
project: Project,
membership: ProjectMembership,
repo: Repository,
- logger?: Logger,
accessToken?: string
) {
- super(ctx.requestId, ctx.traceId, logger);
+ super(ctx.requestId, ctx.traceId, ctx.logger);
this.repo = repo;
this.project = project;
@@ -63,12 +63,11 @@ export class AuthenticatedRequestContext extends RequestContext {
static system(ctx?: { requestId?: string; traceId?: string }): AuthenticatedRequestContext {
return new AuthenticatedRequestContext(
- new RequestContext(ctx?.requestId ?? '', ctx?.traceId ?? ''),
+ new RequestContext(ctx?.requestId ?? '', ctx?.traceId ?? '', systemLogger),
{} as unknown as Login,
{} as unknown as Project,
{} as unknown as ProjectMembership,
- getSystemRepo(),
- systemLogger
+ getSystemRepo()
);
}
}
@@ -97,7 +96,17 @@ export function getAuthenticatedContext(): AuthenticatedRequestContext {
export async function attachRequestContext(req: Request, res: Response, next: NextFunction): Promise<void> {
const { requestId, traceId } = requestIds(req);
- requestContextStore.run(new RequestContext(requestId, traceId), () => next());
+
+ let ctx = new RequestContext(requestId, traceId);
+
+ const authState = await authenticateTokenImpl(req);
+ if (authState) {
+ const { login, membership, project, accessToken } = authState;
+ const repo = await getRepoForLogin(login, membership, project, isExtendedMode(req));
+ ctx = new AuthenticatedRequestContext(ctx, login, project, membership, repo, accessToken);
+ }
+
+ requestContextStore.run(ctx, () => next());
}
export function closeRequestContext(): void {
diff --git a/packages/server/src/email/email.test.ts b/packages/server/src/email/email.test.ts
index 498e39a317..b3ab339d40 100644
--- a/packages/server/src/email/email.test.ts
+++ b/packages/server/src/email/email.test.ts
@@ -21,6 +21,7 @@ describe('Email', () => {
beforeAll(async () => {
const config = await loadTestConfig();
+ config.emailProvider = 'awsses';
config.storageBaseUrl = 'https://storage.example.com/';
await initAppServices(config);
});
@@ -39,8 +40,10 @@ describe('Email', () => {
});
test('Send text email', async () => {
+ const fromAddress = 'gibberish@example.com';
const toAddresses = 'alice@example.com';
await sendEmail(systemRepo, {
+ from: fromAddress,
to: toAddresses,
cc: 'bob@example.com',
subject: 'Hello',
@@ -52,6 +55,32 @@ describe('Email', () => {
const inputArgs = mockSESv2Client.commandCalls(SendEmailCommand)[0].args[0].input;
+ expect(inputArgs?.FromEmailAddress).toBe(getConfig().supportEmail);
+ expect(inputArgs?.Destination?.ToAddresses?.[0] ?? '').toBe('alice@example.com');
+ expect(inputArgs?.Destination?.CcAddresses?.[0] ?? '').toBe('bob@example.com');
+
+ const parsed = await simpleParser(Readable.from(inputArgs?.Content?.Raw?.Data ?? ''));
+ expect(parsed.subject).toBe('Hello');
+ expect(parsed.text).toBe('Hello Alice\n');
+ });
+
+ test('Send text email from approved sender', async () => {
+ const fromAddress = 'no-reply@example.com';
+ const toAddresses = 'alice@example.com';
+ await sendEmail(systemRepo, {
+ from: fromAddress,
+ to: toAddresses,
+ cc: 'bob@example.com',
+ subject: 'Hello',
+ text: 'Hello Alice',
+ });
+
+ expect(mockSESv2Client.send.callCount).toBe(1);
+ expect(mockSESv2Client).toHaveReceivedCommandTimes(SendEmailCommand, 1);
+
+ const inputArgs = mockSESv2Client.commandCalls(SendEmailCommand)[0].args[0].input;
+
+ expect(inputArgs?.FromEmailAddress).toBe(fromAddress);
expect(inputArgs?.Destination?.ToAddresses?.[0] ?? '').toBe('alice@example.com');
expect(inputArgs?.Destination?.CcAddresses?.[0] ?? '').toBe('bob@example.com');
diff --git a/packages/server/src/email/email.ts b/packages/server/src/email/email.ts
index 595cc1d2cd..33b7f40d6d 100644
--- a/packages/server/src/email/email.ts
+++ b/packages/server/src/email/email.ts
@@ -1,13 +1,12 @@
-import { SendEmailCommand, SESv2Client } from '@aws-sdk/client-sesv2';
-import { badRequest, normalizeErrorString, OperationOutcomeError } from '@medplum/core';
import { Binary } from '@medplum/fhirtypes';
import { createTransport } from 'nodemailer';
-import MailComposer from 'nodemailer/lib/mail-composer';
-import Mail, { Address } from 'nodemailer/lib/mailer';
+import Mail from 'nodemailer/lib/mailer';
+import { sendEmailViaSes } from '../cloud/aws/email';
import { getConfig, MedplumSmtpConfig } from '../config';
import { Repository } from '../fhir/repo';
import { getBinaryStorage } from '../fhir/storage';
import { globalLogger } from '../logger';
+import { getFromAddress } from './utils';
/**
* Sends an email using the AWS SES service.
@@ -18,10 +17,8 @@ import { globalLogger } from '../logger';
*/
export async function sendEmail(repo: Repository, options: Mail.Options): Promise<void> {
const config = getConfig();
- const fromAddress = config.supportEmail;
- const toAddresses = buildAddresses(options.to);
+ const fromAddress = getFromAddress(options);
- // Always set the from and sender to the support email address
options.from = fromAddress;
options.sender = fromAddress;
@@ -33,65 +30,15 @@ export async function sendEmail(repo: Repository, options: Mail.Options): Promis
// "if set to true then fails with an error when a node tries to load content from a file"
options.disableFileAccess = true;
- globalLogger.info('Sending email', { to: toAddresses?.join(', '), subject: options.subject });
+ globalLogger.info('Sending email', { to: options.to, subject: options.subject });
if (config.smtp) {
await sendEmailViaSmpt(config.smtp, options);
- } else {
+ } else if (config.emailProvider === 'awsses') {
await sendEmailViaSes(options);
}
}
-/**
- * Converts nodemailer addresses to an array of strings.
- * @param input - nodemailer address input.
- * @returns Array of string addresses.
- */
-function buildAddresses(input: string | Address | (string | Address)[] | undefined): string[] | undefined {
- if (!input) {
- return undefined;
- }
- if (Array.isArray(input)) {
- return input.map(addressToString) as string[];
- }
- return [addressToString(input) as string];
-}
-
-/**
- * Converts a nodemailer address to a string.
- * @param address - nodemailer address input.
- * @returns String address.
- */
-function addressToString(address: Address | string | undefined): string | undefined {
- if (address) {
- if (typeof address === 'string') {
- return address;
- }
- if (typeof address === 'object' && 'address' in address) {
- return address.address;
- }
- }
- return undefined;
-}
-
-/**
- * Builds a raw email message using nodemailer MailComposer.
- * @param options - The nodemailer options.
- * @returns The raw email message.
- */
-function buildRawMessage(options: Mail.Options): Promise<Uint8Array> {
- const msg = new MailComposer(options);
- return new Promise((resolve, reject) => {
- msg.compile().build((err, message) => {
- if (err) {
- reject(err);
- return;
- }
- resolve(message);
- });
- });
-}
-
/**
* Validates an array of nodemailer attachments.
* @param repo - The user repository.
@@ -151,39 +98,3 @@ async function sendEmailViaSmpt(smtpConfig: MedplumSmtpConfig, options: Mail.Opt
});
await transport.sendMail(options);
}
-
-/**
- * Sends an email via AWS SES.
- * @param options - The nodemailer options.
- */
-async function sendEmailViaSes(options: Mail.Options): Promise<void> {
- const config = getConfig();
- const fromAddress = config.supportEmail;
- const toAddresses = buildAddresses(options.to);
- const ccAddresses = buildAddresses(options.cc);
- const bccAddresses = buildAddresses(options.bcc);
-
- let msg: Uint8Array;
- try {
- msg = await buildRawMessage(options);
- } catch (err) {
- throw new OperationOutcomeError(badRequest('Invalid email options: ' + normalizeErrorString(err)), err);
- }
-
- const sesClient = new SESv2Client({ region: config.awsRegion });
- await sesClient.send(
- new SendEmailCommand({
- FromEmailAddress: fromAddress,
- Destination: {
- ToAddresses: toAddresses,
- CcAddresses: ccAddresses,
- BccAddresses: bccAddresses,
- },
- Content: {
- Raw: {
- Data: msg,
- },
- },
- })
- );
-}
diff --git a/packages/server/src/email/routes.test.ts b/packages/server/src/email/routes.test.ts
index 9615153759..e4192fa3cf 100644
--- a/packages/server/src/email/routes.test.ts
+++ b/packages/server/src/email/routes.test.ts
@@ -10,13 +10,12 @@ import { initTestAuth } from '../test.setup';
jest.mock('@aws-sdk/client-sesv2');
const app = express();
-let accessToken: string;
describe('Email API Routes', () => {
beforeAll(async () => {
const config = await loadTestConfig();
+ config.emailProvider = 'awsses';
await initApp(app, config);
- accessToken = await initTestAuth();
});
beforeEach(() => {
@@ -39,7 +38,22 @@ describe('Email API Routes', () => {
expect(SendEmailCommand).toHaveBeenCalledTimes(0);
});
+ test('Forbidden for non project admin', async () => {
+ const accessToken = await initTestAuth({ membership: { admin: false } });
+ const res = await request(app)
+ .post(`/email/v1/send`)
+ .set('Authorization', 'Bearer ' + accessToken)
+ .set('Content-Type', ContentType.JSON)
+ .send({
+ to: 'alice@example.com',
+ subject: 'Subject',
+ text: 'Body',
+ });
+ expect(res.status).toBe(403);
+ });
+
test('Wrong content type', async () => {
+ const accessToken = await initTestAuth({ membership: { admin: true } });
const res = await request(app)
.post(`/email/v1/send`)
.set('Authorization', 'Bearer ' + accessToken)
@@ -48,7 +62,8 @@ describe('Email API Routes', () => {
expect(res.status).toBe(400);
});
- test('Send email', async () => {
+ test('Send email as project admin', async () => {
+ const accessToken = await initTestAuth({ membership: { admin: true } });
const res = await request(app)
.post(`/email/v1/send`)
.set('Authorization', 'Bearer ' + accessToken)
diff --git a/packages/server/src/email/routes.ts b/packages/server/src/email/routes.ts
index 480ccc3c22..ab02d780d5 100644
--- a/packages/server/src/email/routes.ts
+++ b/packages/server/src/email/routes.ts
@@ -2,11 +2,11 @@ import { allOk, ContentType, forbidden } from '@medplum/core';
import { Request, Response, Router } from 'express';
import { body, check } from 'express-validator';
import { asyncWrap } from '../async';
+import { getAuthenticatedContext } from '../context';
import { sendOutcome } from '../fhir/outcomes';
import { authenticateRequest } from '../oauth/middleware';
-import { sendEmail } from './email';
-import { getAuthenticatedContext } from '../context';
import { makeValidationMiddleware } from '../util/validator';
+import { sendEmail } from './email';
export const emailRouter = Router();
emailRouter.use(authenticateRequest);
@@ -24,7 +24,7 @@ emailRouter.post(
const ctx = getAuthenticatedContext();
// Make sure the user project has the email feature enabled
- if (!ctx.project.features?.includes('email')) {
+ if (!ctx.project.features?.includes('email') || !ctx.membership.admin) {
sendOutcome(res, forbidden);
return;
}
diff --git a/packages/server/src/email/utils.ts b/packages/server/src/email/utils.ts
new file mode 100644
index 0000000000..df5a96b5b2
--- /dev/null
+++ b/packages/server/src/email/utils.ts
@@ -0,0 +1,73 @@
+import MailComposer from 'nodemailer/lib/mail-composer';
+import Mail, { Address } from 'nodemailer/lib/mailer';
+import { getConfig } from '../config';
+
+/**
+ * Returns the from address to use.
+ * If the user specified a from address, it must be an approved sender.
+ * Otherwise uses the support email address.
+ * @param options - The user specified nodemailer options.
+ * @returns The from address to use.
+ */
+export function getFromAddress(options: Mail.Options): string {
+ const config = getConfig();
+
+ if (options.from) {
+ const fromAddress = addressToString(options.from);
+ if (fromAddress && config.approvedSenderEmails?.split(',')?.includes(fromAddress)) {
+ return fromAddress;
+ }
+ }
+
+ return config.supportEmail;
+}
+
+/**
+ * Converts nodemailer addresses to an array of strings.
+ * @param input - nodemailer address input.
+ * @returns Array of string addresses.
+ */
+export function buildAddresses(input: string | Address | (string | Address)[] | undefined): string[] | undefined {
+ if (!input) {
+ return undefined;
+ }
+ if (Array.isArray(input)) {
+ return input.map(addressToString) as string[];
+ }
+ return [addressToString(input) as string];
+}
+
+/**
+ * Converts a nodemailer address to a string.
+ * @param address - nodemailer address input.
+ * @returns String address.
+ */
+export function addressToString(address: Address | string | undefined): string | undefined {
+ if (address) {
+ if (typeof address === 'string') {
+ return address;
+ }
+ if (typeof address === 'object' && 'address' in address) {
+ return address.address;
+ }
+ }
+ return undefined;
+}
+
+/**
+ * Builds a raw email message using nodemailer MailComposer.
+ * @param options - The nodemailer options.
+ * @returns The raw email message.
+ */
+export function buildRawMessage(options: Mail.Options): Promise<Uint8Array> {
+ const msg = new MailComposer(options);
+ return new Promise((resolve, reject) => {
+ msg.compile().build((err, message) => {
+ if (err) {
+ reject(err);
+ return;
+ }
+ resolve(message);
+ });
+ });
+}
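A rough, self-contained illustration of the approved-sender rule implemented in `getFromAddress` above; the helper name and addresses here are hypothetical, with the config values passed in as plain parameters instead of being read from `getConfig()`:

```ts
// Sketch of the approved-sender check: only addresses listed in the
// comma-separated approvedSenderEmails setting may be used as "from";
// anything else falls back to the support address.
function pickFromAddress(
  requestedFrom: string | undefined,
  approvedSenderEmails: string | undefined,
  supportEmail: string
): string {
  if (requestedFrom && approvedSenderEmails?.split(',').includes(requestedFrom)) {
    return requestedFrom;
  }
  return supportEmail;
}

console.log(pickFromAddress('no-reply@example.com', 'no-reply@example.com', 'support@example.com'));
// -> 'no-reply@example.com' (approved sender is kept)
console.log(pickFromAddress('gibberish@example.com', 'no-reply@example.com', 'support@example.com'));
// -> 'support@example.com' (unapproved sender is replaced)
```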
diff --git a/packages/server/src/fhir/binary.ts b/packages/server/src/fhir/binary.ts
index 203f0b077b..ad3f6ffdc6 100644
--- a/packages/server/src/fhir/binary.ts
+++ b/packages/server/src/fhir/binary.ts
@@ -8,7 +8,6 @@ import { getAuthenticatedContext, getLogger } from '../context';
import { authenticateRequest } from '../oauth/middleware';
import { sendOutcome } from './outcomes';
import { sendResponse, sendResponseHeaders } from './response';
-import { getPresignedUrl } from './signer';
import { BinarySource, getBinaryStorage } from './storage';
export const binaryRouter = Router().use(authenticateRequest);
@@ -119,7 +118,7 @@ async function handleBinaryWriteRequest(req: Request, res: Response): Promise<void> {
+ const app = express();
+ let accessToken: string;
+ const agents: Agent[] = [];
+ let connectedAgent: Agent;
+ let disabledAgent: Agent;
+
+ beforeAll(async () => {
+ const config = await loadTestConfig();
+ await initApp(app, config);
+ accessToken = await initTestAuth();
+
+ const promises = Array.from({ length: NUM_DEFAULT_AGENTS }) as Promise<request.Response>[];
+ for (let i = 0; i < NUM_DEFAULT_AGENTS; i++) {
+ promises[i] = request(app)
+ .post('/fhir/R4/Agent')
+ .set('Content-Type', ContentType.FHIR_JSON)
+ .set('Authorization', 'Bearer ' + accessToken)
+ .send({
+ resourceType: 'Agent',
+ identifier: [{ system: 'https://example.com/agent', value: randomUUID() }],
+ name: `Test Agent ${i + 1}`,
+ status: 'active',
+ });
+ }
+
+ const responses = await Promise.all(promises);
+ for (let i = 0; i < NUM_DEFAULT_AGENTS; i++) {
+ expect(responses[i].status).toBe(201);
+ agents[i] = responses[i].body;
+ }
+
+ const agent1Res = await request(app)
+ .post('/fhir/R4/Agent')
+ .set('Authorization', 'Bearer ' + accessToken)
+ .type('json')
+ .send({
+ identifier: [{ system: 'https://example.com/agent', value: randomUUID() }],
+ resourceType: 'Agent',
+ name: 'Medplum Agent',
+ status: 'active',
+ } satisfies Agent);
+ expect(agent1Res.status).toEqual(201);
+
+ const agent2Res = await request(app)
+ .post('/fhir/R4/Agent')
+ .set('Authorization', 'Bearer ' + accessToken)
+ .type('json')
+ .send({
+ identifier: [{ system: 'https://example.com/agent', value: randomUUID() }],
+ resourceType: 'Agent',
+ name: 'Old Medplum Agent',
+ status: 'off',
+ } satisfies Agent);
+ expect(agent2Res.status).toEqual(201);
+
+ connectedAgent = agent1Res.body;
+ disabledAgent = agent2Res.body;
+
+ // Emulate a connection
+ await getRedis().set(
+ `medplum:agent:${connectedAgent.id}:info`,
+ JSON.stringify({
+ status: AgentConnectionState.CONNECTED,
+ version: '3.1.4',
+ lastUpdated: new Date().toISOString(),
+ }),
+ 'EX',
+ 60
+ );
+
+ // Emulate a disconnected agent
+ await getRedis().set(
+ `medplum:agent:${disabledAgent.id}:info`,
+ JSON.stringify({
+ status: AgentConnectionState.DISCONNECTED,
+ version: '3.1.2',
+ lastUpdated: new Date().toISOString(),
+ }),
+ 'EX',
+ 60
+ );
+ });
+
+ afterAll(async () => {
+ await shutdownApp();
+ });
+
+ test('Get all agent statuses', async () => {
+ const res = await request(app)
+ .get('/fhir/R4/Agent/$bulk-status')
+ .set('Authorization', 'Bearer ' + accessToken);
+ expect(res.status).toBe(200);
+
+ const bundle = res.body as Bundle;
+ expect(bundle.resourceType).toBe('Bundle');
+ expect(bundle.entry).toHaveLength(4);
+
+ const bundleEntries = bundle.entry as BundleEntry[];
+ for (const entry of bundleEntries) {
+ const parameters = entry.resource as Parameters;
+ expect(parameters).toBeDefined();
+ expect(parameters.resourceType).toEqual('Parameters');
+ expect(parameters.parameter?.length).toEqual(2);
+ }
+
+ expectBundleToContainStatusEntry(bundle, connectedAgent, {
+ status: AgentConnectionState.CONNECTED,
+ version: '3.1.4',
+ lastUpdated: expect.any(String),
+ });
+
+ expectBundleToContainStatusEntry(bundle, disabledAgent, {
+ status: AgentConnectionState.DISCONNECTED,
+ version: '3.1.2',
+ lastUpdated: expect.any(String),
+ });
+
+ expectBundleToContainStatusEntry(bundle, agents[0], {
+ status: AgentConnectionState.UNKNOWN,
+ version: 'unknown',
+ });
+ });
+
+ test('Get agent statuses for agent with name containing Medplum', async () => {
+ const res = await request(app)
+ .get('/fhir/R4/Agent/$bulk-status')
+ .query({ 'name:contains': 'Medplum' })
+ .set('Authorization', 'Bearer ' + accessToken);
+ expect(res.status).toBe(200);
+
+ const bundle = res.body as Bundle;
+ expect(bundle.resourceType).toBe('Bundle');
+ expect(bundle.entry).toHaveLength(2);
+
+ const bundleEntries = bundle.entry as BundleEntry[];
+ for (let i = 0; i < 2; i++) {
+ const parameters = bundleEntries[i].resource as Parameters;
+ expect(parameters).toBeDefined();
+ expect(parameters.resourceType).toEqual('Parameters');
+ expect(parameters.parameter?.length).toEqual(2);
+ }
+
+ expectBundleToContainStatusEntry(bundle, connectedAgent, {
+ status: AgentConnectionState.CONNECTED,
+ version: '3.1.4',
+ lastUpdated: expect.any(String),
+ });
+
+ expectBundleToContainStatusEntry(bundle, disabledAgent, {
+ status: AgentConnectionState.DISCONNECTED,
+ version: '3.1.2',
+ lastUpdated: expect.any(String),
+ });
+ });
+
+ test('Get agent statuses for ACTIVE agents with name containing Medplum', async () => {
+ const res = await request(app)
+ .get('/fhir/R4/Agent/$bulk-status')
+ .query({ 'name:contains': 'Medplum', status: 'active' })
+ .set('Authorization', 'Bearer ' + accessToken);
+ expect(res.status).toBe(200);
+
+ const bundle = res.body as Bundle;
+ expect(bundle.resourceType).toBe('Bundle');
+ expect(bundle.entry).toHaveLength(1);
+
+ const bundleEntries = bundle.entry as BundleEntry[];
+ for (let i = 0; i < 1; i++) {
+ const parameters = bundleEntries[i].resource as Parameters;
+ expect(parameters).toBeDefined();
+ expect(parameters.resourceType).toEqual('Parameters');
+ expect(parameters.parameter?.length).toEqual(2);
+ }
+
+ expectBundleToContainStatusEntry(bundle, connectedAgent, {
+ status: AgentConnectionState.CONNECTED,
+ version: '3.1.4',
+ lastUpdated: expect.any(String),
+ });
+ });
+
+ test('Get agent statuses -- no matching agents', async () => {
+ const res = await request(app)
+ .get('/fhir/R4/Agent/$bulk-status')
+ .query({ name: 'INVALID_AGENT', status: 'active' })
+ .set('Authorization', 'Bearer ' + accessToken);
+ expect(res.status).toBe(400);
+
+ expect(res.body).toMatchObject({
+ resourceType: 'OperationOutcome',
+ issue: expect.arrayContaining([
+ expect.objectContaining({ severity: 'error', code: 'invalid' }),
+ ]),
+ });
+ });
+
+ test('Get agent statuses -- invalid AgentInfo from Redis', async () => {
+ await getRedis().set(
+ `medplum:agent:${agents[1].id as string}:info`,
+ JSON.stringify({
+ version: '3.1.4',
+ lastUpdated: new Date().toISOString(),
+ }),
+ 'EX',
+ 60
+ );
+
+ const res = await request(app)
+ .get('/fhir/R4/Agent/$bulk-status')
+ .query({ name: 'Test Agent 2' })
+ .set('Authorization', 'Bearer ' + accessToken);
+ expect(res.status).toBe(200);
+
+ const bundle = res.body as Bundle;
+ expect(bundle.resourceType).toBe('Bundle');
+ expect(bundle.entry).toHaveLength(1);
+
+ expectBundleToContainOutcomeError(bundle, agents[1], {
+ issue: [expect.objectContaining({ severity: 'error', code: 'exception' })],
+ });
+
+ await getRedis().set(
+ `medplum:agent:${agents[1].id as string}:info`,
+ JSON.stringify({
+ status: AgentConnectionState.UNKNOWN,
+ version: 'unknown',
+ lastUpdated: new Date().toISOString(),
+ } satisfies AgentInfo),
+ 'EX',
+ 60
+ );
+ });
+
+ test('Get agent statuses -- `_count` exceeding max page size', async () => {
+ const res = await request(app)
+ .get('/fhir/R4/Agent/$bulk-status')
+ .query({ 'name:contains': 'Medplum', _count: MAX_AGENTS_PER_PAGE + 1 })
+ .set('Authorization', 'Bearer ' + accessToken);
+ expect(res.status).toBe(400);
+
+ expect(res.body).toMatchObject({
+ resourceType: 'OperationOutcome',
+ issue: expect.arrayContaining([
+ expect.objectContaining({ severity: 'error', code: 'invalid' }),
+ ]),
+ });
+ });
+});
+
+function expectBundleToContainStatusEntry(bundle: Bundle, agent: Agent, info: AgentInfo): void {
+ const entries = bundle.entry as BundleEntry[];
+ expect(entries).toContainEqual({
+ resource: expect.objectContaining({
+ resourceType: 'Parameters',
+ parameter: expect.arrayContaining([
+ expect.objectContaining({
+ name: 'agent',
+ resource: expect.objectContaining(agent),
+ }),
+ expect.objectContaining({
+ name: 'result',
+ resource: expect.objectContaining({
+ resourceType: 'Parameters',
+ parameter: expect.arrayContaining([
+ expect.objectContaining({
+ name: 'status',
+ valueCode: info.status,
+ }),
+ expect.objectContaining({
+ name: 'version',
+ valueString: info.version,
+ }),
+ ...(info.lastUpdated !== undefined
+ ? [
+ expect.objectContaining({
+ name: 'lastUpdated',
+ valueInstant: info.lastUpdated,
+ }),
+ ]
+ : []),
+ ]),
+ }),
+ }),
+ ]),
+ }),
+ });
+}
+
+function expectBundleToContainOutcomeError(
+ bundle: Bundle,
+ agent: Agent,
+ outcome: Partial<OperationOutcome> & { issue: OperationOutcomeIssue[] }
+): void {
+ const entries = bundle.entry as BundleEntry[];
+ expect(entries).toContainEqual({
+ resource: expect.objectContaining({
+ resourceType: 'Parameters',
+ parameter: expect.arrayContaining([
+ expect.objectContaining({
+ name: 'agent',
+ resource: expect.objectContaining(agent),
+ }),
+ expect.objectContaining({
+ name: 'result',
+ resource: expect.objectContaining<Partial<OperationOutcome>>(outcome),
+ }),
+ ]),
+ }),
+ });
+}
diff --git a/packages/server/src/fhir/operations/agentbulkstatus.ts b/packages/server/src/fhir/operations/agentbulkstatus.ts
new file mode 100644
index 0000000000..23ab29a21d
--- /dev/null
+++ b/packages/server/src/fhir/operations/agentbulkstatus.ts
@@ -0,0 +1,82 @@
+import { allOk, badRequest, isOk, serverError } from '@medplum/core';
+import { FhirRequest, FhirResponse } from '@medplum/fhir-router';
+import { Agent, Bundle, BundleEntry, OperationDefinition, OperationOutcome, Parameters } from '@medplum/fhirtypes';
+import { getAuthenticatedContext } from '../../context';
+import { agentStatusHandler } from './agentstatus';
+import { getAgentsForRequest } from './agentutils';
+
+export const MAX_AGENTS_PER_PAGE = 100;
+
+export const operation: OperationDefinition = {
+ resourceType: 'OperationDefinition',
+ name: 'agent-bulk-status',
+ status: 'active',
+ kind: 'operation',
+ code: 'bulk-status',
+ experimental: true,
+ resource: ['Agent'],
+ system: false,
+ type: true,
+ instance: false,
+ parameter: [{ use: 'out', name: 'return', type: 'Bundle', min: 1, max: '1' }],
+};
+
+/**
+ * Handles HTTP requests for the Agent $bulk-status operation.
+ * First searches for all Agents matching the request query.
+ * Then fetches each matching agent's status from Redis via the $status handler.
+ * Returns a Bundle of Parameters resources, one entry per agent.
+ *
+ * @param req - The FHIR request.
+ * @returns The FHIR response.
+ */
+export async function agentBulkStatusHandler(req: FhirRequest): Promise<FhirResponse> {
+ const { repo } = getAuthenticatedContext();
+
+ if (req.query._count && Number.parseInt(req.query._count, 10) > MAX_AGENTS_PER_PAGE) {
+ return [badRequest(`'_count' of ${req.query._count} is greater than max of ${MAX_AGENTS_PER_PAGE}`)];
+ }
+
+ const agents = await getAgentsForRequest(req, repo);
+ if (!agents?.length) {
+ return [badRequest('No agent(s) for given query')];
+ }
+
+ const promises = agents.map((agent) => agentStatusHandler({ ...req, params: { id: agent.id as string } }));
+ const results = await Promise.allSettled(promises);
+ const entries = [] as BundleEntry[];
+ for (let i = 0; i < results.length; i++) {
+ const result = results[i];
+ if (result.status === 'rejected') {
+ entries.push(makeResultWrapperEntry(serverError(result.reason as Error), agents[i]));
+ continue;
+ }
+ const [outcome, params] = result.value;
+ if (!isOk(outcome)) {
+ entries.push(makeResultWrapperEntry(outcome, agents[i]));
+ continue;
+ }
+ entries.push(makeResultWrapperEntry(params as Parameters, agents[i]));
+ }
+
+ return [
+ allOk,
+ {
+ resourceType: 'Bundle',
+ type: 'collection',
+ entry: entries,
+ } satisfies Bundle,
+ ];
+}
+
+function makeResultWrapperEntry(result: Parameters | OperationOutcome, agent: Agent): BundleEntry {
+ return {
+ resource: {
+ resourceType: 'Parameters',
+ parameter: [
+ { name: 'agent', resource: agent },
+ { name: 'result', resource: result },
+ ],
+ },
+ };
+}
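A hedged example of calling the new operation over plain HTTP; the base URL and token are placeholders, and the query parameters simply reuse the Agent search parameters exercised in the tests above:

```ts
// Sketch: calling Agent/$bulk-status with a search filter and a page size
// below MAX_AGENTS_PER_PAGE. MEDPLUM_BASE_URL and ACCESS_TOKEN are placeholders.
const MEDPLUM_BASE_URL = 'https://api.example.com';
const ACCESS_TOKEN = '<access-token>';

async function fetchAgentStatuses(): Promise<unknown> {
  const params = new URLSearchParams({ 'name:contains': 'Medplum', _count: '50' });
  const res = await fetch(`${MEDPLUM_BASE_URL}/fhir/R4/Agent/$bulk-status?${params}`, {
    headers: { Authorization: `Bearer ${ACCESS_TOKEN}` },
  });
  if (!res.ok) {
    throw new Error(`Bulk status failed: ${res.status}`);
  }
  // The response body is a Bundle of Parameters, one entry per matched agent.
  return res.json();
}
```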
diff --git a/packages/server/src/fhir/operations/agentpush.test.ts b/packages/server/src/fhir/operations/agentpush.test.ts
index 111c8a9b6a..9101abe448 100644
--- a/packages/server/src/fhir/operations/agentpush.test.ts
+++ b/packages/server/src/fhir/operations/agentpush.test.ts
@@ -1,11 +1,20 @@
-import { allOk, ContentType, getReferenceString } from '@medplum/core';
-import { Agent, Device } from '@medplum/fhirtypes';
-import { randomUUID } from 'crypto';
+import {
+ AgentTransmitRequest,
+ AgentTransmitResponse,
+ allOk,
+ ContentType,
+ getReferenceString,
+ sleep,
+} from '@medplum/core';
+import { Agent, Device, OperationOutcome } from '@medplum/fhirtypes';
import express from 'express';
+import { randomUUID } from 'node:crypto';
import request from 'supertest';
import { initApp, shutdownApp } from '../../app';
import { loadTestConfig } from '../../config';
+import { getRedis } from '../../redis';
import { initTestAuth } from '../../test.setup';
+import { AgentPushParameters } from './agentpush';
const app = express();
let accessToken: string;
@@ -248,4 +257,201 @@ describe('Agent Push', () => {
expect(res.status).toBe(400);
expect(res.body.issue[0].details.text).toEqual('Invalid wait timeout');
});
+
+ test('Ping -- Successful ping to IP', async () => {
+ const redis = getRedis();
+ const publishSpy = jest.spyOn(redis, 'publish');
+
+ let resolve!: (value: request.Response) => void | PromiseLike<request.Response>;
+ let reject!: (err: Error) => void;
+
+ const deferredResponse = new Promise<request.Response>((_resolve, _reject) => {
+ resolve = _resolve;
+ reject = _reject;
+ });
+
+ request(app)
+ .post(`/fhir/R4/Agent/${agent.id}/$push`)
+ .set('Content-Type', ContentType.JSON)
+ .set('Authorization', 'Bearer ' + accessToken)
+ .send({
+ contentType: ContentType.PING,
+ body: 'PING',
+ destination: '8.8.8.8',
+ waitForResponse: true,
+ } satisfies AgentPushParameters)
+ .then(resolve)
+ .catch(reject);
+
+ let shouldThrow = false;
+ const timer = setTimeout(() => {
+ shouldThrow = true;
+ }, 3500);
+
+ while (!publishSpy.mock.lastCall) {
+ if (shouldThrow) {
+ throw new Error('Timeout');
+ }
+ await sleep(50);
+ }
+ clearTimeout(timer);
+
+ const transmitRequestStr = publishSpy.mock.lastCall?.[1]?.toString() as string;
+ expect(transmitRequestStr).toBeDefined();
+ const transmitRequest = JSON.parse(transmitRequestStr) as AgentTransmitRequest;
+
+ await getRedis().publish(
+ transmitRequest.callback as string,
+ JSON.stringify({
+ ...transmitRequest,
+ type: 'agent:transmit:response',
+ statusCode: 200,
+ contentType: ContentType.TEXT,
+ body: `
+PING 8.8.8.8 (8.8.8.8): 56 data bytes
+64 bytes from 8.8.8.8: icmp_seq=0 ttl=115 time=10.316 ms
+
+--- 8.8.8.8 ping statistics ---
+1 packets transmitted, 1 packets received, 0.0% packet loss
+round-trip min/avg/max/stddev = 10.316/10.316/10.316/nan ms`,
+ } satisfies AgentTransmitResponse)
+ );
+
+ const res = await deferredResponse;
+ expect(res.status).toEqual(200);
+ expect(res.text).toEqual(expect.stringMatching(/ping statistics/i));
+
+ publishSpy.mockRestore();
+ });
+
+ test('Ping -- Successful ping to hostname', async () => {
+ const redis = getRedis();
+ const publishSpy = jest.spyOn(redis, 'publish');
+
+ let resolve!: (value: request.Response) => void | PromiseLike<request.Response>;
+ let reject!: (err: Error) => void;
+
+ const deferredResponse = new Promise<request.Response>((_resolve, _reject) => {
+ resolve = _resolve;
+ reject = _reject;
+ });
+
+ request(app)
+ .post(`/fhir/R4/Agent/${agent.id}/$push`)
+ .set('Content-Type', ContentType.JSON)
+ .set('Authorization', 'Bearer ' + accessToken)
+ .send({
+ contentType: ContentType.PING,
+ body: 'PING',
+ destination: 'localhost',
+ waitForResponse: true,
+ } satisfies AgentPushParameters)
+ .then(resolve)
+ .catch(reject);
+
+ let shouldThrow = false;
+ const timer = setTimeout(() => {
+ shouldThrow = true;
+ }, 3500);
+
+ while (!publishSpy.mock.lastCall) {
+ if (shouldThrow) {
+ throw new Error('Timeout');
+ }
+ await sleep(50);
+ }
+ clearTimeout(timer);
+
+ const transmitRequestStr = publishSpy.mock.lastCall?.[1]?.toString() as string;
+ expect(transmitRequestStr).toBeDefined();
+ const transmitRequest = JSON.parse(transmitRequestStr) as AgentTransmitRequest;
+
+ await getRedis().publish(
+ transmitRequest.callback as string,
+ JSON.stringify({
+ ...transmitRequest,
+ type: 'agent:transmit:response',
+ statusCode: 200,
+ contentType: ContentType.TEXT,
+ body: `
+PING localhost (127.0.0.1): 56 data bytes
+64 bytes from 127.0.0.1: icmp_seq=0 ttl=64 time=0.081 ms
+
+--- localhost ping statistics ---
+1 packets transmitted, 1 packets received, 0.0% packet loss
+round-trip min/avg/max/stddev = 0.081/0.081/0.081/nan ms`,
+ } satisfies AgentTransmitResponse)
+ );
+
+ const res = await deferredResponse;
+ expect(res.status).toEqual(200);
+ expect(res.text).toEqual(expect.stringMatching(/ping statistics/i));
+
+ publishSpy.mockRestore();
+ });
+
+ test('Ping -- Error', async () => {
+ const redis = getRedis();
+ const publishSpy = jest.spyOn(redis, 'publish');
+
+ let resolve!: (value: request.Response) => void | PromiseLike<request.Response>;
+ let reject!: (err: Error) => void;
+
+ const deferredResponse = new Promise<request.Response>((_resolve, _reject) => {
+ resolve = _resolve;
+ reject = _reject;
+ });
+
+ request(app)
+ .post(`/fhir/R4/Agent/${agent.id}/$push`)
+ .set('Content-Type', ContentType.JSON)
+ .set('Authorization', 'Bearer ' + accessToken)
+ .send({
+ contentType: ContentType.PING,
+ body: 'PING',
+ destination: '8.8.8.8',
+ waitForResponse: true,
+ } satisfies AgentPushParameters)
+ .then(resolve)
+ .catch(reject);
+
+ let shouldThrow = false;
+ const timer = setTimeout(() => {
+ shouldThrow = true;
+ }, 3500);
+
+ while (!publishSpy.mock.lastCall) {
+ if (shouldThrow) {
+ throw new Error('Timeout');
+ }
+ await sleep(50);
+ }
+ clearTimeout(timer);
+
+ const transmitRequestStr = publishSpy.mock.lastCall?.[1]?.toString() as string;
+ expect(transmitRequestStr).toBeDefined();
+ const transmitRequest = JSON.parse(transmitRequestStr) as AgentTransmitRequest;
+
+ await getRedis().publish(
+ transmitRequest.callback as string,
+ JSON.stringify({
+ ...transmitRequest,
+ type: 'agent:transmit:response',
+ statusCode: 500,
+ contentType: ContentType.TEXT,
+ body: 'Error: Unable to ping "8.8.8.8"',
+ } satisfies AgentTransmitResponse)
+ );
+
+ const res = await deferredResponse;
+ expect(res.status).toEqual(500);
+
+ const body = res.body as OperationOutcome;
+ expect(body).toBeDefined();
+ expect(body.issue[0].severity).toEqual('error');
+ expect(body.issue[0]?.details?.text).toEqual(expect.stringMatching(/internal server error/i));
+ expect(body.issue[0]?.diagnostics).toEqual(expect.stringMatching(/unable to ping/i));
+
+ publishSpy.mockRestore();
+ });
});
diff --git a/packages/server/src/fhir/operations/agentpush.ts b/packages/server/src/fhir/operations/agentpush.ts
index 9f9c340355..4bdf0c7113 100644
--- a/packages/server/src/fhir/operations/agentpush.ts
+++ b/packages/server/src/fhir/operations/agentpush.ts
@@ -1,10 +1,18 @@
-import { AgentTransmitRequest, allOk, badRequest, BaseAgentRequestMessage, getReferenceString } from '@medplum/core';
+import {
+ AgentTransmitRequest,
+ AgentTransmitResponse,
+ allOk,
+ badRequest,
+ BaseAgentRequestMessage,
+ getReferenceString,
+ serverError,
+} from '@medplum/core';
import { Agent } from '@medplum/fhirtypes';
import { Request, Response } from 'express';
import { randomUUID } from 'node:crypto';
import { asyncWrap } from '../../async';
import { getAuthenticatedContext } from '../../context';
-import { getRedis } from '../../redis';
+import { getRedis, getRedisSubscriber } from '../../redis';
import { sendOutcome } from '../outcomes';
import { getAgentForRequest, getDevice } from './agentutils';
import { parseParameters } from './utils/parameters';
@@ -58,7 +66,7 @@ export const agentPushHandler = asyncWrap(async (req: Request, res: Response) =>
return;
}
- const device = await getDevice(repo, params.destination);
+ const device = await getDevice(repo, params);
if (!device) {
sendOutcome(res, badRequest('Destination device not found'));
return;
@@ -86,11 +94,18 @@ export const agentPushHandler = asyncWrap(async (req: Request, res: Response) =>
// Otherwise, open a new redis connection in "subscribe" state
message.callback = getReferenceString(agent) + '-' + randomUUID();
- const redisSubscriber = getRedis().duplicate();
+ const redisSubscriber = getRedisSubscriber();
await redisSubscriber.subscribe(message.callback);
redisSubscriber.on('message', (_channel: string, message: string) => {
- const response = JSON.parse(message);
- res.status(200).type(response.contentType).send(response.body);
+ const response = JSON.parse(message) as AgentTransmitResponse;
+ if (response.statusCode && response.statusCode >= 400) {
+ sendOutcome(res, serverError(new Error(response.body)));
+ } else {
+ res
+ .status(response.statusCode ?? 200)
+ .type(response.contentType)
+ .send(response.body);
+ }
cleanup();
});
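For clarity, a tiny standalone sketch of the response handling added above, with simplified stand-in types rather than the real `AgentTransmitResponse`:

```ts
// Sketch of the branch added above: agent responses with statusCode >= 400 are
// surfaced as an OperationOutcome via sendOutcome(serverError(...)); everything
// else is relayed with its own status code (defaulting to 200) and content type.
interface SimplifiedTransmitResponse {
  statusCode?: number;
  contentType: string;
  body: string;
}

function classifyAgentResponse(response: SimplifiedTransmitResponse): 'outcome-error' | 'relay' {
  if (response.statusCode && response.statusCode >= 400) {
    return 'outcome-error'; // becomes a 500 OperationOutcome with the body as diagnostics
  }
  return 'relay';
}

console.log(classifyAgentResponse({ statusCode: 500, contentType: 'text/plain', body: 'Error' })); // 'outcome-error'
console.log(classifyAgentResponse({ contentType: 'text/plain', body: 'PONG' }));                   // 'relay'
```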
diff --git a/packages/server/src/fhir/operations/agentstatus.test.ts b/packages/server/src/fhir/operations/agentstatus.test.ts
index db592eec22..3a4242efba 100644
--- a/packages/server/src/fhir/operations/agentstatus.test.ts
+++ b/packages/server/src/fhir/operations/agentstatus.test.ts
@@ -3,6 +3,7 @@ import { Agent, Parameters } from '@medplum/fhirtypes';
import { randomUUID } from 'crypto';
import express from 'express';
import request from 'supertest';
+import { AgentConnectionState } from '../../agent/utils';
import { initApp, shutdownApp } from '../../app';
import { loadTestConfig } from '../../config';
import { getRedis } from '../../redis';
@@ -44,14 +45,16 @@ describe('Agent Status', () => {
const parameters1 = res1.body as Parameters;
expect(parameters1.resourceType).toBe('Parameters');
- expect(parameters1.parameter).toHaveLength(1);
- expect(parameters1.parameter?.find((p) => p.name === 'status')?.valueCode).toBe('unknown');
+ expect(parameters1.parameter).toHaveLength(2);
+ expect(parameters1.parameter?.find((p) => p.name === 'status')?.valueCode).toBe(AgentConnectionState.UNKNOWN);
+ expect(parameters1.parameter?.find((p) => p.name === 'version')?.valueString).toBe('unknown');
// Emulate a connection
await getRedis().set(
- `medplum:agent:${agent.id}:status`,
+ `medplum:agent:${agent.id}:info`,
JSON.stringify({
- status: 'connected',
+ status: AgentConnectionState.CONNECTED,
+ version: '3.1.4',
lastUpdated: new Date().toISOString(),
}),
'EX',
@@ -65,8 +68,9 @@ describe('Agent Status', () => {
const parameters2 = res2.body as Parameters;
expect(parameters2.resourceType).toBe('Parameters');
- expect(parameters2.parameter).toHaveLength(2);
- expect(parameters2.parameter?.find((p) => p.name === 'status')?.valueCode).toBe('connected');
+ expect(parameters2.parameter).toHaveLength(3);
+ expect(parameters2.parameter?.find((p) => p.name === 'status')?.valueCode).toBe(AgentConnectionState.CONNECTED);
+ expect(parameters2.parameter?.find((p) => p.name === 'version')?.valueString).toBe('3.1.4');
expect(parameters2.parameter?.find((p) => p.name === 'lastUpdated')?.valueInstant).toBeTruthy();
});
});
diff --git a/packages/server/src/fhir/operations/agentstatus.ts b/packages/server/src/fhir/operations/agentstatus.ts
index e8e1ba1e1b..b19fdd3974 100644
--- a/packages/server/src/fhir/operations/agentstatus.ts
+++ b/packages/server/src/fhir/operations/agentstatus.ts
@@ -1,16 +1,12 @@
import { allOk, badRequest } from '@medplum/core';
import { FhirRequest, FhirResponse } from '@medplum/fhir-router';
import { OperationDefinition } from '@medplum/fhirtypes';
+import { AgentConnectionState, AgentInfo } from '../../agent/utils';
import { getAuthenticatedContext } from '../../context';
import { getRedis } from '../../redis';
import { getAgentForRequest } from './agentutils';
import { buildOutputParameters } from './utils/parameters';
-interface AgentStatusOutput {
- status: string;
- lastUpdated?: string;
-}
-
const operation: OperationDefinition = {
resourceType: 'OperationDefinition',
name: 'agent-status',
@@ -24,6 +20,7 @@ const operation: OperationDefinition = {
instance: false,
parameter: [
{ use: 'out', name: 'status', type: 'code', min: 1, max: '1' },
+ { use: 'out', name: 'version', type: 'string', min: 1, max: '1' },
{ use: 'out', name: 'lastUpdated', type: 'instant', min: 0, max: '1' },
],
};
@@ -46,16 +43,16 @@ export async function agentStatusHandler(req: FhirRequest): Promise<FhirResponse> {
+/**
+ * Returns the Agents for a request.
+ *
+ * @param req - The FHIR request.
+ * @param repo - The repository.
+ * @returns The matching agents, or undefined if none are found.
+ */
+export async function getAgentsForRequest(req: FhirRequest, repo: Repository): Promise<Agent[] | undefined> {
+ return repo.searchResources(parseSearchRequest('Agent', req.query));
+}
+
+export async function getDevice(repo: Repository, params: AgentPushParameters): Promise<Device | undefined> {
+ const { destination, contentType } = params;
if (destination.startsWith('Device/')) {
try {
return await repo.readReference({ reference: destination });
@@ -51,7 +64,7 @@ export async function getDevice(repo: Repository, destination: string): Promise<Device | undefined> {
if (destination.startsWith('Device?')) {
return repo.searchOne(parseSearchRequest(destination));
}
- if (isIPv4(destination)) {
+ if (contentType === ContentType.PING && (isIPv4(destination) || isValidHostname(destination))) {
return { resourceType: 'Device', url: destination };
}
return undefined;
diff --git a/packages/server/src/fhir/operations/deploy.test.ts b/packages/server/src/fhir/operations/deploy.test.ts
index ff4d4d0090..d7a1f49642 100644
--- a/packages/server/src/fhir/operations/deploy.test.ts
+++ b/packages/server/src/fhir/operations/deploy.test.ts
@@ -1,16 +1,5 @@
-import {
- CreateFunctionCommand,
- GetFunctionCommand,
- GetFunctionConfigurationCommand,
- LambdaClient,
- ListLayerVersionsCommand,
- UpdateFunctionCodeCommand,
- UpdateFunctionConfigurationCommand,
-} from '@aws-sdk/client-lambda';
import { ContentType } from '@medplum/core';
import { Bot } from '@medplum/fhirtypes';
-import { AwsClientStub, mockClient } from 'aws-sdk-client-mock';
-import 'aws-sdk-client-mock-jest';
import { randomUUID } from 'crypto';
import express from 'express';
import request from 'supertest';
@@ -21,7 +10,6 @@ import { initTestAuth, withTestContext } from '../../test.setup';
const app = express();
let accessToken: string;
-let mockLambdaClient: AwsClientStub<LambdaClient>;
describe('Deploy', () => {
beforeAll(async () => {
@@ -34,142 +22,6 @@ describe('Deploy', () => {
await shutdownApp();
});
- beforeEach(() => {
- let created = false;
-
- mockLambdaClient = mockClient(LambdaClient);
-
- mockLambdaClient.on(CreateFunctionCommand).callsFake(({ FunctionName }) => {
- created = true;
-
- return {
- Configuration: {
- FunctionName,
- },
- };
- });
-
- mockLambdaClient.on(GetFunctionCommand).callsFake(({ FunctionName }) => {
- if (created) {
- return {
- Configuration: {
- FunctionName,
- },
- };
- }
-
- return {
- Configuration: {},
- };
- });
-
- mockLambdaClient.on(GetFunctionConfigurationCommand).callsFake(({ FunctionName }) => {
- return {
- FunctionName,
- Runtime: 'nodejs18.x',
- Handler: 'index.handler',
- State: 'Active',
- Layers: [
- {
- Arn: 'arn:aws:lambda:us-east-1:123456789012:layer:test-layer:1',
- },
- ],
- };
- });
-
- mockLambdaClient.on(ListLayerVersionsCommand).resolves({
- LayerVersions: [
- {
- LayerVersionArn: 'arn:aws:lambda:us-east-1:123456789012:layer:test-layer:1',
- },
- ],
- });
-
- mockLambdaClient.on(UpdateFunctionCodeCommand).callsFake(({ FunctionName }) => ({
- Configuration: {
- FunctionName,
- },
- }));
- });
-
- afterEach(() => {
- mockLambdaClient.restore();
- });
-
- test('Happy path', async () => {
- // Step 1: Create a bot
- const res1 = await request(app)
- .post(`/fhir/R4/Bot`)
- .set('Content-Type', ContentType.FHIR_JSON)
- .set('Authorization', 'Bearer ' + accessToken)
- .send({
- resourceType: 'Bot',
- name: 'Test Bot',
- runtimeVersion: 'awslambda',
- code: `
- export async function handler() {
- console.log('input', input);
- return input;
- }
- `,
- });
- expect(res1.status).toBe(201);
-
- const bot = res1.body as Bot;
- const name = `medplum-bot-lambda-${bot.id}`;
-
- // Step 2: Deploy the bot
- const res2 = await request(app)
- .post(`/fhir/R4/Bot/${bot.id}/$deploy`)
- .set('Content-Type', ContentType.FHIR_JSON)
- .set('Authorization', 'Bearer ' + accessToken)
- .send({
- code: `
- export async function handler() {
- console.log('input', input);
- return input;
- }
- `,
- });
- expect(res2.status).toBe(200);
-
- expect(mockLambdaClient).toHaveReceivedCommandTimes(GetFunctionCommand, 1);
- expect(mockLambdaClient).toHaveReceivedCommandTimes(ListLayerVersionsCommand, 1);
- expect(mockLambdaClient).toHaveReceivedCommandTimes(CreateFunctionCommand, 1);
- expect(mockLambdaClient).toHaveReceivedCommandWith(GetFunctionCommand, {
- FunctionName: name,
- });
- expect(mockLambdaClient).toHaveReceivedCommandWith(CreateFunctionCommand, {
- FunctionName: name,
- });
- mockLambdaClient.resetHistory();
-
- // Step 3: Deploy again to trigger the update path
- const res3 = await request(app)
- .post(`/fhir/R4/Bot/${bot.id}/$deploy`)
- .set('Content-Type', ContentType.FHIR_JSON)
- .set('Authorization', 'Bearer ' + accessToken)
- .send({
- code: `
- export async function handler() {
- console.log('input', input);
- return input;
- }
- `,
- filename: 'updated.js',
- });
- expect(res3.status).toBe(200);
-
- expect(mockLambdaClient).toHaveReceivedCommandTimes(GetFunctionCommand, 1);
- expect(mockLambdaClient).toHaveReceivedCommandTimes(ListLayerVersionsCommand, 1);
- expect(mockLambdaClient).toHaveReceivedCommandTimes(GetFunctionConfigurationCommand, 1);
- expect(mockLambdaClient).toHaveReceivedCommandTimes(UpdateFunctionConfigurationCommand, 0);
- expect(mockLambdaClient).toHaveReceivedCommandTimes(UpdateFunctionCodeCommand, 1);
- expect(mockLambdaClient).toHaveReceivedCommandWith(GetFunctionCommand, {
- FunctionName: name,
- });
- });
-
test('Deploy bot with missing code', async () => {
// Step 1: Create a bot
const res1 = await request(app)
@@ -201,91 +53,6 @@ describe('Deploy', () => {
expect(res2.body.issue[0].details.text).toEqual('Missing code');
});
- test('Deploy bot with lambda layer update', async () => {
- // When deploying a bot, we check if we need to update the bot configuration.
- // This test verifies that we correctly update the bot configuration when the lambda layer changes.
- // Step 1: Create a bot
- const res1 = await request(app)
- .post(`/fhir/R4/Bot`)
- .set('Content-Type', ContentType.FHIR_JSON)
- .set('Authorization', 'Bearer ' + accessToken)
- .send({
- resourceType: 'Bot',
- name: 'Test Bot',
- runtimeVersion: 'awslambda',
- code: `
- export async function handler() {
- console.log('input', input);
- return input;
- }
- `,
- });
- expect(res1.status).toBe(201);
-
- const bot = res1.body as Bot;
- const name = `medplum-bot-lambda-${bot.id}`;
-
- // Step 2: Deploy the bot
- const res2 = await request(app)
- .post(`/fhir/R4/Bot/${bot.id}/$deploy`)
- .set('Content-Type', ContentType.FHIR_JSON)
- .set('Authorization', 'Bearer ' + accessToken)
- .send({
- code: `
- export async function handler() {
- console.log('input', input);
- return input;
- }
- `,
- });
- expect(res2.status).toBe(200);
-
- expect(mockLambdaClient).toHaveReceivedCommandTimes(GetFunctionCommand, 1);
- expect(mockLambdaClient).toHaveReceivedCommandTimes(ListLayerVersionsCommand, 1);
- expect(mockLambdaClient).toHaveReceivedCommandTimes(CreateFunctionCommand, 1);
- expect(mockLambdaClient).toHaveReceivedCommandWith(GetFunctionCommand, {
- FunctionName: name,
- });
- expect(mockLambdaClient).toHaveReceivedCommandWith(CreateFunctionCommand, {
- FunctionName: name,
- });
- mockLambdaClient.resetHistory();
-
- // Step 3: Simulate releasing a new version of the lambda layer
- mockLambdaClient.on(ListLayerVersionsCommand).resolves({
- LayerVersions: [
- {
- LayerVersionArn: 'new-layer-version-arn',
- },
- ],
- });
-
- // Step 4: Deploy again to trigger the update path
- const res3 = await request(app)
- .post(`/fhir/R4/Bot/${bot.id}/$deploy`)
- .set('Content-Type', ContentType.FHIR_JSON)
- .set('Authorization', 'Bearer ' + accessToken)
- .send({
- code: `
- export async function handler() {
- console.log('input', input);
- return input;
- }
- `,
- filename: 'updated.js',
- });
- expect(res3.status).toBe(200);
-
- expect(mockLambdaClient).toHaveReceivedCommandTimes(GetFunctionCommand, 1);
- expect(mockLambdaClient).toHaveReceivedCommandTimes(ListLayerVersionsCommand, 1);
- expect(mockLambdaClient).toHaveReceivedCommandTimes(GetFunctionConfigurationCommand, 2);
- expect(mockLambdaClient).toHaveReceivedCommandTimes(UpdateFunctionConfigurationCommand, 1);
- expect(mockLambdaClient).toHaveReceivedCommandTimes(UpdateFunctionCodeCommand, 1);
- expect(mockLambdaClient).toHaveReceivedCommandWith(GetFunctionCommand, {
- FunctionName: name,
- });
- });
-
test('Bots not enabled', async () => {
// First, Alice creates a project
const { project, accessToken } = await withTestContext(() =>
diff --git a/packages/server/src/fhir/operations/deploy.ts b/packages/server/src/fhir/operations/deploy.ts
index 6b8f447587..b065908cd9 100644
--- a/packages/server/src/fhir/operations/deploy.ts
+++ b/packages/server/src/fhir/operations/deploy.ts
@@ -1,104 +1,13 @@
-import {
- CreateFunctionCommand,
- GetFunctionCommand,
- GetFunctionConfigurationCommand,
- GetFunctionConfigurationCommandOutput,
- LambdaClient,
- ListLayerVersionsCommand,
- PackageType,
- UpdateFunctionCodeCommand,
- UpdateFunctionConfigurationCommand,
-} from '@aws-sdk/client-lambda';
-import { ContentType, allOk, badRequest, getReferenceString, normalizeOperationOutcome, sleep } from '@medplum/core';
+import { ContentType, allOk, badRequest, getReferenceString, normalizeOperationOutcome } from '@medplum/core';
import { FhirRequest, FhirResponse } from '@medplum/fhir-router';
import { Binary, Bot } from '@medplum/fhirtypes';
-import { ConfiguredRetryStrategy } from '@smithy/util-retry';
-import JSZip from 'jszip';
import { Readable } from 'stream';
-import { getConfig } from '../../config';
-import { getAuthenticatedContext, getRequestContext } from '../../context';
+import { deployLambda } from '../../cloud/aws/deploy';
+import { getAuthenticatedContext } from '../../context';
import { getSystemRepo } from '../repo';
import { getBinaryStorage } from '../storage';
import { isBotEnabled } from './execute';
-const LAMBDA_RUNTIME = 'nodejs18.x';
-
-const LAMBDA_HANDLER = 'index.handler';
-
-const LAMBDA_MEMORY = 1024;
-
-const WRAPPER_CODE = `const { ContentType, Hl7Message, MedplumClient } = require("@medplum/core");
-const fetch = require("node-fetch");
-const PdfPrinter = require("pdfmake");
-const userCode = require("./user.js");
-
-exports.handler = async (event, context) => {
- const { bot, baseUrl, accessToken, contentType, secrets, traceId } = event;
- const medplum = new MedplumClient({
- baseUrl,
- fetch: function(url, options = {}) {
- options.headers ||= {};
- options.headers['X-Trace-Id'] = traceId;
- options.headers['traceparent'] = traceId;
- return fetch(url, options);
- },
- createPdf,
- });
- medplum.setAccessToken(accessToken);
- try {
- let input = event.input;
- if (contentType === ContentType.HL7_V2 && input) {
- input = Hl7Message.parse(input);
- }
- let result = await userCode.handler(medplum, { bot, input, contentType, secrets, traceId });
- if (contentType === ContentType.HL7_V2 && result) {
- result = result.toString();
- }
- return result;
- } catch (err) {
- if (err instanceof Error) {
- console.log("Unhandled error: " + err.message + "\\n" + err.stack);
- } else if (typeof err === "object") {
- console.log("Unhandled error: " + JSON.stringify(err, undefined, 2));
- } else {
- console.log("Unhandled error: " + err);
- }
- throw err;
- }
-};
-
-function createPdf(docDefinition, tableLayouts, fonts) {
- if (!fonts) {
- fonts = {
- Helvetica: {
- normal: 'Helvetica',
- bold: 'Helvetica-Bold',
- italics: 'Helvetica-Oblique',
- bolditalics: 'Helvetica-BoldOblique',
- },
- Roboto: {
- normal: '/opt/fonts/Roboto/Roboto-Regular.ttf',
- bold: '/opt/fonts/Roboto/Roboto-Medium.ttf',
- italics: '/opt/fonts/Roboto/Roboto-Italic.ttf',
- bolditalics: '/opt/fonts/Roboto/Roboto-MediumItalic.ttf'
- },
- Avenir: {
- normal: '/opt/fonts/Avenir/Avenir.ttf'
- }
- };
- }
- return new Promise((resolve, reject) => {
- const printer = new PdfPrinter(fonts);
- const pdfDoc = printer.createPdfKitDocument(docDefinition, { tableLayouts });
- const chunks = [];
- pdfDoc.on('data', (chunk) => chunks.push(chunk));
- pdfDoc.on('end', () => resolve(Buffer.concat(chunks)));
- pdfDoc.on('error', reject);
- pdfDoc.end();
- });
-}
-`;
-
export async function deployHandler(req: FhirRequest): Promise<FhirResponse> {
const ctx = getAuthenticatedContext();
const { id } = req.params;
@@ -151,168 +60,3 @@ export async function deployHandler(req: FhirRequest): Promise<FhirResponse> {
return [normalizeOperationOutcome(err)];
}
}
-
-async function deployLambda(bot: Bot, code: string): Promise<void> {
- const ctx = getRequestContext();
-
- // Create a new AWS Lambda client
- // Use a custom retry strategy to avoid throttling errors
- // This is especially important when updating lambdas which also
- // involve upgrading the layer version.
- const client = new LambdaClient({
- region: getConfig().awsRegion,
- retryStrategy: new ConfiguredRetryStrategy(
- 5, // max attempts
- (attempt: number) => 500 * 2 ** attempt // Exponential backoff
- ),
- });
-
- const name = `medplum-bot-lambda-${bot.id}`;
- ctx.logger.info('Deploying lambda function for bot', { name });
- const zipFile = await createZipFile(code);
- ctx.logger.debug('Lambda function zip size', { bytes: zipFile.byteLength });
-
- const exists = await lambdaExists(client, name);
- if (!exists) {
- await createLambda(client, name, zipFile);
- } else {
- await updateLambda(client, name, zipFile);
- }
-}
-
-async function createZipFile(code: string): Promise<Uint8Array> {
- const zip = new JSZip();
- zip.file('user.js', code);
- zip.file('index.js', WRAPPER_CODE);
- return zip.generateAsync({ type: 'uint8array' });
-}
-
-/**
- * Returns true if the AWS Lambda exists for the bot name.
- * @param client - The AWS Lambda client.
- * @param name - The bot name.
- * @returns True if the bot exists.
- */
-async function lambdaExists(client: LambdaClient, name: string): Promise<boolean> {
- try {
- const command = new GetFunctionCommand({ FunctionName: name });
- const response = await client.send(command);
- return response.Configuration?.FunctionName === name;
- } catch (err) {
- return false;
- }
-}
-
-/**
- * Creates a new AWS Lambda for the bot name.
- * @param client - The AWS Lambda client.
- * @param name - The bot name.
- * @param zipFile - The zip file with the bot code.
- */
-async function createLambda(client: LambdaClient, name: string, zipFile: Uint8Array): Promise<void> {
- const layerVersion = await getLayerVersion(client);
-
- await client.send(
- new CreateFunctionCommand({
- FunctionName: name,
- Role: getConfig().botLambdaRoleArn,
- Runtime: LAMBDA_RUNTIME,
- Handler: LAMBDA_HANDLER,
- MemorySize: LAMBDA_MEMORY,
- PackageType: PackageType.Zip,
- Layers: [layerVersion],
- Code: {
- ZipFile: zipFile,
- },
- Publish: true,
- Timeout: 10, // seconds
- })
- );
-}
-
-/**
- * Updates an existing AWS Lambda for the bot name.
- * @param client - The AWS Lambda client.
- * @param name - The bot name.
- * @param zipFile - The zip file with the bot code.
- */
-async function updateLambda(client: LambdaClient, name: string, zipFile: Uint8Array): Promise<void> {
- // First, make sure the lambda configuration is up to date
- await updateLambdaConfig(client, name);
-
- // Then update the code
- await client.send(
- new UpdateFunctionCodeCommand({
- FunctionName: name,
- ZipFile: zipFile,
- Publish: true,
- })
- );
-}
-
-/**
- * Updates the lambda configuration.
- * @param client - The AWS Lambda client.
- * @param name - The lambda name.
- */
-async function updateLambdaConfig(client: LambdaClient, name: string): Promise<void> {
- const layerVersion = await getLayerVersion(client);
- const functionConfig = await getLambdaConfig(client, name);
- if (
- functionConfig.Runtime === LAMBDA_RUNTIME &&
- functionConfig.Handler === LAMBDA_HANDLER &&
- functionConfig.Layers?.[0].Arn === layerVersion
- ) {
- // Everything is up-to-date
- return;
- }
-
- // Need to update
- await client.send(
- new UpdateFunctionConfigurationCommand({
- FunctionName: name,
- Role: getConfig().botLambdaRoleArn,
- Runtime: LAMBDA_RUNTIME,
- Handler: LAMBDA_HANDLER,
- Layers: [layerVersion],
- })
- );
-
- // Wait for the update to complete before returning
- // Wait up to 5 seconds
- // See: https://github.com/aws/aws-toolkit-visual-studio/issues/197
- // See: https://aws.amazon.com/blogs/compute/coming-soon-expansion-of-aws-lambda-states-to-all-functions/
- for (let i = 0; i < 5; i++) {
- const config = await getLambdaConfig(client, name);
- // Valid Values: Pending | Active | Inactive | Failed
- // See: https://docs.aws.amazon.com/lambda/latest/dg/API_GetFunctionConfiguration.html
- if (config.State === 'Active') {
- return;
- }
- await sleep(1000);
- }
-}
-
-async function getLambdaConfig(client: LambdaClient, name: string): Promise<GetFunctionConfigurationCommandOutput> {
- return client.send(
- new GetFunctionConfigurationCommand({
- FunctionName: name,
- })
- );
-}
-
-/**
- * Returns the latest layer version for the Medplum bot layer.
- * The first result is the latest version.
- * See: https://stackoverflow.com/a/55752188
- * @param client - The AWS Lambda client.
- * @returns The most recent layer version ARN.
- */
-async function getLayerVersion(client: LambdaClient): Promise<string> {
- const command = new ListLayerVersionsCommand({
- LayerName: getConfig().botLambdaLayerName,
- MaxItems: 1,
- });
- const response = await client.send(command);
- return response.LayerVersions?.[0].LayerVersionArn as string;
-}
diff --git a/packages/server/src/fhir/operations/execute.test.ts b/packages/server/src/fhir/operations/execute.test.ts
index db562824ae..977128cf7c 100644
--- a/packages/server/src/fhir/operations/execute.test.ts
+++ b/packages/server/src/fhir/operations/execute.test.ts
@@ -1,7 +1,5 @@
-import { InvokeCommand, LambdaClient, ListLayerVersionsCommand } from '@aws-sdk/client-lambda';
import { ContentType } from '@medplum/core';
import { Bot } from '@medplum/fhirtypes';
-import { AwsClientStub, mockClient } from 'aws-sdk-client-mock';
import { randomUUID } from 'crypto';
import express from 'express';
import request from 'supertest';
@@ -10,49 +8,19 @@ import { registerNew } from '../../auth/register';
import { getConfig, loadTestConfig } from '../../config';
import { initTestAuth, withTestContext } from '../../test.setup';
import { getBinaryStorage } from '../storage';
-import { getLambdaFunctionName } from './execute';
const app = express();
let accessToken: string;
let bot: Bot;
describe('Execute', () => {
- let mockLambdaClient: AwsClientStub<LambdaClient>;
-
- beforeEach(() => {
- mockLambdaClient = mockClient(LambdaClient);
-
- mockLambdaClient.on(ListLayerVersionsCommand).resolves({
- LayerVersions: [
- {
- LayerVersionArn: 'xyz',
- },
- ],
- });
-
- mockLambdaClient.on(InvokeCommand).callsFake(({ Payload }) => {
- const decoder = new TextDecoder();
- const event = JSON.parse(decoder.decode(Payload));
- const output = JSON.stringify(event.input);
- const encoder = new TextEncoder();
-
- return {
- LogResult: `U1RBUlQgUmVxdWVzdElkOiAxNDZmY2ZjZi1jMzJiLTQzZjUtODJhNi1lZTBmMzEzMmQ4NzMgVmVyc2lvbjogJExBVEVTVAoyMDIyLTA1LTMwVDE2OjEyOjIyLjY4NVoJMTQ2ZmNmY2YtYzMyYi00M2Y1LTgyYTYtZWUwZjMxMzJkODczCUlORk8gdGVzdApFTkQgUmVxdWVzdElkOiAxNDZmY2ZjZi1jMzJiLTQzZjUtODJhNi1lZTBmMzEzMmQ4NzMKUkVQT1JUIFJlcXVlc3RJZDogMTQ2ZmNmY2YtYzMyYi00M2Y1LTgyYTYtZWUwZjMxMzJkODcz`,
- Payload: encoder.encode(output),
- };
- });
- });
-
- afterEach(() => {
- mockLambdaClient.restore();
- });
-
beforeAll(async () => {
const config = await loadTestConfig();
+ config.vmContextBotsEnabled = true;
await initApp(app, config);
accessToken = await initTestAuth();
- const res = await request(app)
+ const res1 = await request(app)
.post(`/fhir/R4/Bot`)
.set('Content-Type', ContentType.FHIR_JSON)
.set('Authorization', 'Bearer ' + accessToken)
@@ -60,7 +28,7 @@ describe('Execute', () => {
resourceType: 'Bot',
identifier: [{ system: 'https://example.com/bot', value: randomUUID() }],
name: 'Test Bot',
- runtimeVersion: 'awslambda',
+ runtimeVersion: 'vmcontext',
code: `
export async function handler(medplum, event) {
console.log('input', event.input);
@@ -68,8 +36,22 @@ describe('Execute', () => {
}
`,
});
- expect(res.status).toBe(201);
- bot = res.body as Bot;
+ expect(res1.status).toBe(201);
+ bot = res1.body as Bot;
+
+ const res2 = await request(app)
+ .post(`/fhir/R4/Bot/${bot.id}/$deploy`)
+ .set('Content-Type', ContentType.FHIR_JSON)
+ .set('Authorization', 'Bearer ' + accessToken)
+ .send({
+ code: `
+ exports.handler = async function (medplum, event) {
+ console.log('input', event.input);
+ return event.input;
+ };
+ `,
+ });
+ expect(res2.status).toBe(200);
});
afterAll(async () => {
@@ -237,64 +219,6 @@ describe('Execute', () => {
expect(res3.body.issue[0].details.text).toEqual('Bots not enabled');
});
- test('Get function name', async () => {
- const config = getConfig();
- const normalBot: Bot = { resourceType: 'Bot', id: '123' };
- const customBot: Bot = {
- resourceType: 'Bot',
- id: '456',
- identifier: [{ system: 'https://medplum.com/bot-external-function-id', value: 'custom' }],
- };
-
- expect(getLambdaFunctionName(normalBot)).toEqual('medplum-bot-lambda-123');
- expect(getLambdaFunctionName(customBot)).toEqual('medplum-bot-lambda-456');
-
- // Temporarily enable custom bot support
- config.botCustomFunctionsEnabled = true;
- expect(getLambdaFunctionName(normalBot)).toEqual('medplum-bot-lambda-123');
- expect(getLambdaFunctionName(customBot)).toEqual('custom');
- config.botCustomFunctionsEnabled = false;
- });
-
- test('Execute by identifier', async () => {
- const res = await request(app)
- .post(`/fhir/R4/Bot/$execute?identifier=${bot.identifier?.[0]?.system}|${bot.identifier?.[0]?.value}`)
- .set('Content-Type', ContentType.TEXT)
- .set('Authorization', 'Bearer ' + accessToken)
- .send('input');
- expect(res.status).toBe(200);
- expect(res.headers['content-type']).toBe('text/plain; charset=utf-8');
- expect(res.text).toEqual('input');
- });
-
- test('Missing parameters', async () => {
- const res = await request(app)
- .post(`/fhir/R4/Bot/$execute`)
- .set('Content-Type', ContentType.TEXT)
- .set('Authorization', 'Bearer ' + accessToken)
- .send('input');
- expect(res.status).toBe(400);
- expect(res.body.issue[0].details.text).toEqual('Must specify bot ID or identifier.');
- });
-
- test('GET request with query params', async () => {
- const res = await request(app)
- .get(`/fhir/R4/Bot/${bot.id}/$execute?foo=bar`)
- .set('Authorization', 'Bearer ' + accessToken);
- expect(res.status).toBe(200);
- expect(res.body.foo).toBe('bar');
- });
-
- test('POST request with extra path', async () => {
- const res = await request(app)
- .post(`/fhir/R4/Bot/${bot.id}/$execute/RequestGroup`)
- .set('Authorization', 'Bearer ' + accessToken)
- .set('Content-Type', ContentType.FHIR_JSON)
- .send({ foo: 'bar' });
- expect(res.status).toBe(200);
- expect(res.body.foo).toBe('bar');
- });
-
test('VM context bot success', async () => {
// Temporarily enable VM context bots
getConfig().vmContextBotsEnabled = true;
diff --git a/packages/server/src/fhir/operations/execute.ts b/packages/server/src/fhir/operations/execute.ts
index cbe9069ac7..f7f29aa652 100644
--- a/packages/server/src/fhir/operations/execute.ts
+++ b/packages/server/src/fhir/operations/execute.ts
@@ -1,4 +1,3 @@
-import { InvokeCommand, LambdaClient } from '@aws-sdk/client-lambda';
import {
ContentType,
Hl7Message,
@@ -7,7 +6,6 @@ import {
allOk,
badRequest,
createReference,
- getIdentifier,
normalizeErrorString,
resolveId,
} from '@medplum/core';
@@ -21,7 +19,7 @@ import {
Organization,
Project,
ProjectMembership,
- ProjectSecret,
+ ProjectSetting,
Reference,
Subscription,
} from '@medplum/fhirtypes';
@@ -30,8 +28,8 @@ import fetch from 'node-fetch';
import { randomUUID } from 'node:crypto';
import { Readable } from 'node:stream';
import vm from 'node:vm';
-import { TextDecoder, TextEncoder } from 'util';
import { asyncWrap } from '../../async';
+import { runInLambda } from '../../cloud/aws/execute';
import { getConfig } from '../../config';
import { buildTracingExtension, getAuthenticatedContext, getLogger } from '../../context';
import { generateAccessToken } from '../../oauth/keys';
@@ -59,6 +57,11 @@ export interface BotExecutionRequest {
readonly traceId?: string;
}
+export interface BotExecutionContext extends BotExecutionRequest {
+ readonly accessToken: string;
+ readonly secrets: Record<string, ProjectSetting>;
+}
+
export interface BotExecutionResult {
readonly success: boolean;
readonly logResult: string;
@@ -161,7 +164,7 @@ async function getBotForRequest(req: Request): Promise {
* @returns The bot execution result.
*/
export async function executeBot(request: BotExecutionRequest): Promise<BotExecutionResult> {
- const { bot } = request;
+ const { bot, runAs } = request;
const startTime = request.requestTime ?? new Date().toISOString();
let result: BotExecutionResult;
@@ -172,10 +175,16 @@ export async function executeBot(request: BotExecutionRequest): Promise<BotExecutionResult> {
- const { bot, runAs, input, contentType, traceId } = request;
- const config = getConfig();
- const accessToken = await getBotAccessToken(runAs);
- const secrets = await getBotSecrets(bot);
-
- const client = new LambdaClient({ region: config.awsRegion });
- const name = getLambdaFunctionName(bot);
- const payload = {
- bot: createReference(bot),
- baseUrl: config.baseUrl,
- accessToken,
- input: input instanceof Hl7Message ? input.toString() : input,
- contentType,
- secrets,
- traceId,
- };
-
- // Build the command
- const encoder = new TextEncoder();
- const command = new InvokeCommand({
- FunctionName: name,
- InvocationType: 'RequestResponse',
- LogType: 'Tail',
- Payload: encoder.encode(JSON.stringify(payload)),
- });
-
- // Execute the command
- try {
- const response = await client.send(command);
- const responseStr = response.Payload ? new TextDecoder().decode(response.Payload) : undefined;
-
- // The response from AWS Lambda is always JSON, even if the function returns a string
- // Therefore we always use JSON.parse to get the return value
- // See: https://stackoverflow.com/a/49951946/2051724
- const returnValue = responseStr ? JSON.parse(responseStr) : undefined;
-
- return {
- success: !response.FunctionError,
- logResult: parseLambdaLog(response.LogResult as string),
- returnValue,
- };
- } catch (err) {
- return {
- success: false,
- logResult: normalizeErrorString(err),
- };
- }
-}
-
-/**
- * Returns the AWS Lambda function name for the given bot.
- * By default, the function name is based on the bot ID.
- * If the bot has a custom function, and the server allows it, then that is used instead.
- * @param bot - The Bot resource.
- * @returns The AWS Lambda function name.
- */
-export function getLambdaFunctionName(bot: Bot): string {
- if (getConfig().botCustomFunctionsEnabled) {
- const customFunction = getIdentifier(bot, 'https://medplum.com/bot-external-function-id');
- if (customFunction) {
- return customFunction;
- }
- }
-
- // By default, use the bot ID as the Lambda function name
- return `medplum-bot-lambda-${bot.id}`;
-}
-
-/**
- * Parses the AWS Lambda log result.
- *
- * The raw logs include markup metadata such as timestamps and billing information.
- *
- * We only want to include the actual log contents in the AuditEvent,
- * so we attempt to scrub away all of that extra metadata.
- *
- * See: https://docs.aws.amazon.com/lambda/latest/dg/nodejs-logging.html
- * @param logResult - The raw log result from the AWS lambda event.
- * @returns The parsed log result.
- */
-function parseLambdaLog(logResult: string): string {
- const logBuffer = Buffer.from(logResult, 'base64');
- const log = logBuffer.toString('ascii');
- const lines = log.split('\n');
- const result = [];
- for (const line of lines) {
- if (line.startsWith('START RequestId: ')) {
- // Ignore start line
- continue;
- }
- if (line.startsWith('END RequestId: ') || line.startsWith('REPORT RequestId: ')) {
- // Stop at end lines
- break;
- }
- result.push(line);
- }
- return result.join('\n').trim();
-}
-
/**
* Executes a Bot on the server in a separate Node.js VM.
* @param request - The bot request.
* @returns The bot execution result.
*/
-async function runInVmContext(request: BotExecutionRequest): Promise<BotExecutionResult> {
- const { bot, runAs, input, contentType, traceId } = request;
+async function runInVmContext(request: BotExecutionContext): Promise<BotExecutionResult> {
+ const { bot, input, contentType, traceId } = request;
const config = getConfig();
if (!config.vmContextBotsEnabled) {
@@ -405,9 +309,6 @@ async function runInVmContext(request: BotExecutionRequest): Promise<BotExecutionResult> {
const binary = await systemRepo.readReference<Binary>({ reference: codeUrl } as Reference<Binary>);
const stream = await getBinaryStorage().readBinary(binary);
const code = await readStreamToString(stream);
-
- const accessToken = await getBotAccessToken(runAs);
- const secrets = await getBotSecrets(bot);
const botConsole = new MockConsole();
const sandbox = {
@@ -420,10 +321,10 @@ async function runInVmContext(request: BotExecutionRequest): Promise<BotExecutionResult> {
return accessToken;
}
-async function getBotSecrets(bot: Bot): Promise<Record<string, ProjectSecret>> {
+async function getBotSecrets(bot: Bot): Promise<Record<string, ProjectSetting>> {
const systemRepo = getSystemRepo();
const project = await systemRepo.readResource<Project>('Project', bot.meta?.project as string);
const secrets = Object.fromEntries(project.secret?.map((secret) => [secret.name, secret]) || []);
diff --git a/packages/server/src/fhir/operations/getwsbindingtoken.test.ts b/packages/server/src/fhir/operations/getwsbindingtoken.test.ts
index 980c30bd1c..a4e5541e33 100644
--- a/packages/server/src/fhir/operations/getwsbindingtoken.test.ts
+++ b/packages/server/src/fhir/operations/getwsbindingtoken.test.ts
@@ -25,7 +25,7 @@ describe('Get WebSocket binding token', () => {
withTestContext(async () => {
// Create Subscription
const res1 = await request(app)
- .post(`/fhir/R4/Subscription`)
+ .post('/fhir/R4/Subscription')
.set('Authorization', 'Bearer ' + accessToken)
.set('Content-Type', ContentType.FHIR_JSON)
.send({
@@ -51,7 +51,8 @@ describe('Get WebSocket binding token', () => {
const params = res2.body as Parameters;
expect(params.resourceType).toEqual('Parameters');
- expect(params.parameter?.length).toEqual(3);
+ expect(params.parameter?.length).toBeDefined();
+ expect([3, 4]).toContain(params.parameter?.length);
expect(params.parameter?.[0]).toBeDefined();
expect(params.parameter?.[0]?.name).toEqual('token');
@@ -69,9 +70,15 @@ describe('Get WebSocket binding token', () => {
expect(params.parameter?.[1]?.name).toEqual('expiration');
expect(params.parameter?.[1]?.valueDateTime).toBeDefined();
expect(new Date(params.parameter?.[1]?.valueDateTime as string).getTime()).toBeGreaterThanOrEqual(Date.now());
+
expect(params.parameter?.[2]).toBeDefined();
- expect(params.parameter?.[2]?.name).toEqual('websocket-url');
- expect(params.parameter?.[2]?.valueUrl).toBeDefined();
+ expect(params.parameter?.[2]?.name).toEqual('subscription');
+ expect(params.parameter?.[2]?.valueString).toBeDefined();
+ expect(params.parameter?.[2]?.valueString).toEqual(createdSub.id);
+
+ expect(params.parameter?.[3]).toBeDefined();
+ expect(params.parameter?.[3]?.name).toEqual('websocket-url');
+ expect(params.parameter?.[3]?.valueUrl).toBeDefined();
}));
test('should return OperationOutcome error if Subscription no longer exists', () =>
diff --git a/packages/server/src/fhir/operations/getwsbindingtoken.ts b/packages/server/src/fhir/operations/getwsbindingtoken.ts
index f6b5cd0ca7..ca89565b65 100644
--- a/packages/server/src/fhir/operations/getwsbindingtoken.ts
+++ b/packages/server/src/fhir/operations/getwsbindingtoken.ts
@@ -1,9 +1,10 @@
import { allOk, badRequest, normalizeErrorString, resolveId } from '@medplum/core';
import { FhirRequest, FhirResponse } from '@medplum/fhir-router';
-import { Parameters, Subscription } from '@medplum/fhirtypes';
+import { OperationDefinition, Subscription } from '@medplum/fhirtypes';
import { getConfig } from '../../config';
import { getAuthenticatedContext } from '../../context';
import { generateAccessToken } from '../../oauth/keys';
+import { buildOutputParameters } from './utils/parameters';
const ONE_HOUR = 60 * 60 * 1000;
@@ -11,6 +12,117 @@ export type AdditionalWsBindingClaims = {
subscription_id: string;
};
+// Source (for backport version): https://build.fhir.org/ig/HL7/fhir-subscription-backport-ig/OperationDefinition-backport-subscription-get-ws-binding-token.json.html
+// R5 definition: https://build.fhir.org/operation-subscription-get-ws-binding-token.json.html
+const operation: OperationDefinition = {
+ resourceType: 'OperationDefinition',
+ id: 'backport-subscription-get-ws-binding-token',
+ extension: [
+ {
+ url: 'http://hl7.org/fhir/StructureDefinition/structuredefinition-fmm',
+ valueInteger: 0,
+ },
+ {
+ url: 'http://hl7.org/fhir/StructureDefinition/structuredefinition-standards-status',
+ valueCode: 'trial-use',
+ },
+ {
+ url: 'http://hl7.org/fhir/StructureDefinition/structuredefinition-wg',
+ valueCode: 'fhir',
+ },
+ ],
+ url: 'http://hl7.org/fhir/uv/subscriptions-backport/OperationDefinition/backport-subscription-get-ws-binding-token',
+ version: '1.2.0-ballot',
+ name: 'R5SubscriptionGetWsBindingToken',
+ title: 'Get WS Binding Token for Subscription Operation',
+ status: 'active',
+ kind: 'operation',
+ date: '2020-11-30',
+ publisher: 'HL7 International / FHIR Infrastructure',
+ contact: [
+ {
+ name: 'HL7 International / FHIR Infrastructure',
+ telecom: [
+ {
+ system: 'url',
+ value: 'http://www.hl7.org/Special/committees/fiwg',
+ },
+ ],
+ },
+ {
+ name: 'Gino Canessa',
+ telecom: [
+ {
+ system: 'email',
+ value: 'mailto:gino.canessa@microsoft.com',
+ },
+ ],
+ },
+ ],
+ description:
+ 'This operation is used to get a token for a websocket client to use in order to bind to one or more subscriptions.',
+ jurisdiction: [
+ {
+ coding: [
+ {
+ system: 'http://unstats.un.org/unsd/methods/m49/m49.htm',
+ code: '001',
+ display: 'World',
+ },
+ ],
+ },
+ ],
+ affectsState: false,
+ code: 'get-ws-binding-token',
+ resource: ['Subscription'],
+ system: false,
+ type: true,
+ instance: true,
+ parameter: [
+ {
+ name: 'id',
+ use: 'in',
+ min: 0,
+ max: '*',
+ documentation:
+ 'At the Instance level, this parameter is ignored. At the Resource level, one or more parameters containing a FHIR id for a Subscription to get a token for. In the absense of any specified ids, the server may either return a token for all Subscriptions available to the caller with a channel-type of websocket or fail the request.',
+ type: 'id',
+ },
+ {
+ name: 'token',
+ use: 'out',
+ min: 1,
+ max: '1',
+ documentation: 'An access token that a client may use to show authorization during a websocket connection.',
+ type: 'string',
+ },
+ {
+ name: 'expiration',
+ use: 'out',
+ min: 1,
+ max: '1',
+ documentation: 'The date and time this token is valid until.',
+ type: 'dateTime',
+ },
+ {
+ name: 'subscription',
+ use: 'out',
+ min: 0,
+ max: '*',
+ documentation: 'The subscriptions this token is valid for.',
+ type: 'string',
+ },
+ {
+ name: 'websocket-url',
+ use: 'out',
+ min: 1,
+ max: '1',
+ documentation: 'The URL the client should use to connect to Websockets.',
+ type: 'url',
+ },
+ ],
+};
+
/**
* Handles a GetWsBindingToken request.
*
@@ -59,24 +171,12 @@ export async function getWsBindingTokenHandler(req: FhirRequest): Promise<FhirResponse> {
@@ -21,6 +26,13 @@ describe('Project $init', () => {
await shutdownApp();
});
+ beforeEach(() => {
+ (fetch as unknown as jest.Mock).mockClear();
+ (pwnedPassword as unknown as jest.Mock).mockClear();
+ setupPwnedPasswordMock(pwnedPassword as unknown as jest.Mock, 0);
+ setupRecaptchaMock(fetch as unknown as jest.Mock, true);
+ });
+
test('Success', async () => {
const superAdminAccessToken = await initTestAuth({ superAdmin: true });
expect(superAdminAccessToken).toBeDefined();
diff --git a/packages/server/src/fhir/operations/projectinit.ts b/packages/server/src/fhir/operations/projectinit.ts
index d77b69528d..6656a03757 100644
--- a/packages/server/src/fhir/operations/projectinit.ts
+++ b/packages/server/src/fhir/operations/projectinit.ts
@@ -16,6 +16,7 @@ import { getAuthenticatedContext, getRequestContext } from '../../context';
import { getUserByEmailWithoutProject } from '../../oauth/utils';
import { getSystemRepo } from '../repo';
import { buildOutputParameters, parseInputParameters } from './utils/parameters';
+import { getConfig } from '../../config';
const projectInitOperation: OperationDefinition = {
resourceType: 'OperationDefinition',
@@ -127,6 +128,7 @@ export async function createProject(
}> {
const ctx = getRequestContext();
const systemRepo = getSystemRepo();
+ const config = getConfig();
ctx.logger.info('Project creation request received', { name: projectName });
const project = await systemRepo.createResource<Project>({
@@ -134,6 +136,7 @@ export async function createProject(
name: projectName,
owner: admin ? createReference(admin) : undefined,
strictMode: true,
+ features: config.defaultProjectFeatures,
});
ctx.logger.info('Project created', {
diff --git a/packages/server/src/fhir/repo.test.ts b/packages/server/src/fhir/repo.test.ts
index 06093d67ed..e8282fb848 100644
--- a/packages/server/src/fhir/repo.test.ts
+++ b/packages/server/src/fhir/repo.test.ts
@@ -1048,4 +1048,11 @@ describe('FHIR Repo', () => {
})
).rejects.toThrow('Multiple resources found matching condition');
}));
+
+ test('Double DELETE', async () =>
+ withTestContext(async () => {
+ const patient = await systemRepo.createResource<Patient>({ resourceType: 'Patient' });
+ await systemRepo.deleteResource(patient.resourceType, patient.id as string);
+ await expect(systemRepo.deleteResource(patient.resourceType, patient.id as string)).resolves.toBeUndefined();
+ }));
});
diff --git a/packages/server/src/fhir/repo.ts b/packages/server/src/fhir/repo.ts
index 195d30021c..a3f37299ca 100644
--- a/packages/server/src/fhir/repo.ts
+++ b/packages/server/src/fhir/repo.ts
@@ -964,9 +964,18 @@ export class Repository extends BaseRepository implements FhirRepository {
+ let resource: Resource;
try {
- const resource = await this.readResourceImpl(resourceType, id);
+ resource = await this.readResourceImpl(resourceType, id);
+ } catch (err) {
+ const outcomeErr = err as OperationOutcomeError;
+ if (isGone(outcomeErr.outcome)) {
+ return; // Resource is already deleted, return successfully
+ }
+ throw err;
+ }
+ try {
if (!this.canWriteResourceType(resourceType) || !this.isResourceWriteable(undefined, resource)) {
throw new OperationOutcomeError(forbidden);
}
diff --git a/packages/server/src/fhir/rewrite.ts b/packages/server/src/fhir/rewrite.ts
index af8ccb6ec3..42ca1e0c62 100644
--- a/packages/server/src/fhir/rewrite.ts
+++ b/packages/server/src/fhir/rewrite.ts
@@ -2,7 +2,7 @@ import { Binary, Resource } from '@medplum/fhirtypes';
import { getConfig } from '../config';
import { getLogger } from '../context';
import { Repository } from './repo';
-import { getPresignedUrl } from './signer';
+import { getBinaryStorage } from './storage';
/**
* The target type of the attachment rewrite.
@@ -164,7 +164,8 @@ class Rewriter {
getLogger().debug('Error reading binary to generate presigned URL', err);
return `Binary/${id}`;
}
- return getPresignedUrl(binary);
+
+ return getBinaryStorage().getPresignedUrl(binary);
}
}
diff --git a/packages/server/src/fhir/routes.ts b/packages/server/src/fhir/routes.ts
index 1d7ffc840a..8377a2bd91 100644
--- a/packages/server/src/fhir/routes.ts
+++ b/packages/server/src/fhir/routes.ts
@@ -9,6 +9,7 @@ import { recordHistogramValue } from '../otel/otel';
import { bulkDataRouter } from './bulkdata';
import { jobRouter } from './job';
import { getCapabilityStatement } from './metadata';
+import { agentBulkStatusHandler } from './operations/agentbulkstatus';
import { agentPushHandler } from './operations/agentpush';
import { agentStatusHandler } from './operations/agentstatus';
import { codeSystemImportHandler } from './operations/codesystemimport';
@@ -185,6 +186,9 @@ function initInternalFhirRouter(): FhirRouter {
router.add('GET', '/Agent/$status', agentStatusHandler);
router.add('GET', '/Agent/:id/$status', agentStatusHandler);
+ // Agent $bulk-status operation
+ router.add('GET', '/Agent/$bulk-status', agentBulkStatusHandler);
+
// Bot $deploy operation
router.add('POST', '/Bot/:id/$deploy', deployHandler);
diff --git a/packages/server/src/fhir/storage.test.ts b/packages/server/src/fhir/storage.test.ts
index a37f730ccd..3e1e2c6bc2 100644
--- a/packages/server/src/fhir/storage.test.ts
+++ b/packages/server/src/fhir/storage.test.ts
@@ -1,31 +1,17 @@
-import { CopyObjectCommand, GetObjectCommand, PutObjectCommand, S3Client } from '@aws-sdk/client-s3';
import { ContentType } from '@medplum/core';
import { Binary } from '@medplum/fhirtypes';
-import { sdkStreamMixin } from '@smithy/util-stream';
-import { AwsClientStub, mockClient } from 'aws-sdk-client-mock';
-import 'aws-sdk-client-mock-jest';
import { Request } from 'express';
import fs from 'fs';
-import internal, { Readable } from 'stream';
+import { Readable } from 'stream';
import { loadTestConfig } from '../config';
import { streamToString } from '../test.setup';
import { getBinaryStorage, initBinaryStorage } from './storage';
describe('Storage', () => {
- let mockS3Client: AwsClientStub<S3Client>;
-
beforeAll(async () => {
await loadTestConfig();
});
- beforeEach(() => {
- mockS3Client = mockClient(S3Client);
- });
-
- afterEach(() => {
- mockS3Client.restore();
- });
-
test('Undefined binary storage', () => {
initBinaryStorage('binary');
expect(() => getBinaryStorage()).toThrow();
@@ -60,118 +46,6 @@ describe('Storage', () => {
// Verify that the file matches the expected contents
const content = await streamToString(stream);
expect(content).toEqual('foo');
-
- // Make sure we didn't touch S3 at all
- expect(mockS3Client.send.callCount).toBe(0);
- expect(mockS3Client).not.toHaveReceivedCommand(PutObjectCommand);
- expect(mockS3Client).not.toHaveReceivedCommand(GetObjectCommand);
- });
-
- test('S3 storage', async () => {
- initBinaryStorage('s3:foo');
-
- const storage = getBinaryStorage();
- expect(storage).toBeDefined();
-
- // Write a file
- const binary = {
- resourceType: 'Binary',
- id: '123',
- meta: {
- versionId: '456',
- },
- } as Binary;
- const req = new Readable();
- req.push('foo');
- req.push(null);
- (req as any).headers = {};
-
- const sdkStream = sdkStreamMixin(req);
- mockS3Client.on(GetObjectCommand).resolves({ Body: sdkStream });
-
- await storage.writeBinary(binary, 'test.txt', ContentType.TEXT, req as Request);
-
- expect(mockS3Client.send.callCount).toBe(1);
- expect(mockS3Client).toReceiveCommandWith(PutObjectCommand, {
- Bucket: 'foo',
- Key: 'binary/123/456',
- ContentType: ContentType.TEXT,
- });
-
- // Read a file
- const stream = await storage.readBinary(binary);
- expect(stream).toBeDefined();
- expect(mockS3Client).toHaveReceivedCommand(GetObjectCommand);
- });
-
- test('Missing metadata', async () => {
- initBinaryStorage('s3:foo');
-
- const storage = getBinaryStorage();
- expect(storage).toBeDefined();
-
- // Write a file
- const binary = {
- resourceType: 'Binary',
- id: '123',
- meta: {
- versionId: '456',
- },
- } as Binary;
- const req = new Readable();
- req.push('foo');
- req.push(null);
- (req as any).headers = {};
-
- const sdkStream = sdkStreamMixin(req);
- mockS3Client.on(GetObjectCommand).resolves({ Body: sdkStream });
-
- await storage.writeBinary(binary, undefined, undefined, req as Request);
- expect(mockS3Client.send.callCount).toBe(1);
- expect(mockS3Client).toReceiveCommandWith(PutObjectCommand, {
- Bucket: 'foo',
- Key: 'binary/123/456',
- ContentType: 'application/octet-stream',
- });
-
- // Read a file
- const stream = await storage.readBinary(binary);
- expect(stream).toBeDefined();
- expect(mockS3Client).toHaveReceivedCommand(GetObjectCommand);
- });
-
- test('Invalid file extension', async () => {
- initBinaryStorage('s3:foo');
-
- const storage = getBinaryStorage();
- expect(storage).toBeDefined();
-
- const binary = null as unknown as Binary;
- const stream = null as unknown as internal.Readable;
- try {
- await storage.writeBinary(binary, 'test.exe', ContentType.TEXT, stream);
- fail('Expected error');
- } catch (err) {
- expect((err as Error).message).toEqual('Invalid file extension');
- }
- expect(mockS3Client).not.toHaveReceivedCommand(PutObjectCommand);
- });
-
- test('Invalid content type', async () => {
- initBinaryStorage('s3:foo');
-
- const storage = getBinaryStorage();
- expect(storage).toBeDefined();
-
- const binary = null as unknown as Binary;
- const stream = null as unknown as internal.Readable;
- try {
- await storage.writeBinary(binary, 'test.sh', 'application/x-sh', stream);
- fail('Expected error');
- } catch (err) {
- expect((err as Error).message).toEqual('Invalid content type');
- }
- expect(mockS3Client).not.toHaveReceivedCommand(PutObjectCommand);
});
test('Should throw an error when file is not found in readBinary()', async () => {
@@ -198,54 +72,4 @@ describe('Storage', () => {
expect((err as Error).message).toEqual('File not found');
}
});
-
- test('Copy S3 object', async () => {
- initBinaryStorage('s3:foo');
-
- const storage = getBinaryStorage();
- expect(storage).toBeDefined();
-
- // Write a file
- const binary = {
- resourceType: 'Binary',
- id: '123',
- meta: {
- versionId: '456',
- },
- } as Binary;
- const req = new Readable();
- req.push('foo');
- req.push(null);
- (req as any).headers = {};
-
- const sdkStream = sdkStreamMixin(req);
- mockS3Client.on(GetObjectCommand).resolves({ Body: sdkStream });
-
- await storage.writeBinary(binary, 'test.txt', ContentType.TEXT, req as Request);
-
- expect(mockS3Client.send.callCount).toBe(1);
- expect(mockS3Client).toReceiveCommandWith(PutObjectCommand, {
- Bucket: 'foo',
- Key: 'binary/123/456',
- ContentType: ContentType.TEXT,
- });
- mockS3Client.reset();
-
- // Copy the object
- const destinationBinary = {
- resourceType: 'Binary',
- id: '789',
- meta: {
- versionId: '012',
- },
- } as Binary;
- await storage.copyBinary(binary, destinationBinary);
-
- expect(mockS3Client.send.callCount).toBe(1);
- expect(mockS3Client).toReceiveCommandWith(CopyObjectCommand, {
- CopySource: 'foo/binary/123/456',
- Bucket: 'foo',
- Key: 'binary/789/012',
- });
- });
});
diff --git a/packages/server/src/fhir/storage.ts b/packages/server/src/fhir/storage.ts
index c6a05ed4ee..1d70d3fd12 100644
--- a/packages/server/src/fhir/storage.ts
+++ b/packages/server/src/fhir/storage.ts
@@ -1,9 +1,9 @@
-import { CopyObjectCommand, GetObjectCommand, S3Client } from '@aws-sdk/client-s3';
-import { Upload } from '@aws-sdk/lib-storage';
import { Binary } from '@medplum/fhirtypes';
+import { createSign } from 'crypto';
import { copyFileSync, createReadStream, createWriteStream, existsSync, mkdirSync } from 'fs';
import { resolve, sep } from 'path';
-import { pipeline, Readable } from 'stream';
+import { Readable, pipeline } from 'stream';
+import { S3Storage } from '../cloud/aws/storage';
import { getConfig } from '../config';
/**
@@ -44,7 +44,7 @@ export function getBinaryStorage(): BinaryStorage {
/**
* The BinaryStorage interface represents a method of reading and writing binary blobs.
*/
-interface BinaryStorage {
+export interface BinaryStorage {
writeBinary(
binary: Binary,
filename: string | undefined,
@@ -59,6 +59,8 @@ interface BinaryStorage {
copyBinary(sourceBinary: Binary, destinationBinary: Binary): Promise<void>;
copyFile(sourceKey: string, destinationKey: string): Promise<void>;
+
+ getPresignedUrl(binary: Binary): string;
}
/**
@@ -124,114 +126,28 @@ class FileSystemStorage implements BinaryStorage {
copyFileSync(resolve(this.baseDir, sourceKey), resolve(this.baseDir, destinationKey));
}
- private getKey(binary: Binary): string {
- return binary.id + sep + binary.meta?.versionId;
- }
+ getPresignedUrl(binary: Binary): string {
+ const config = getConfig();
+ const storageBaseUrl = config.storageBaseUrl;
+ const result = new URL(`${storageBaseUrl}${binary.id}/${binary.meta?.versionId}`);
- private getPath(binary: Binary): string {
- return resolve(this.baseDir, this.getKey(binary));
- }
-}
+ const dateLessThan = new Date();
+ dateLessThan.setHours(dateLessThan.getHours() + 1);
+ result.searchParams.set('Expires', dateLessThan.getTime().toString());
-/**
- * The S3Storage class stores binary data in an AWS S3 bucket.
- * Files are stored in bucket/binary/binary.id/binary.meta.versionId.
- */
-class S3Storage implements BinaryStorage {
- private readonly client: S3Client;
- private readonly bucket: string;
+ const privateKey = { key: config.signingKey, passphrase: config.signingKeyPassphrase };
+ const signature = createSign('sha256').update(result.toString()).sign(privateKey, 'base64');
+ result.searchParams.set('Signature', signature);
- constructor(bucket: string) {
- this.client = new S3Client({ region: getConfig().awsRegion });
- this.bucket = bucket;
+ return result.toString();
}
- /**
- * Writes a binary blob to S3.
- * @param binary - The binary resource destination.
- * @param filename - Optional binary filename.
- * @param contentType - Optional binary content type.
- * @param stream - The Node.js stream of readable content.
- * @returns Promise that resolves when the write is complete.
- */
- writeBinary(
- binary: Binary,
- filename: string | undefined,
- contentType: string | undefined,
- stream: BinarySource
- ): Promise<void> {
- checkFileMetadata(filename, contentType);
- return this.writeFile(this.getKey(binary), contentType, stream);
- }
-
- /**
- * Writes a file to S3.
- *
- * Early implementations used the simple "PutObjectCommand" to write the blob to S3.
- * However, PutObjectCommand does not support streaming.
- *
- * We now use the @aws-sdk/lib-storage package.
- *
- * Learn more:
- * https://github.com/aws/aws-sdk-js-v3/blob/main/UPGRADING.md#s3-multipart-upload
- * https://github.com/aws/aws-sdk-js-v3/tree/main/lib/lib-storage
- *
- * Be mindful of Cache-Control settings.
- *
- * Because we use signed URLs intended for one hour use,
- * we set "max-age" to 1 hour = 3600 seconds.
- *
- * But we want CloudFront to cache the response for 1 day,
- * so we set "s-maxage" to 1 day = 86400 seconds.
- *
- * Learn more:
- * https://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/Expiration.html
- * @param key - The S3 key.
- * @param contentType - Optional binary content type.
- * @param stream - The Node.js stream of readable content.
- */
- async writeFile(key: string, contentType: string | undefined, stream: BinarySource): Promise<void> {
- const upload = new Upload({
- params: {
- Bucket: this.bucket,
- Key: key,
- CacheControl: 'max-age=3600, s-maxage=86400',
- ContentType: contentType ?? 'application/octet-stream',
- Body: stream,
- },
- client: this.client,
- queueSize: 3,
- });
-
- await upload.done();
- }
-
- async readBinary(binary: Binary): Promise<Readable> {
- const output = await this.client.send(
- new GetObjectCommand({
- Bucket: this.bucket,
- Key: this.getKey(binary),
- })
- );
- return output.Body as Readable;
- }
-
- async copyBinary(sourceBinary: Binary, destinationBinary: Binary): Promise<void> {
- await this.copyFile(this.getKey(sourceBinary), this.getKey(destinationBinary));
- }
-
- async copyFile(sourceKey: string, destinationKey: string): Promise<void> {
- await this.client.send(
- new CopyObjectCommand({
- CopySource: `${this.bucket}/${sourceKey}`,
- Bucket: this.bucket,
- Key: destinationKey,
- })
- );
+ private getKey(binary: Binary): string {
+ return binary.id + sep + binary.meta?.versionId;
}
- private getKey(binary: Binary): string {
- return 'binary/' + binary.id + '/' + binary.meta?.versionId;
+ private getPath(binary: Binary): string {
+ return resolve(this.baseDir, this.getKey(binary));
}
}
@@ -274,6 +190,7 @@ const BLOCKED_FILE_EXTENSIONS = [
'.msp',
'.mst',
'.nsh',
+ '.php',
'.pif',
'.ps1',
'.scr',
@@ -313,7 +230,7 @@ const BLOCKED_CONTENT_TYPES = [
* @param filename - The input filename.
* @param contentType - The input content type.
*/
-function checkFileMetadata(filename: string | undefined, contentType: string | undefined): void {
+export function checkFileMetadata(filename: string | undefined, contentType: string | undefined): void {
if (checkFileExtension(filename)) {
throw new Error('Invalid file extension');
}
diff --git a/packages/server/src/fhircast/routes.test.ts b/packages/server/src/fhircast/routes.test.ts
index 7fc9c8031e..818c4b0c41 100644
--- a/packages/server/src/fhircast/routes.test.ts
+++ b/packages/server/src/fhircast/routes.test.ts
@@ -13,24 +13,26 @@ import { loadTestConfig } from '../config';
import { getRedis } from '../redis';
import { initTestAuth } from '../test.setup';
-const app = express();
-let accessToken: string;
-
const STU2_BASE_ROUTE = '/fhircast/STU2/';
const STU3_BASE_ROUTE = '/fhircast/STU3/';
describe('FHIRCast routes', () => {
+ const app = express();
+ let accessToken: string;
+
beforeAll(async () => {
const config = await loadTestConfig();
await initApp(app, config);
- await getRedis().flushdb();
- accessToken = await initTestAuth();
});
afterAll(async () => {
await shutdownApp();
});
+ beforeEach(async () => {
+ accessToken = await initTestAuth();
+ });
+
test('Get well known', async () => {
let res;
@@ -150,16 +152,17 @@ describe('FHIRCast routes', () => {
});
test('Get context', async () => {
+ const topic = randomUUID();
let res;
// Non-standard FHIRCast extension to support Nuance PowerCast Hub
res = await request(app)
- .get(`${STU2_BASE_ROUTE}my-topic`)
+ .get(`${STU2_BASE_ROUTE}${topic}`)
.set('Authorization', 'Bearer ' + accessToken);
expect(res.status).toBe(200);
expect(res.body).toEqual([]);
res = await request(app)
- .get(`${STU3_BASE_ROUTE}my-topic`)
+ .get(`${STU3_BASE_ROUTE}${topic}`)
.set('Authorization', 'Bearer ' + accessToken);
expect(res.status).toBe(200);
expect(res.body).toEqual({ 'context.type': '', context: [] });
diff --git a/packages/server/src/fhircast/utils.test.ts b/packages/server/src/fhircast/utils.test.ts
index 8f256708ad..b27e24704d 100644
--- a/packages/server/src/fhircast/utils.test.ts
+++ b/packages/server/src/fhircast/utils.test.ts
@@ -8,8 +8,6 @@ describe('FHIRcast Utils', () => {
beforeAll(async () => {
const config = await loadTestConfig();
initRedis(config.redis);
- expect(getRedis()).toBeDefined();
- await getRedis().flushdb();
});
afterAll(async () => {
diff --git a/packages/server/src/fhircast/websocket.test.ts b/packages/server/src/fhircast/websocket.test.ts
index 476a9b8ea4..4c5457e7e2 100644
--- a/packages/server/src/fhircast/websocket.test.ts
+++ b/packages/server/src/fhircast/websocket.test.ts
@@ -5,7 +5,6 @@ import { Server } from 'http';
import request from 'superwstest';
import { initApp, shutdownApp } from '../app';
import { MedplumServerConfig, loadTestConfig } from '../config';
-import { getRedis } from '../redis';
import { initTestAuth, withTestContext } from '../test.setup';
describe('FHIRcast WebSocket', () => {
@@ -20,7 +19,6 @@ describe('FHIRcast WebSocket', () => {
config = await loadTestConfig();
config.heartbeatEnabled = false;
server = await initApp(app, config);
- await getRedis().flushdb();
accessToken = await initTestAuth({ membership: { admin: true } });
await new Promise<void>((resolve) => {
server.listen(0, 'localhost', 511, resolve);
@@ -88,7 +86,6 @@ describe('FHIRcast WebSocket', () => {
config = await loadTestConfig();
config.heartbeatMilliseconds = 25;
server = await initApp(app, config);
- await getRedis().flushdb();
await new Promise<void>((resolve) => {
server.listen(0, 'localhost', 511, resolve);
});
diff --git a/packages/server/src/fhircast/websocket.ts b/packages/server/src/fhircast/websocket.ts
index cb4ee23b25..0a0faf9a7b 100644
--- a/packages/server/src/fhircast/websocket.ts
+++ b/packages/server/src/fhircast/websocket.ts
@@ -4,7 +4,7 @@ import { IncomingMessage } from 'http';
import ws from 'ws';
import { DEFAULT_HEARTBEAT_MS, heartbeat } from '../heartbeat';
import { globalLogger } from '../logger';
-import { getRedis } from '../redis';
+import { getRedis, getRedisSubscriber } from '../redis';
/**
* Handles a new WebSocket connection to the FHIRCast hub.
@@ -20,7 +20,7 @@ export async function handleFhircastConnection(socket: ws.WebSocket, request: In
// Once the client enters the subscribed state it is not supposed to issue any other commands,
// except for additional SUBSCRIBE, PSUBSCRIBE, UNSUBSCRIBE and PUNSUBSCRIBE commands.
const redis = getRedis();
- const redisSubscriber = redis.duplicate();
+ const redisSubscriber = getRedisSubscriber();
// Subscribe to the topic
await redisSubscriber.subscribe(topic);
diff --git a/packages/server/src/index.test.ts b/packages/server/src/index.test.ts
index c1e358df2c..44fa1aae27 100644
--- a/packages/server/src/index.test.ts
+++ b/packages/server/src/index.test.ts
@@ -1,4 +1,4 @@
-import http from 'http';
+import http from 'node:http';
import { shutdownApp } from './app';
import { main } from './index';
@@ -19,7 +19,7 @@ jest.mock('express', () => {
});
jest.mock('pg', () => {
- const original = jest.requireActual('express');
+ const original = jest.requireActual('pg');
class MockPoolClient {
async query(sql: string): Promise {
diff --git a/packages/server/src/logger.test.ts b/packages/server/src/logger.test.ts
index 60a31d5f7c..e17363696d 100644
--- a/packages/server/src/logger.test.ts
+++ b/packages/server/src/logger.test.ts
@@ -1,32 +1,7 @@
-import {
- CloudWatchLogsClient,
- CreateLogGroupCommand,
- CreateLogStreamCommand,
- PutLogEventsCommand,
-} from '@aws-sdk/client-cloudwatch-logs';
import { LogLevel } from '@medplum/core';
-import { AwsClientStub, mockClient } from 'aws-sdk-client-mock';
-import 'aws-sdk-client-mock-jest';
import { globalLogger } from './logger';
describe('Global Logger', () => {
- let mockCloudWatchLogsClient: AwsClientStub<CloudWatchLogsClient>;
-
- beforeEach(() => {
- mockCloudWatchLogsClient = mockClient(CloudWatchLogsClient);
-
- mockCloudWatchLogsClient.on(CreateLogGroupCommand).resolves({});
- mockCloudWatchLogsClient.on(CreateLogStreamCommand).resolves({});
- mockCloudWatchLogsClient.on(PutLogEventsCommand).resolves({
- nextSequenceToken: '',
- rejectedLogEventsInfo: {},
- });
- });
-
- afterEach(() => {
- mockCloudWatchLogsClient.restore();
- });
-
test('Debug', () => {
console.log = jest.fn();
diff --git a/packages/server/src/oauth/authorize.test.ts b/packages/server/src/oauth/authorize.test.ts
index 2c93c11ac5..2ff44ae0a8 100644
--- a/packages/server/src/oauth/authorize.test.ts
+++ b/packages/server/src/oauth/authorize.test.ts
@@ -13,8 +13,6 @@ import { getSystemRepo } from '../fhir/repo';
import { createTestProject, withTestContext } from '../test.setup';
import { revokeLogin } from './utils';
-jest.mock('@aws-sdk/client-sesv2');
-
describe('OAuth Authorize', () => {
const app = express();
const systemRepo = getSystemRepo();
diff --git a/packages/server/src/oauth/keys.test.ts b/packages/server/src/oauth/keys.test.ts
index d06847e766..03f20e4f10 100644
--- a/packages/server/src/oauth/keys.test.ts
+++ b/packages/server/src/oauth/keys.test.ts
@@ -2,14 +2,11 @@ import { randomUUID } from 'crypto';
import { generateKeyPair, SignJWT } from 'jose';
import { initAppServices, shutdownApp } from '../app';
import { loadTestConfig, MedplumServerConfig } from '../config';
-import { getDatabasePool } from '../database';
-import { withTestContext } from '../test.setup';
import {
generateAccessToken,
generateIdToken,
generateRefreshToken,
generateSecret,
- getJwks,
getSigningKey,
initKeys,
verifyJwt,
@@ -25,25 +22,6 @@ describe('Keys', () => {
await shutdownApp();
});
- test('Init keys', () =>
- withTestContext(async () => {
- const config = await loadTestConfig();
-
- // First, delete all existing keys
- await getDatabasePool().query('DELETE FROM "JsonWebKey"');
-
- // Init once
- await initKeys(config);
- const jwks1 = getJwks();
- expect(jwks1.keys.length).toBe(1);
-
- // Init again
- await initKeys(config);
- const jwks2 = getJwks();
- expect(jwks2.keys.length).toBe(1);
- expect(jwks2.keys[0].kid).toEqual(jwks2.keys[0].kid);
- }));
-
test('Missing issuer', async () => {
const config = await loadTestConfig();
delete (config as any).issuer;
diff --git a/packages/server/src/oauth/middleware.ts b/packages/server/src/oauth/middleware.ts
index 9622ec429f..49de0ff492 100644
--- a/packages/server/src/oauth/middleware.ts
+++ b/packages/server/src/oauth/middleware.ts
@@ -2,8 +2,7 @@ import { OperationOutcomeError, createReference, unauthorized } from '@medplum/c
import { ClientApplication, Login, Project, ProjectMembership, Reference } from '@medplum/fhirtypes';
import { NextFunction, Request, Response } from 'express';
import { IncomingMessage } from 'http';
-import { AuthenticatedRequestContext, getRequestContext, requestContextStore } from '../context';
-import { getRepoForLogin } from '../fhir/accesspolicy';
+import { AuthenticatedRequestContext, getRequestContext } from '../context';
import { getSystemRepo } from '../fhir/repo';
import { getClientApplicationMembership, getLoginForAccessToken, timingSafeEqualStr } from './utils';
@@ -14,65 +13,59 @@ export interface AuthState {
accessToken?: string;
}
-export function authenticateRequest(req: Request, res: Response, next: NextFunction): Promise<void> {
- return authenticateTokenImpl(req)
- .then(async ({ login, project, membership, accessToken }) => {
- const ctx = getRequestContext();
- const repo = await getRepoForLogin(login, membership, project, isExtendedMode(req));
- requestContextStore.run(
- new AuthenticatedRequestContext(ctx, login, project, membership, repo, undefined, accessToken),
- () => next()
- );
- })
- .catch(next);
+export function authenticateRequest(req: Request, res: Response, next: NextFunction): void {
+ const ctx = getRequestContext();
+ if (ctx instanceof AuthenticatedRequestContext) {
+ next();
+ } else {
+ next(new OperationOutcomeError(unauthorized));
+ }
}
-export async function authenticateTokenImpl(req: IncomingMessage): Promise<AuthState> {
- const [tokenType, token] = req.headers.authorization?.split(' ') ?? [];
+export async function authenticateTokenImpl(req: IncomingMessage): Promise<AuthState | undefined> {
+ const authHeader = req.headers.authorization;
+ if (!authHeader) {
+ return undefined;
+ }
+
+ const [tokenType, token] = authHeader.split(' ');
if (!tokenType || !token) {
- throw new OperationOutcomeError(unauthorized);
+ return undefined;
}
if (tokenType === 'Bearer') {
- return authenticateBearerToken(req, token);
+ return getLoginForAccessToken(token);
}
+
if (tokenType === 'Basic') {
return authenticateBasicAuth(req, token);
}
- throw new OperationOutcomeError(unauthorized);
-}
-function authenticateBearerToken(req: IncomingMessage, token: string): Promise<AuthState> {
- return getLoginForAccessToken(token).catch(() => {
- throw new OperationOutcomeError(unauthorized);
- });
+ return undefined;
}
-async function authenticateBasicAuth(req: IncomingMessage, token: string): Promise<AuthState> {
+async function authenticateBasicAuth(req: IncomingMessage, token: string): Promise {
const credentials = Buffer.from(token, 'base64').toString('ascii');
const [username, password] = credentials.split(':');
if (!username || !password) {
- throw new OperationOutcomeError(unauthorized);
+ return undefined;
}
const systemRepo = getSystemRepo();
- let client = undefined;
+ let client: ClientApplication;
try {
client = await systemRepo.readResource<ClientApplication>('ClientApplication', username);
} catch (err) {
- throw new OperationOutcomeError(unauthorized);
- }
- if (!client) {
- throw new OperationOutcomeError(unauthorized);
+ return undefined;
}
if (!timingSafeEqualStr(client.secret as string, password)) {
- throw new OperationOutcomeError(unauthorized);
+ return undefined;
}
const membership = await getClientApplicationMembership(client);
if (!membership) {
- throw new OperationOutcomeError(unauthorized);
+ return undefined;
}
const project = await systemRepo.readReference(membership.project as Reference);
@@ -86,6 +79,6 @@ async function authenticateBasicAuth(req: IncomingMessage, token: string): Promi
return { login, project, membership };
}
-function isExtendedMode(req: Request): boolean {
+export function isExtendedMode(req: Request): boolean {
return req.headers['x-medplum'] === 'extended';
}
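The net effect of this middleware change: authenticateTokenImpl now resolves to AuthState | undefined instead of throwing, and authenticateRequest only checks that an AuthenticatedRequestContext was already established upstream. A minimal sketch of how a caller could consume the new return shape, reusing the pieces shown above (the attachAuthState name and the module paths are illustrative; the real wiring lives in context middleware outside this diff):

    import { NextFunction, Request, Response } from 'express';
    import { AuthenticatedRequestContext, getRequestContext, requestContextStore } from './context';
    import { getRepoForLogin } from './fhir/accesspolicy';
    import { authenticateTokenImpl, isExtendedMode } from './oauth/middleware';

    // Sketch: build the authenticated context once, before route-level authenticateRequest runs.
    export async function attachAuthState(req: Request, _res: Response, next: NextFunction): Promise<void> {
      const authState = await authenticateTokenImpl(req); // undefined on any auth failure; no throw
      if (!authState) {
        next(); // authenticateRequest will reject later with `unauthorized`
        return;
      }
      const { login, project, membership, accessToken } = authState;
      const repo = await getRepoForLogin(login, membership, project, isExtendedMode(req));
      requestContextStore.run(
        new AuthenticatedRequestContext(getRequestContext(), login, project, membership, repo, undefined, accessToken),
        () => next()
      );
    }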
diff --git a/packages/server/src/oauth/routes.ts b/packages/server/src/oauth/routes.ts
index 246bcbe138..ceaa7999c9 100644
--- a/packages/server/src/oauth/routes.ts
+++ b/packages/server/src/oauth/routes.ts
@@ -1,6 +1,5 @@
import cookieParser from 'cookie-parser';
import { Router } from 'express';
-import { getRateLimiter } from '../ratelimit';
import { authorizeGetHandler, authorizePostHandler } from './authorize';
import { logoutHandler } from './logout';
import { authenticateRequest } from './middleware';
@@ -8,7 +7,6 @@ import { tokenHandler } from './token';
import { userInfoHandler } from './userinfo';
export const oauthRouter = Router();
-oauthRouter.use(getRateLimiter());
oauthRouter.get('/authorize', cookieParser(), authorizeGetHandler);
oauthRouter.post('/authorize', cookieParser(), authorizePostHandler);
oauthRouter.post('/token', tokenHandler);
diff --git a/packages/server/src/oauth/token.test.ts b/packages/server/src/oauth/token.test.ts
index 5ec290707e..2725515b30 100644
--- a/packages/server/src/oauth/token.test.ts
+++ b/packages/server/src/oauth/token.test.ts
@@ -22,7 +22,6 @@ import { createTestProject, withTestContext } from '../test.setup';
import { generateSecret } from './keys';
import { hashCode } from './token';
-jest.mock('@aws-sdk/client-sesv2');
jest.mock('jose', () => {
const core = jest.requireActual('@medplum/core');
const original = jest.requireActual('jose');
diff --git a/packages/server/src/oauth/utils.ts b/packages/server/src/oauth/utils.ts
index 1e33aa0706..febac12823 100644
--- a/packages/server/src/oauth/utils.ts
+++ b/packages/server/src/oauth/utils.ts
@@ -11,7 +11,6 @@ import {
ProfileResource,
resolveId,
tooManyRequests,
- unauthorized,
} from '@medplum/core';
import {
AccessPolicy,
@@ -792,8 +791,14 @@ export async function verifyMultipleMatchingException(
* @param accessToken - The access token as provided by the client.
* @returns On success, returns the login, membership, and project. On failure, throws an error.
*/
-export async function getLoginForAccessToken(accessToken: string): Promise<AuthState> {
- const verifyResult = await verifyJwt(accessToken);
+export async function getLoginForAccessToken(accessToken: string): Promise<AuthState | undefined> {
+ let verifyResult;
+ try {
+ verifyResult = await verifyJwt(accessToken);
+ } catch (err) {
+ return undefined;
+ }
+
const claims = verifyResult.payload as MedplumAccessTokenClaims;
const systemRepo = getSystemRepo();
@@ -801,11 +806,11 @@ export async function getLoginForAccessToken(accessToken: string): Promise<AuthState> {
try {
login = await systemRepo.readResource<Login>('Login', claims.login_id);
} catch (err) {
- throw new OperationOutcomeError(unauthorized);
+ return undefined;
}
if (!login?.membership || login.revoked) {
- throw new OperationOutcomeError(unauthorized);
+ return undefined;
}
const membership = await systemRepo.readReference(login.membership);
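With this change, getLoginForAccessToken reports failure by resolving to undefined rather than throwing unauthorized, leaving the HTTP-level error to the caller. A caller-side sketch (the requireAuthState helper is hypothetical, not part of this diff):

    import { OperationOutcomeError, unauthorized } from '@medplum/core';
    import { AuthState } from './middleware';
    import { getLoginForAccessToken } from './utils';

    // Restores throw-on-failure semantics where an HTTP 401 is the right response.
    export async function requireAuthState(accessToken: string): Promise<AuthState> {
      const authState = await getLoginForAccessToken(accessToken);
      if (!authState) {
        throw new OperationOutcomeError(unauthorized);
      }
      return authState;
    }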
diff --git a/packages/server/src/ratelimit.ts b/packages/server/src/ratelimit.ts
index b1ab00d979..0e0c27a6b9 100644
--- a/packages/server/src/ratelimit.ts
+++ b/packages/server/src/ratelimit.ts
@@ -1,10 +1,15 @@
+import { tooManyRequests } from '@medplum/core';
+import { Request } from 'express';
import rateLimit, { MemoryStore, RateLimitRequestHandler } from 'express-rate-limit';
-import { OperationOutcomeError, tooManyRequests } from '@medplum/core';
+import { AuthenticatedRequestContext, getRequestContext } from './context';
/*
* MemoryStore must be shutdown to cleanly stop the server.
*/
+const DEFAULT_RATE_LIMIT_PER_15_MINUTES = 15 * 60 * 1000; // 1000 requests per second
+const DEFAULT_AUTH_RATE_LIMIT_PER_15_MINUTES = 600;
+
let handler: RateLimitRequestHandler | undefined = undefined;
let store: MemoryStore | undefined = undefined;
@@ -13,12 +18,10 @@ export function getRateLimiter(): RateLimitRequestHandler {
store = new MemoryStore();
handler = rateLimit({
windowMs: 15 * 60 * 1000, // 15 minutes
- max: 600, // limit each IP to 600 requests per windowMs
- validate: false, // Ignore X-Forwarded-For warnings
+ limit: getRateLimitForRequest,
+ validate: true,
store,
- handler: (_, __, next) => {
- next(new OperationOutcomeError(tooManyRequests));
- },
+ message: tooManyRequests,
});
}
return handler;
@@ -31,3 +34,22 @@ export function closeRateLimiter(): void {
handler = undefined;
}
}
+
+async function getRateLimitForRequest(req: Request): Promise<number> {
+ // Check if this is an "auth URL" (e.g., /auth/login, /auth/register, /oauth2/token)
+ // These URLs have a different rate limit than the rest of the API
+ const authUrl = req.originalUrl.startsWith('/auth/') || req.originalUrl.startsWith('/oauth2/');
+
+ let limit = authUrl ? DEFAULT_AUTH_RATE_LIMIT_PER_15_MINUTES : DEFAULT_RATE_LIMIT_PER_15_MINUTES;
+
+ const ctx = getRequestContext();
+ if (ctx instanceof AuthenticatedRequestContext) {
+ const systemSettingName = authUrl ? 'authRateLimit' : 'rateLimit';
+ const systemSetting = ctx.project.systemSetting?.find((s) => s.name === systemSettingName);
+ if (systemSetting?.valueInteger) {
+ limit = systemSetting.valueInteger;
+ }
+ }
+
+ return limit;
+}
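For reference, the new defaults work out to 15 × 60 × 1000 = 900,000 requests per 15-minute window (roughly 1,000 requests per second) for general traffic, and 600 requests per window for /auth/* and /oauth2/* URLs. For authenticated requests, either default can be raised per project through the systemSetting entries that getRateLimitForRequest reads. A sketch of such a Project resource (the setting names come from the code above; the values are illustrative):

    import { Project } from '@medplum/fhirtypes';

    // Overrides picked up by getRateLimitForRequest above; the numbers are examples only.
    export const highTrafficProject: Project = {
      resourceType: 'Project',
      name: 'High-traffic project',
      systemSetting: [
        { name: 'rateLimit', valueInteger: 2_000_000 }, // non-auth URLs, per 15-minute window
        { name: 'authRateLimit', valueInteger: 1_000 }, // /auth/* and /oauth2/* URLs, per 15-minute window
      ],
    };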
diff --git a/packages/server/src/redis.test.ts b/packages/server/src/redis.test.ts
index 1dc387c67d..a132ffb2b1 100644
--- a/packages/server/src/redis.test.ts
+++ b/packages/server/src/redis.test.ts
@@ -1,9 +1,15 @@
-import { loadTestConfig } from './config';
-import { closeRedis, getRedis, initRedis } from './redis';
+import { Redis } from 'ioredis';
+import { MedplumServerConfig, loadTestConfig } from './config';
+import { closeRedis, getRedis, getRedisSubscriber, getRedisSubscriberCount, initRedis } from './redis';
describe('Redis', () => {
+ let config: MedplumServerConfig;
+
+ beforeAll(async () => {
+ config = await loadTestConfig();
+ });
+
test('Get redis', async () => {
- const config = await loadTestConfig();
initRedis(config.redis);
expect(getRedis()).toBeDefined();
await closeRedis();
@@ -13,4 +19,70 @@ describe('Redis', () => {
expect(() => getRedis()).toThrow();
await expect(closeRedis()).resolves.toBeUndefined();
});
+
+ describe('getRedisSubscriber', () => {
+ test('Not initialized', async () => {
+ await closeRedis();
+ expect(() => getRedisSubscriber()).toThrow();
+ });
+
+ test('Getting a subscriber', async () => {
+ initRedis(config.redis);
+ const subscriber = getRedisSubscriber();
+ expect(subscriber).toBeInstanceOf(Redis);
+ await closeRedis();
+ });
+
+ test('Hanging subscriber still disconnects on closeRedis', async () => {
+ initRedis(config.redis);
+ const subscriber = getRedisSubscriber();
+
+ let reject: (err: Error) => void;
+ const closePromise = new Promise<void>((resolve, _reject) => {
+ subscriber.on('end', () => {
+ resolve();
+ });
+ reject = _reject;
+ });
+
+ expect(subscriber).toBeDefined();
+ await closeRedis();
+
+ const timer = setTimeout(() => {
+ reject(new Error('Timeout'));
+ }, 3500);
+
+ await expect(closePromise).resolves.toBeUndefined();
+ clearTimeout(timer);
+ });
+
+ test('Disconnecting a subscriber removes it from the list', async () => {
+ initRedis(config.redis);
+ expect(getRedisSubscriberCount()).toEqual(0);
+ const subscriber = getRedisSubscriber();
+ expect(getRedisSubscriberCount()).toEqual(1);
+ subscriber.disconnect();
+
+ let reject: (err: Error) => void;
+ const closePromise = new Promise<void>((resolve, _reject) => {
+ subscriber.on('end', () => {
+ resolve();
+ });
+ reject = _reject;
+ });
+
+ expect(subscriber).toBeDefined();
+ await closeRedis();
+
+ const timer = setTimeout(() => {
+ reject(new Error('Timeout'));
+ }, 3500);
+
+ await expect(closePromise).resolves.toBeUndefined();
+ expect(getRedisSubscriberCount()).toEqual(0);
+ clearTimeout(timer);
+
+ await closeRedis();
+ });
+ });
});
diff --git a/packages/server/src/redis.ts b/packages/server/src/redis.ts
index 0ae0bbedf3..c99db6b296 100644
--- a/packages/server/src/redis.ts
+++ b/packages/server/src/redis.ts
@@ -3,6 +3,7 @@ import Redis from 'ioredis';
import { MedplumRedisConfig } from './config';
let redis: Redis | undefined = undefined;
+let redisSubscribers: Set<Redis> | undefined = undefined;
export function initRedis(config: MedplumRedisConfig): void {
redis = new Redis(config);
@@ -11,15 +12,66 @@ export function initRedis(config: MedplumRedisConfig): void {
export async function closeRedis(): Promise<void> {
if (redis) {
const tmpRedis = redis;
+ const tmpSubscribers = redisSubscribers;
redis = undefined;
+ redisSubscribers = undefined;
+ if (tmpSubscribers) {
+ for (const subscriber of tmpSubscribers) {
+ subscriber.disconnect();
+ }
+ }
await tmpRedis.quit();
await sleep(100);
}
}
-export function getRedis(): Redis {
+/**
+ * Gets the global `Redis` instance.
+ *
+ * The `duplicate` method is intentionally omitted to prevent accidental calling of `Redis.quit`
+ * which can cause the global instance to fail to shutdown gracefully later on.
+ *
+ * Instead {@link getRedisSubscriber} should be called to obtain a `Redis` instance for use as a subscriber-mode client.
+ *
+ * @returns The global `Redis` instance.
+ */
+export function getRedis(): Redis & { duplicate: never } {
if (!redis) {
throw new Error('Redis not initialized');
}
+ // @ts-expect-error We don't want anyone to call `duplicate` on the global Redis instance.
+ // This is because we want to gracefully `quit`, and duplicated Redis instances would not be tracked for cleanup.
return redis;
}
+
+/**
+ * Gets a `Redis` instance for use in subscriber mode.
+ *
+ * The synchronous `.disconnect()` on this instance should be called instead of `.quit()` when you want to disconnect.
+ *
+ * @returns A `Redis` instance to use as a subscriber client.
+ */
+export function getRedisSubscriber(): Redis & { quit: never } {
+ if (!redis) {
+ throw new Error('Redis not initialized');
+ }
+ if (!redisSubscribers) {
+ redisSubscribers = new Set();
+ }
+
+ const subscriber = redis.duplicate();
+ redisSubscribers.add(subscriber);
+
+ subscriber.on('end', () => {
+ redisSubscribers?.delete(subscriber);
+ });
+
+ return subscriber as Redis & { quit: never };
+}
+
+/**
+ * @returns The amount of active `Redis` subscriber instances.
+ */
+export function getRedisSubscriberCount(): number {
+ return redisSubscribers?.size ?? 0;
+}
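A short usage sketch of the new split between the shared client (publishing and regular commands) and tracked subscriber-mode clients; the echoOnce helper is hypothetical, but the calls mirror how the WebSocket handlers further down use these functions:

    import { randomUUID } from 'crypto';
    import { getRedis, getRedisSubscriber } from './redis';

    // Round-trip a message: publish on the shared client, receive on a dedicated subscriber.
    export async function echoOnce(payload: string): Promise<string> {
      const channel = randomUUID();
      const subscriber = getRedisSubscriber(); // tracked; closeRedis() disconnects any stragglers
      await subscriber.subscribe(channel);
      const received = new Promise<string>((resolve) => {
        subscriber.on('message', (_channel: string, message: string) => resolve(message));
      });
      await getRedis().publish(channel, payload);
      const message = await received;
      subscriber.disconnect(); // disconnect(), not quit() -- quit() is disallowed by the return type
      return message;
    }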
diff --git a/packages/server/src/seed.ts b/packages/server/src/seed.ts
index a63f1f1f9c..18016632e2 100644
--- a/packages/server/src/seed.ts
+++ b/packages/server/src/seed.ts
@@ -4,18 +4,28 @@ import { NIL as nullUuid, v5 } from 'uuid';
import { bcryptHashPassword } from './auth/utils';
import { getSystemRepo } from './fhir/repo';
import { globalLogger } from './logger';
+import { RebuildOptions } from './seeds/common';
import { rebuildR4SearchParameters } from './seeds/searchparameters';
import { rebuildR4StructureDefinitions } from './seeds/structuredefinitions';
import { rebuildR4ValueSets } from './seeds/valuesets';
export const r4ProjectId = v5('R4', nullUuid);
-export async function seedDatabase(): Promise<void> {
+/**
+ * Seeds the database with system resources.
+ *
+ * @param options - Optional options for seeding the database.
+ * @returns A Promise that resolves when seeding is done.
+ */
+export async function seedDatabase(options?: RebuildOptions): Promise<void> {
if (await isSeeded()) {
globalLogger.info('Already seeded');
return;
}
+ performance.mark('Starting to seed');
+ globalLogger.info('Seeding database...');
+
const systemRepo = getSystemRepo();
const [firstName, lastName, email] = ['Medplum', 'Admin', 'admin@example.com'];
@@ -70,9 +80,38 @@ export async function seedDatabase(): Promise<void> {
admin: true,
});
- await rebuildR4StructureDefinitions();
- await rebuildR4ValueSets();
- await rebuildR4SearchParameters();
+ globalLogger.info('Rebuilding system resources...');
+ performance.mark('Starting rebuilds');
+
+ performance.mark('Starting rebuildR4StructureDefinitions');
+ await rebuildR4StructureDefinitions({ parallel: true, ...options });
+ const sdStats = performance.measure(
+ 'Finished rebuildR4StructureDefinitions',
+ 'Starting rebuildR4StructureDefinitions'
+ );
+ globalLogger.info('Finished rebuildR4StructureDefinitions', {
+ duration: `${Math.ceil(sdStats.duration)} ms`,
+ });
+
+ performance.mark('Starting rebuildR4ValueSets');
+ await rebuildR4ValueSets({ parallel: true, ...options });
+ const valueSetsStats = performance.measure('Finished rebuildR4ValueSets', 'Starting rebuildR4ValueSets');
+ globalLogger.info('Finished rebuildR4ValueSets', { duration: `${Math.ceil(valueSetsStats.duration)} ms` });
+
+ performance.mark('Starting rebuildR4SearchParameters');
+ await rebuildR4SearchParameters({ parallel: true, ...options });
+ const searchParamsStats = performance.measure(
+ 'Finished rebuildR4SearchParameters',
+ 'Starting rebuildR4SearchParameters'
+ );
+ globalLogger.info('Finished rebuildR4SearchParameters', {
+ duration: `${Math.ceil(searchParamsStats.duration)} ms`,
+ });
+
+ const rebuildStats = performance.measure('Finished rebuilds', 'Starting rebuilds');
+ globalLogger.info('Finished rebuilds', { duration: `${Math.ceil(rebuildStats.duration)} ms` });
+ const seedingStats = performance.measure('Finished seeding', 'Starting to seed');
+ globalLogger.info('Finished seeding', { duration: `${Math.ceil(seedingStats.duration)} ms` });
}
/**
diff --git a/packages/server/src/seeds/common.ts b/packages/server/src/seeds/common.ts
new file mode 100644
index 0000000000..6d9db22550
--- /dev/null
+++ b/packages/server/src/seeds/common.ts
@@ -0,0 +1,16 @@
+export interface RebuildOptions {
+ /**
+ * Whether the resources should be created in parallel.
+ *
+ * **WARNING: Can be CPU intensive and/or clog up the connection pool.**
+ */
+ parallel: boolean;
+}
+
+const defaultOptions = {
+ parallel: false,
+};
+
+export function buildRebuildOptions(options?: Partial<RebuildOptions>): RebuildOptions {
+ return { ...defaultOptions, ...options };
+}
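seedDatabase above and the three rebuild functions below all funnel their options through buildRebuildOptions, so omitting options preserves the previous serial behavior while the seeding path opts into parallel mode. A sketch of the resulting call shapes (module paths are illustrative):

    import { seedDatabase } from '../seed';
    import { buildRebuildOptions } from './common';
    import { rebuildR4ValueSets } from './valuesets';

    export async function rebuildExamples(): Promise<void> {
      buildRebuildOptions();                   // { parallel: false } -- the default
      buildRebuildOptions({ parallel: true }); // { parallel: true }

      // seedDatabase spreads { parallel: true, ...options }, so a caller such as a serial seed test
      // can still force the slower, connection-pool-friendly path:
      await seedDatabase({ parallel: false });

      // The individual rebuild functions accept Partial<RebuildOptions> directly:
      await rebuildR4ValueSets({ parallel: true });
    }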
diff --git a/packages/server/src/seeds/searchparameters.ts b/packages/server/src/seeds/searchparameters.ts
index 90496b08ef..c19659df70 100644
--- a/packages/server/src/seeds/searchparameters.ts
+++ b/packages/server/src/seeds/searchparameters.ts
@@ -4,19 +4,32 @@ import { getDatabasePool } from '../database';
import { Repository, getSystemRepo } from '../fhir/repo';
import { globalLogger } from '../logger';
import { r4ProjectId } from '../seed';
+import { RebuildOptions, buildRebuildOptions } from './common';
/**
* Creates all SearchParameter resources.
+ * @param options - Optional options for how rebuild should be done.
*/
-export async function rebuildR4SearchParameters(): Promise<void> {
+export async function rebuildR4SearchParameters(options?: Partial<RebuildOptions>): Promise<void> {
+ const finalOptions = buildRebuildOptions(options);
const client = getDatabasePool();
await client.query('DELETE FROM "SearchParameter" WHERE "projectId" = $1', [r4ProjectId]);
const systemRepo = getSystemRepo();
- for (const filename of SEARCH_PARAMETER_BUNDLE_FILES) {
- for (const entry of readJson(filename).entry as BundleEntry[]) {
- await createParameter(systemRepo, entry.resource as SearchParameter);
+ if (finalOptions.parallel) {
+ const promises = [];
+ for (const filename of SEARCH_PARAMETER_BUNDLE_FILES) {
+ for (const entry of readJson(filename).entry as BundleEntry[]) {
+ promises.push(createParameter(systemRepo, entry.resource as SearchParameter));
+ }
+ }
+ await Promise.all(promises);
+ } else {
+ for (const filename of SEARCH_PARAMETER_BUNDLE_FILES) {
+ for (const entry of readJson(filename).entry as BundleEntry[]) {
+ await createParameter(systemRepo, entry.resource as SearchParameter);
+ }
}
}
}
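The same parallel-versus-serial branching recurs in the StructureDefinition and ValueSet seeders below; the shared shape boils down to a helper along these lines (a possible refactor sketch, not something this diff introduces):

    import { RebuildOptions } from './common';

    // Run one async task per item, either sequentially or all at once via Promise.all.
    export async function runForEach<T>(
      items: T[],
      options: RebuildOptions,
      task: (item: T) => Promise<void>
    ): Promise<void> {
      if (options.parallel) {
        await Promise.all(items.map((item) => task(item)));
      } else {
        for (const item of items) {
          await task(item);
        }
      }
    }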
diff --git a/packages/server/src/seeds/structuredefinitions.ts b/packages/server/src/seeds/structuredefinitions.ts
index a5595d78a1..9baa37112d 100644
--- a/packages/server/src/seeds/structuredefinitions.ts
+++ b/packages/server/src/seeds/structuredefinitions.ts
@@ -4,41 +4,69 @@ import { getDatabasePool } from '../database';
import { Repository, getSystemRepo } from '../fhir/repo';
import { globalLogger } from '../logger';
import { r4ProjectId } from '../seed';
+import { RebuildOptions, buildRebuildOptions } from './common';
/**
* Creates all StructureDefinition resources.
+ * @param options - Optional options for how rebuild should be done.
*/
-export async function rebuildR4StructureDefinitions(): Promise<void> {
+export async function rebuildR4StructureDefinitions(options?: Partial<RebuildOptions>): Promise<void> {
+ const finalOptions = buildRebuildOptions(options) as RebuildOptions;
const client = getDatabasePool();
await client.query(`DELETE FROM "StructureDefinition" WHERE "projectId" = $1`, [r4ProjectId]);
const systemRepo = getSystemRepo();
- await createStructureDefinitionsForBundle(systemRepo, readJson('fhir/r4/profiles-resources.json') as Bundle);
- await createStructureDefinitionsForBundle(systemRepo, readJson('fhir/r4/profiles-medplum.json') as Bundle);
- await createStructureDefinitionsForBundle(systemRepo, readJson('fhir/r4/profiles-others.json') as Bundle);
+ if (finalOptions.parallel) {
+ await Promise.all([
+ createStructureDefinitionsForBundleParallel(systemRepo, readJson('fhir/r4/profiles-resources.json') as Bundle),
+ createStructureDefinitionsForBundleParallel(systemRepo, readJson('fhir/r4/profiles-medplum.json') as Bundle),
+ createStructureDefinitionsForBundleParallel(systemRepo, readJson('fhir/r4/profiles-others.json') as Bundle),
+ ]);
+ } else {
+ await createStructureDefinitionsForBundleSerial(systemRepo, readJson('fhir/r4/profiles-resources.json') as Bundle);
+ await createStructureDefinitionsForBundleSerial(systemRepo, readJson('fhir/r4/profiles-medplum.json') as Bundle);
+ await createStructureDefinitionsForBundleSerial(systemRepo, readJson('fhir/r4/profiles-others.json') as Bundle);
+ }
}
-async function createStructureDefinitionsForBundle(
+async function createStructureDefinitionsForBundleParallel(
systemRepo: Repository,
structureDefinitions: Bundle
): Promise<void> {
+ const promises = [];
for (const entry of structureDefinitions.entry as BundleEntry[]) {
const resource = entry.resource as Resource;
+ if (resource.resourceType === 'StructureDefinition' && resource.name) {
+ promises.push(createAndLogStructureDefinition(systemRepo, resource));
+ }
+ }
+ await Promise.all(promises);
+}
+async function createStructureDefinitionsForBundleSerial(
+ systemRepo: Repository,
+ structureDefinitions: Bundle
+): Promise<void> {
+ for (const entry of structureDefinitions.entry as BundleEntry[]) {
+ const resource = entry.resource as Resource;
if (resource.resourceType === 'StructureDefinition' && resource.name) {
- globalLogger.debug('StructureDefinition: ' + resource.name);
- const result = await systemRepo.createResource({
- ...resource,
- meta: {
- ...resource.meta,
- project: r4ProjectId,
- lastUpdated: undefined,
- versionId: undefined,
- },
- text: undefined,
- differential: undefined,
- });
- globalLogger.debug('Created: ' + result.id);
+ await createAndLogStructureDefinition(systemRepo, resource);
}
}
}
+
+async function createAndLogStructureDefinition(systemRepo: Repository, resource: StructureDefinition): Promise<void> {
+ globalLogger.debug('[StructureDefinition] creation started: ' + resource.name);
+ const result = await systemRepo.createResource({
+ ...resource,
+ meta: {
+ ...resource.meta,
+ project: r4ProjectId,
+ lastUpdated: undefined,
+ versionId: undefined,
+ },
+ text: undefined,
+ differential: undefined,
+ });
+ globalLogger.debug(`[StructureDefinition] creation finished: ${result.name} - ID: ${result.id}`);
+}
diff --git a/packages/server/src/seeds/valuesets.ts b/packages/server/src/seeds/valuesets.ts
index d6397f9e98..aa6792cccb 100644
--- a/packages/server/src/seeds/valuesets.ts
+++ b/packages/server/src/seeds/valuesets.ts
@@ -3,35 +3,54 @@ import { readJson } from '@medplum/definitions';
import { Bundle, BundleEntry, CodeSystem, ValueSet } from '@medplum/fhirtypes';
import { Repository, getSystemRepo } from '../fhir/repo';
import { r4ProjectId } from '../seed';
+import { RebuildOptions, buildRebuildOptions } from './common';
/**
* Imports all built-in ValueSets and CodeSystems into the database.
+ * @param options - Optional options for how rebuild should be done.
*/
-export async function rebuildR4ValueSets(): Promise<void> {
+export async function rebuildR4ValueSets(options?: Partial<RebuildOptions>): Promise<void> {
+ const finalOptions = buildRebuildOptions(options) as RebuildOptions;
const systemRepo = getSystemRepo();
const files = ['v2-tables.json', 'v3-codesystems.json', 'valuesets.json', 'valuesets-medplum.json'];
for (const file of files) {
const bundle = readJson('fhir/r4/' + file) as Bundle;
- for (const entry of bundle.entry as BundleEntry[]) {
- const resource = entry.resource as CodeSystem | ValueSet;
- await deleteExisting(systemRepo, resource, r4ProjectId);
- await systemRepo.createResource({
- ...resource,
- meta: {
- ...resource.meta,
- project: r4ProjectId,
- lastUpdated: undefined,
- versionId: undefined,
- },
- });
+ if (finalOptions.parallel) {
+ const promises = [];
+ for (const entry of bundle.entry as BundleEntry[]) {
+ promises.push(overwriteResource(systemRepo, entry.resource as CodeSystem | ValueSet, finalOptions));
+ }
+ await Promise.all(promises);
+ } else {
+ for (const entry of bundle.entry as BundleEntry[]) {
+ await overwriteResource(systemRepo, entry.resource as CodeSystem | ValueSet, finalOptions);
+ }
}
}
}
+async function overwriteResource(
+ systemRepo: Repository,
+ resource: CodeSystem | ValueSet,
+ options: RebuildOptions
+): Promise<void> {
+ await deleteExisting(systemRepo, resource, r4ProjectId, options);
+ await systemRepo.createResource({
+ ...resource,
+ meta: {
+ ...resource.meta,
+ project: r4ProjectId,
+ lastUpdated: undefined,
+ versionId: undefined,
+ },
+ });
+}
+
async function deleteExisting(
systemRepo: Repository,
resource: CodeSystem | ValueSet,
- projectId: string
+ projectId: string,
+ options: RebuildOptions
): Promise<void> {
const bundle = await systemRepo.search({
resourceType: resource.resourceType,
@@ -41,9 +60,18 @@ async function deleteExisting(
],
});
if (bundle.entry && bundle.entry.length > 0) {
- for (const entry of bundle.entry) {
- const existing = entry.resource as CodeSystem | ValueSet;
- await systemRepo.deleteResource(existing.resourceType, existing.id as string);
+ if (options.parallel) {
+ const promises = [];
+ for (const entry of bundle.entry) {
+ const existing = entry.resource as CodeSystem | ValueSet;
+ promises.push(systemRepo.deleteResource(existing.resourceType, existing.id as string));
+ }
+ await Promise.all(promises);
+ } else {
+ for (const entry of bundle.entry) {
+ const existing = entry.resource as CodeSystem | ValueSet;
+ await systemRepo.deleteResource(existing.resourceType, existing.id as string);
+ }
}
}
}
diff --git a/packages/server/src/subscriptions/websockets.test.ts b/packages/server/src/subscriptions/websockets.test.ts
index 8b3ec68548..df23569fbb 100644
--- a/packages/server/src/subscriptions/websockets.test.ts
+++ b/packages/server/src/subscriptions/websockets.test.ts
@@ -36,7 +36,6 @@ describe('WebSockets Subscriptions', () => {
config = await loadTestConfig();
config.heartbeatEnabled = false;
server = await initApp(app, config);
- await getRedis().flushdb();
const result = await withTestContext(() =>
createTestProject({
@@ -270,7 +269,6 @@ describe('Subscription Heartbeat', () => {
config = await loadTestConfig();
config.heartbeatMilliseconds = 25;
server = await initApp(app, config);
- await getRedis().flushdb();
const result = await withTestContext(() =>
createTestProject({
diff --git a/packages/server/src/subscriptions/websockets.ts b/packages/server/src/subscriptions/websockets.ts
index 0a07686fd7..910499cb58 100644
--- a/packages/server/src/subscriptions/websockets.ts
+++ b/packages/server/src/subscriptions/websockets.ts
@@ -10,7 +10,7 @@ import { getFullUrl } from '../fhir/response';
import { heartbeat } from '../heartbeat';
import { globalLogger } from '../logger';
import { verifyJwt } from '../oauth/keys';
-import { getRedis } from '../redis';
+import { getRedis, getRedisSubscriber } from '../redis';
interface BaseSubscriptionClientMsg {
type: string;
@@ -44,7 +44,7 @@ export async function handleR4SubscriptionConnection(socket: ws.WebSocket): Promise<void> {
// According to Redis documentation: http://redis.io/commands/subscribe
// Once the client enters the subscribed state it is not supposed to issue any other commands,
// except for additional SUBSCRIBE, PSUBSCRIBE, UNSUBSCRIBE and PUNSUBSCRIBE commands.
- redisSubscriber = redis.duplicate();
+ redisSubscriber = getRedisSubscriber();
redisSubscriber.on('message', (channel: string, message: string) => {
globalLogger.debug('[WS] redis message', { channel, message });
diff --git a/packages/server/src/util/cloudwatch.test.ts b/packages/server/src/util/cloudwatch.test.ts
index a902d86208..4c0ed10381 100644
--- a/packages/server/src/util/cloudwatch.test.ts
+++ b/packages/server/src/util/cloudwatch.test.ts
@@ -4,9 +4,8 @@ import {
CreateLogStreamCommand,
PutLogEventsCommand,
} from '@aws-sdk/client-cloudwatch-logs';
-import { mockClient, AwsClientStub } from 'aws-sdk-client-mock';
+import { AwsClientStub, mockClient } from 'aws-sdk-client-mock';
import 'aws-sdk-client-mock-jest';
-
import { loadTestConfig } from '../config';
import { waitFor } from '../test.setup';
import { CloudWatchLogger } from './cloudwatch';
diff --git a/packages/server/src/websockets.test.ts b/packages/server/src/websockets.test.ts
index e3f7333b29..cfbaa8f15f 100644
--- a/packages/server/src/websockets.test.ts
+++ b/packages/server/src/websockets.test.ts
@@ -6,7 +6,6 @@ import request from 'superwstest';
import WebSocket from 'ws';
import { initApp, shutdownApp } from './app';
import { MedplumServerConfig, loadTestConfig } from './config';
-import { getRedis } from './redis';
import { withTestContext } from './test.setup';
describe('WebSockets', () => {
@@ -18,7 +17,6 @@ describe('WebSockets', () => {
app = express();
config = await loadTestConfig();
server = await initApp(app, config);
- await getRedis().flushdb();
await new Promise<void>((resolve) => {
server.listen(0, 'localhost', 511, resolve);
diff --git a/packages/server/src/websockets.ts b/packages/server/src/websockets.ts
index 090fddaa2b..a3f528c495 100644
--- a/packages/server/src/websockets.ts
+++ b/packages/server/src/websockets.ts
@@ -8,7 +8,7 @@ import { getConfig } from './config';
import { RequestContext, requestContextStore } from './context';
import { handleFhircastConnection } from './fhircast/websocket';
import { globalLogger } from './logger';
-import { getRedis } from './redis';
+import { getRedis, getRedisSubscriber } from './redis';
import { handleR4SubscriptionConnection } from './subscriptions/websockets';
const handlerMap = new Map<string, (socket: ws.WebSocket) => Promise<void>>();
@@ -104,7 +104,7 @@ async function handleEchoConnection(socket: ws.WebSocket): Promise {
// According to Redis documentation: http://redis.io/commands/subscribe
// Once the client enters the subscribed state it is not supposed to issue any other commands,
// except for additional SUBSCRIBE, PSUBSCRIBE, UNSUBSCRIBE and PUNSUBSCRIBE commands.
- const redisSubscriber = getRedis().duplicate();
+ const redisSubscriber = getRedisSubscriber();
const channel = randomUUID();
await redisSubscriber.subscribe(channel);
diff --git a/packages/server/src/workers/subscription.test.ts b/packages/server/src/workers/subscription.test.ts
index c15139c2d7..8b726802f8 100644
--- a/packages/server/src/workers/subscription.test.ts
+++ b/packages/server/src/workers/subscription.test.ts
@@ -23,10 +23,9 @@ import { createHmac, randomUUID } from 'crypto';
import fetch from 'node-fetch';
import { initAppServices, shutdownApp } from '../app';
import { loadTestConfig } from '../config';
-import { getDatabasePool } from '../database';
import { Repository, getSystemRepo } from '../fhir/repo';
import { globalLogger } from '../logger';
-import { getRedis } from '../redis';
+import { getRedisSubscriber } from '../redis';
import { createTestProject, withTestContext } from '../test.setup';
import { AuditEventOutcome } from '../util/auditevent';
import { closeSubscriptionWorker, execSubscriptionJob, getSubscriptionQueue } from './subscription';
@@ -40,28 +39,18 @@ describe('Subscription Worker', () => {
let mockLambdaClient: AwsClientStub;
let superAdminRepo: Repository;
- beforeEach(() => {
- mockLambdaClient = mockClient(LambdaClient);
- mockLambdaClient.on(InvokeCommand).callsFake(({ Payload }) => {
- const decoder = new TextDecoder();
- const event = JSON.parse(decoder.decode(Payload));
- const output = typeof event.input === 'string' ? event.input : JSON.stringify(event.input);
- const encoder = new TextEncoder();
-
- return {
- LogResult: `U1RBUlQgUmVxdWVzdElkOiAxNDZmY2ZjZi1jMzJiLTQzZjUtODJhNi1lZTBmMzEzMmQ4NzMgVmVyc2lvbjogJExBVEVTVAoyMDIyLTA1LTMwVDE2OjEyOjIyLjY4NVoJMTQ2ZmNmY2YtYzMyYi00M2Y1LTgyYTYtZWUwZjMxMzJkODczCUlORk8gdGVzdApFTkQgUmVxdWVzdElkOiAxNDZmY2ZjZi1jMzJiLTQzZjUtODJhNi1lZTBmMzEzMmQ4NzMKUkVQT1JUIFJlcXVlc3RJZDogMTQ2ZmNmY2YtYzMyYi00M2Y1LTgyYTYtZWUwZjMxMzJkODcz`,
- Payload: encoder.encode(output),
- };
- });
+ beforeAll(async () => {
+ const config = await loadTestConfig();
+ await initAppServices(config);
});
- afterEach(() => {
- mockLambdaClient.restore();
+ afterAll(async () => {
+ await shutdownApp();
+ await closeSubscriptionWorker(); // Double close to ensure quiet ignore
});
- beforeAll(async () => {
- const config = await loadTestConfig();
- await initAppServices(config);
+ beforeEach(async () => {
+ (fetch as unknown as jest.Mock).mockClear();
// Create one simple project with no advanced features enabled
const { client, repo: _repo } = await withTestContext(() =>
@@ -85,16 +74,23 @@ describe('Subscription Worker', () => {
projects: [botProjectDetails.project.id as string],
author: createReference(botProjectDetails.client),
});
- });
- afterAll(async () => {
- await shutdownApp();
- await closeSubscriptionWorker(); // Double close to ensure quiet ignore
+ mockLambdaClient = mockClient(LambdaClient);
+ mockLambdaClient.on(InvokeCommand).callsFake(({ Payload }) => {
+ const decoder = new TextDecoder();
+ const event = JSON.parse(decoder.decode(Payload));
+ const output = typeof event.input === 'string' ? event.input : JSON.stringify(event.input);
+ const encoder = new TextEncoder();
+
+ return {
+ LogResult: `U1RBUlQgUmVxdWVzdElkOiAxNDZmY2ZjZi1jMzJiLTQzZjUtODJhNi1lZTBmMzEzMmQ4NzMgVmVyc2lvbjogJExBVEVTVAoyMDIyLTA1LTMwVDE2OjEyOjIyLjY4NVoJMTQ2ZmNmY2YtYzMyYi00M2Y1LTgyYTYtZWUwZjMxMzJkODczCUlORk8gdGVzdApFTkQgUmVxdWVzdElkOiAxNDZmY2ZjZi1jMzJiLTQzZjUtODJhNi1lZTBmMzEzMmQ4NzMKUkVQT1JUIFJlcXVlc3RJZDogMTQ2ZmNmY2YtYzMyYi00M2Y1LTgyYTYtZWUwZjMxMzJkODcz`,
+ Payload: encoder.encode(output),
+ };
+ });
});
- beforeEach(async () => {
- await getDatabasePool().query('DELETE FROM "Subscription"');
- (fetch as unknown as jest.Mock).mockClear();
+ afterEach(() => {
+ mockLambdaClient.restore();
});
test('Send subscriptions', () =>
@@ -1517,7 +1513,7 @@ describe('Subscription Worker', () => {
expect(subscription.id).toBeDefined();
// Subscribe to the topic
- const subscriber = getRedis().duplicate();
+ const subscriber = getRedisSubscriber();
await subscriber.subscribe(subscription.id as string);
let resolve: () => void;
@@ -1564,6 +1560,7 @@ describe('Subscription Worker', () => {
expect(queue.add).toHaveBeenCalled();
await deferredPromise;
+ // @ts-expect-error Okay to await quit in tests
await subscriber.quit();
}));
@@ -1591,7 +1588,7 @@ describe('Subscription Worker', () => {
expect(subscription.id).toBeDefined();
// Subscribe to the topic
- const subscriber = getRedis().duplicate();
+ const subscriber = getRedisSubscriber();
await subscriber.subscribe(subscription.id as string);
let resolve: () => void;
@@ -1622,6 +1619,7 @@ describe('Subscription Worker', () => {
}, 150);
await deferredPromise;
+ // @ts-expect-error Okay to await quit in tests
await subscriber.quit();
expect(console.log).toHaveBeenLastCalledWith(expect.stringMatching(/WebSocket Subscriptions/));
@@ -1670,7 +1668,7 @@ describe('Subscription Worker', () => {
expect(subscription.id).toBeDefined();
// Subscribe to the topic
- const subscriber = getRedis().duplicate();
+ const subscriber = getRedisSubscriber();
await subscriber.subscribe(subscription.id as string);
let resolve: () => void;
@@ -1698,6 +1696,7 @@ describe('Subscription Worker', () => {
setTimeout(() => resolve(), 300);
await deferredPromise;
+ // @ts-expect-error Okay to await quit in tests
await subscriber.quit();
expect(console.log).toHaveBeenCalledWith(
@@ -1748,7 +1747,7 @@ describe('Subscription Worker', () => {
await superAdminRepo.deleteResource('ProjectMembership', membership.id as string);
// Subscribe to the topic
- const subscriber = getRedis().duplicate();
+ const subscriber = getRedisSubscriber();
await subscriber.subscribe(subscription.id as string);
let resolve: () => void;
@@ -1776,6 +1775,7 @@ describe('Subscription Worker', () => {
setTimeout(() => resolve(), 300);
await deferredPromise;
+ // @ts-expect-error Okay to await quit in tests
await subscriber.quit();
expect(console.log).toHaveBeenCalledWith(
@@ -1826,7 +1826,7 @@ describe('Subscription Worker', () => {
await superAdminRepo.deleteResource('AccessPolicy', accessPolicy.id as string);
// Subscribe to the topic
- const subscriber = getRedis().duplicate();
+ const subscriber = getRedisSubscriber();
await subscriber.subscribe(subscription.id as string);
let resolve: () => void;
@@ -1854,6 +1854,7 @@ describe('Subscription Worker', () => {
setTimeout(() => resolve(), 300);
await deferredPromise;
+ // @ts-expect-error Okay to await quit in test
await subscriber.quit();
expect(console.log).toHaveBeenCalledWith(
diff --git a/packages/server/tsconfig.json b/packages/server/tsconfig.json
index a5cb75c562..e53cd271e9 100644
--- a/packages/server/tsconfig.json
+++ b/packages/server/tsconfig.json
@@ -1,4 +1,4 @@
{
"extends": "../../tsconfig.json",
- "include": ["src/**/*.ts"]
+ "include": ["src/**/*.ts", "seed-tests/seed-serial.test.ts", "seed-tests/seed.test.ts"]
}
diff --git a/packages/server/turbo.json b/packages/server/turbo.json
new file mode 100644
index 0000000000..d00fb4125e
--- /dev/null
+++ b/packages/server/turbo.json
@@ -0,0 +1,16 @@
+{
+ "$schema": "https://turborepo.org/schema.json",
+ "extends": ["//"],
+ "pipeline": {
+ "test:seed:serial": {
+ "dependsOn": ["build"],
+ "outputs": ["coverage/**"],
+ "inputs": ["src/**/*.tsx", "src/**/*.ts"]
+ },
+ "test:seed:parallel": {
+ "dependsOn": ["build"],
+ "outputs": ["coverage/**"],
+ "inputs": ["src/**/*.tsx", "src/**/*.ts"]
+ }
+ }
+}
diff --git a/scripts/reinstall.sh b/scripts/reinstall.sh
index 6c735d4ccf..27abc9e3f6 100755
--- a/scripts/reinstall.sh
+++ b/scripts/reinstall.sh
@@ -7,7 +7,6 @@ set -e
set -x
rm -rf node_modules
-rm -rf package-lock.json
for dir in `ls packages`; do
if test -d "packages/$dir/node_modules"; then
@@ -21,4 +20,10 @@ for dir in `ls examples`; do
fi
done
-npm i --strict-peer-deps
+# If called with "--update", then use npm i
+if [ "$1" == "--update" ]; then
+ rm -rf package-lock.json
+ npm i --strict-peer-deps
+else
+ npm ci --strict-peer-deps
+fi
diff --git a/scripts/test.sh b/scripts/test.sh
index 26c162705d..c338b04f6e 100755
--- a/scripts/test.sh
+++ b/scripts/test.sh
@@ -7,7 +7,29 @@ set -e
set -x
# Set node options
-export NODE_OPTIONS='--max-old-space-size=5120'
+export NODE_OPTIONS='--max-old-space-size=8192'
+
+# Clear old code coverage data
+rm -rf coverage
+rm -rf coverage-seed
+mkdir -p coverage/packages
+mkdir -p coverage/combined
+mkdir -p coverage-seed/serial
+mkdir -p coverage-seed/parallel
+
+# Testing production path of seeding the database
+# This is a special "test" which runs all of the seed logic, such as setting up structure definitions
+# On a normal developer machine, this is run only rarely when setting up a new database
+# We execute this in parallel with the main line of tests
+{
+ time npx turbo run test:seed:serial --filter=./packages/server -- --coverage
+ cp "packages/server/coverage-seed/serial/coverage-final.json" "coverage/packages/coverage-server-seed-serial.json"
+} &
+
+# Seed the database before testing
+# This is the parallel implementation so it's faster
+time npx turbo run test:seed:parallel --filter=./packages/server -- --coverage
+cp "packages/server/coverage-seed/parallel/coverage-final.json" "coverage/packages/coverage-server-seed-parallel.json"
# Test
# Run them separately because code coverage is resource intensive
@@ -24,12 +46,9 @@ for dir in `ls examples`; do
fi
done
+wait
# Combine test coverage
-rm -rf coverage
-mkdir -p coverage/packages
-mkdir -p coverage/combined
-
PACKAGES=(
"agent"
"app"
diff --git a/scripts/upgrade.sh b/scripts/upgrade.sh
index 782cd3492a..71f7d50732 100755
--- a/scripts/upgrade.sh
+++ b/scripts/upgrade.sh
@@ -40,7 +40,7 @@ git push origin "$BRANCH_NAME"
gh pr create --title "Dependency upgrades $DATE" --body "Dependency upgrades" --draft
# Reinstall all dependencies
-./scripts/reinstall.sh
+./scripts/reinstall.sh --update
# Commit and push after running NPM install
git add -u .
diff --git a/sonar-project.properties b/sonar-project.properties
index 7c9e806049..b0bd407782 100644
--- a/sonar-project.properties
+++ b/sonar-project.properties
@@ -1,7 +1,7 @@
sonar.organization=medplum
sonar.projectKey=medplum_medplum
sonar.projectName=Medplum
-sonar.projectVersion=3.1.2
+sonar.projectVersion=3.1.3
sonar.sources=packages
sonar.sourceEncoding=UTF-8
sonar.exclusions=**/node_modules/**,\