Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
18 commits
Select commit Hold shift + click to select a range
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@
import org.apache.avro.generic.IndexedRecord;
import org.apache.commons.collections4.CollectionUtils;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.jspecify.annotations.Nullable;

/** Converter to change HAPI objects into Avro structures and vice versa. */
public class AvroConverter {
Expand Down Expand Up @@ -74,6 +75,7 @@ private static AvroConverter visitResource(
RuntimeResourceDefinition[] resources =
new RuntimeResourceDefinition[1 + containedResourceTypeUrls.size()];

Preconditions.checkNotNull(converter.getElementType(), "Converter must have an element type");
resources[0] = context.getResourceDefinition(converter.getElementType());

for (int i = 0; i < containedResourceTypeUrls.size(); i++) {
Expand Down Expand Up @@ -210,6 +212,7 @@ public static AvroConverter forResource(
* @param resource the FHIR resource
* @return the record.
*/
@Nullable
public IndexedRecord resourceToAvro(IBaseResource resource) {

return (IndexedRecord) hapiToAvroConverter.fromHapi(resource);
Expand All @@ -221,6 +224,7 @@ public IndexedRecord resourceToAvro(IBaseResource resource) {
* @param record the record
* @return the FHIR resource.
*/
@Nullable
public IBaseResource avroToResource(IndexedRecord record) {

return (IBaseResource) avroToHapiConverter.toHapi(record);
Expand All @@ -241,6 +245,7 @@ public Schema getSchema() {
*
* @return the FHIR type of the resource being converted.
*/
@Nullable
// Delegates to the underlying HAPI-to-Avro converter; returns null when that
// converter has no element type — hence the @Nullable annotation.
public String getResourceType() {
return hapiToAvroConverter.getElementType();
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -44,6 +44,7 @@
import org.apache.commons.lang3.StringUtils;
import org.hl7.fhir.instance.model.api.IBase;
import org.hl7.fhir.instance.model.api.IPrimitiveType;
import org.jspecify.annotations.Nullable;

public class DefinitionToAvroVisitor implements DefinitionVisitor<HapiConverter<Schema>> {

Expand Down Expand Up @@ -170,19 +171,19 @@ private static class CompositeToAvroConverter extends HapiCompositeConverter<Sch
private final GenericData avroData = SpecificData.get();

CompositeToAvroConverter(
String elementType,
@Nullable String elementType,
List<StructureField<HapiConverter<Schema>>> children,
Schema structType,
FhirConversionSupport fhirSupport) {
this(elementType, children, structType, fhirSupport, null);
}

CompositeToAvroConverter(
String elementType,
@Nullable String elementType,
List<StructureField<HapiConverter<Schema>>> children,
Schema structType,
FhirConversionSupport fhirSupport,
String extensionUrl) {
@Nullable String extensionUrl) {

super(elementType, children, structType, fhirSupport, extensionUrl);
}
Expand Down Expand Up @@ -629,6 +630,7 @@ private static class NoOpFieldSetter implements HapiFieldSetter, HapiObjectConve
// Intentional no-op: discards the value instead of setting it on the parent
// object (this setter belongs to a no-op converter used for skipped fields).
public void setField(
IBase parentObject, BaseRuntimeChildDefinition fieldToSet, Object sparkObject) {}

@Nullable
@Override
public IBase toHapi(Object input) {
return null;
Expand All @@ -646,6 +648,7 @@ private static class RelativeValueConverter extends HapiConverter<Schema> {
this.prefix = prefix;
}

@Nullable
@Override
public Object fromHapi(Object input) {
String uri = ((IPrimitiveType) input).getValueAsString();
Expand Down Expand Up @@ -763,11 +766,6 @@ public HapiConverter<Schema> visitParentExtension(
String extensionUrl,
List<StructureField<HapiConverter<Schema>>> children) {

// Ignore extension fields that don't have declared content for now.
if (children.isEmpty()) {
return null;
}

String recordNamespace = DefinitionVisitorsUtil.namespaceFor(basePackage, extensionUrl);

String localPart = extensionUrl.substring(extensionUrl.lastIndexOf('/') + 1);
Expand Down Expand Up @@ -850,13 +848,13 @@ public int getMaxDepth(String elementTypeUrl, String path) {
}

private static HapiCompositeConverter createCompositeConverter(
String elementType,
@Nullable String elementType,
String recordName,
String doc,
String namespace,
List<StructureField<HapiConverter<Schema>>> children,
FhirConversionSupport fhirSupport,
String extensionUrl) {
@Nullable String extensionUrl) {

List<Field> fields =
children.stream()
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -8,9 +8,11 @@
import org.apache.avro.Schema;
import org.apache.avro.Schema.Type;
import org.hl7.fhir.instance.model.api.IBase;
import org.jspecify.annotations.Nullable;

public class NoOpConverter extends HapiConverter<Schema> {

@Nullable
@Override
public Object fromHapi(Object input) {
return null;
Expand All @@ -26,6 +28,7 @@ private static class FieldSetter implements HapiFieldSetter, HapiObjectConverter
@Override
// Intentional no-op: NoOpConverter's field setter ignores the supplied value.
public void setField(IBase parentObject, BaseRuntimeChildDefinition fieldToSet, Object value) {}

@Nullable
@Override
public IBase toHapi(Object input) {
return null;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@
import java.util.Map;
import org.apache.avro.Schema;
import org.apache.avro.SchemaFormatter;
import org.jspecify.annotations.Nullable;

/** This class can be used to generate aggregated avro schemas for the FHIR profile extensions. */
public class GenerateAggregatedSchemas {
Expand Down Expand Up @@ -75,10 +76,10 @@ private static Map<String, String> convertArgsToPairs(String[] args) {
}

private static void generateAggregatedSchemas(
FhirVersionEnum fhirVersionEnum,
String structureDefinitionsPath,
List<String> resourceTypes,
String outputDir)
@Nullable FhirVersionEnum fhirVersionEnum,
@Nullable String structureDefinitionsPath,
@Nullable List<String> resourceTypes,
@Nullable String outputDir)
throws ProfileException, IOException {
Preconditions.checkNotNull(fhirVersionEnum, "%s cannot be empty", FHIR_VERSION);
Preconditions.checkNotNull(
Expand All @@ -95,6 +96,15 @@ private static void generateAggregatedSchemas(
List<String> resourceTypeURLs =
ProfileMapperFhirContexts.getInstance()
.getMappedProfilesForResource(FhirVersionEnum.R4, resourceType);
if (resourceTypeURLs == null || resourceTypeURLs.isEmpty()) {
System.out.printf(
"No profiles found for resourceType=%s, skipping schema generation for this"
+ " resourceType%n",
resourceType);
continue; // TODO confirm if we need to throw a new ProfileException exception here instead
// of skipping
}

AvroConverter aggregatedConverter =
AvroConverter.forResources(fhirContext, resourceTypeURLs, 1);
createOutputFile(resourceType, aggregatedConverter.getSchema(), outputDir);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -115,8 +115,10 @@ public void validateR4UsCoreResourceWithExtension() throws ProfileException, IOE
Patient patient =
(Patient)
loadResource(fhirContext, "/r4-us-core-resources/patient_us_core.json", Patient.class);
IndexedRecord avroRecord = patientConverter.resourceToAvro(patient);
Patient patientDecoded = (Patient) patientConverter.avroToResource(avroRecord);
IndexedRecord patientRecord = patientConverter.resourceToAvro(patient);
Assert.assertNotNull(patientRecord);
Patient patientDecoded = (Patient) patientConverter.avroToResource(patientRecord);
Assert.assertNotNull(patientDecoded);
Assert.assertTrue(
patient.equalsDeep(
(Base) TestUtil.encodeThenParse(patientDecoded, Patient.class, fhirContext)));
Expand All @@ -137,9 +139,11 @@ public void validateMergedStu3UsCoreResourceWithExtensions()
fhirContext,
"/stu3-us-core-resources/patient_us_core.json",
org.hl7.fhir.dstu3.model.Patient.class);
IndexedRecord avroRecord = patientConverter.resourceToAvro(patient);
IndexedRecord patientRecord = patientConverter.resourceToAvro(patient);
Assert.assertNotNull(patientRecord);
org.hl7.fhir.dstu3.model.Patient patientDecoded =
(org.hl7.fhir.dstu3.model.Patient) patientConverter.avroToResource(avroRecord);
(org.hl7.fhir.dstu3.model.Patient) patientConverter.avroToResource(patientRecord);
Assert.assertNotNull(patientDecoded);
patientDecoded.setId(patient.getId());
// TODO : The text field is not properly copied to the decoded object back, hence manually
// copying it, check here for details https://github.com/google/fhir-data-pipes/issues/1014
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,9 @@
* This class tests the user-defined structure definitions which should also follow the HL7 FHIR
* specifications. These are additional test cases beyond the regular US Core Profiles tests.
*/

// Suppressing NullAway warnings for test code
@SuppressWarnings("NullAway")
public class R4AvroConverterCustomProfileTest {

private static final Patient testBunsenTestProfilePatient =
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -46,6 +46,8 @@
import org.junit.Test;

// TODO refactor the shared code with `Stu3AvroConverterUsCoreTest`.
// Suppressing NullAway warnings for test code
@SuppressWarnings("NullAway")
public class R4AvroConverterUsCoreTest {

private static final Observation testObservation = TestData.newObservation();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,8 @@
* <p>TODO: Refactor this and the R43AvroConverterCustomProfileTest.java to move the duplicate code
* into a common class and add only relevant cases here
*/
// Suppressing NullAway warnings for test code
@SuppressWarnings("NullAway")
public class Stu3AvroConverterCustomProfileTest {

private static final Patient testBunsenTestProfilePatient =
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -42,6 +42,8 @@
import org.junit.BeforeClass;
import org.junit.Test;

// Suppressing NullAway warnings for test code
@SuppressWarnings("NullAway")
public class Stu3AvroConverterUsCoreTest {

private static final Observation testObservation = TestData.newObservation();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@ public Map<String, List> compositeValues(IBase composite) {

if (children == null) {

return null;
return Map.of();
} else {

// Some FHIR resources produce duplicate properties in the children,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@
import org.hl7.fhir.r4.model.ElementDefinition;
import org.hl7.fhir.r4.model.StructureDefinition;
import org.jspecify.annotations.NonNull;
import org.jspecify.annotations.Nullable;

// TODO: A significant part of this is similar to Stu3StructureDefinitions which we should refactor.
// This is non-trivial because FHIR ElementDefinition objects do not share the same interface for
Expand Down Expand Up @@ -134,6 +135,7 @@ public boolean getIsModifier() {
}

@Override
@Nullable
public String getFixedPrimitiveValue() {
if (elementDefinition.getFixed() == null) {
return null;
Expand All @@ -156,6 +158,7 @@ public List<String> getReferenceTargetProfiles() {
}

@Override
@Nullable
public String getFirstTypeProfile() {
List<CanonicalType> profiles = elementDefinition.getTypeFirstRep().getProfile();
if (profiles == null || profiles.isEmpty()) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@ public Map<String, List> compositeValues(IBase composite) {

if (children == null) {

return null;
return Map.of();
} else {

// Some FHIR resources produce duplicate properties in the children,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@
import org.hl7.fhir.dstu3.model.StructureDefinition;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.jspecify.annotations.NonNull;
import org.jspecify.annotations.Nullable;

/** {@link StructureDefinitions} implementation for FHIR STU3. */
public class Stu3StructureDefinitions extends StructureDefinitions {
Expand Down Expand Up @@ -127,6 +128,7 @@ public boolean getIsModifier() {
return elementDefinition.getIsModifier();
}

@Nullable
@Override
public String getFixedPrimitiveValue() {
if (elementDefinition.getFixed() == null) {
Expand All @@ -144,6 +146,7 @@ public List<String> getReferenceTargetProfiles() {
.collect(Collectors.toList());
}

@Nullable
@Override
public String getFirstTypeProfile() {
return elementDefinition.getTypeFirstRep().getProfile();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -22,8 +22,14 @@ public class ProfileMapperFhirContexts {
private static final Logger logger =
LoggerFactory.getLogger(ProfileMapperFhirContexts.class.getName());
private final Map<FhirVersionEnum, FhirContextData> fhirContextMappings;

// This is initialized/used via the singleton pattern getInstance method.
@SuppressWarnings("NullAway.Init")
private static ProfileMapperFhirContexts instance;
private ProfileMappingProvider profileMappingProvider;

// This is initialized in the private constructor.
@SuppressWarnings("NullAway")
private final ProfileMappingProvider profileMappingProvider;

private ProfileMapperFhirContexts() {
this.fhirContextMappings = Maps.newHashMap();
Expand Down Expand Up @@ -111,6 +117,7 @@ private Map<String, List<String>> loadStructureDefinitions(
* @return the list of profile urls
* @throws ProfileException if the FhirContext is not initialised for the given fhirVersion
*/
@Nullable
public List<String> getMappedProfilesForResource(FhirVersionEnum fhirVersion, String resourceType)
throws ProfileException {
FhirContextData fhirContextData = fhirContextMappings.get(fhirVersion);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -94,16 +94,19 @@ private Map<String, List<String>> loadBaseStructureDefinitions(
Map<String, List<String>> resourceProfileMap = new HashMap<>();
List<IBaseResource> defaultDefinitions =
context.getValidationSupport().fetchAllStructureDefinitions();
for (IBaseResource definition : defaultDefinitions) {
support.addStructureDefinition(definition);
// Links the profile only if the definition belongs to a base resource. The default
// definitions loaded could be a StructureDefinition, Extension element, CapabilityStatement,
// ValueSet etc., hence this check is necessary.
if (isABaseResource(context, definition)) {
RuntimeResourceDefinition resourceDefinition = context.getResourceDefinition(definition);
String type = fetchProperty("type", resourceDefinition, definition);
String url = fetchProperty("url", resourceDefinition, definition);
resourceProfileMap.computeIfAbsent(type, list -> new ArrayList<>()).add(url);
if (defaultDefinitions != null) {
for (IBaseResource definition : defaultDefinitions) {
support.addStructureDefinition(definition);
// Links the profile only if the definition belongs to a base resource. The default
// definitions loaded could be a StructureDefinition, Extension element,
// CapabilityStatement,
// ValueSet etc., hence this check is necessary.
if (isABaseResource(context, definition)) {
RuntimeResourceDefinition resourceDefinition = context.getResourceDefinition(definition);
String type = fetchProperty("type", resourceDefinition, definition);
String url = fetchProperty("url", resourceDefinition, definition);
resourceProfileMap.computeIfAbsent(type, list -> new ArrayList<>()).add(url);
}
}
}
context.setValidationSupport(support);
Expand Down Expand Up @@ -235,6 +238,7 @@ private static IBaseResource getResource(IParser jsonParser, InputStream inputSt
}
}

@Nullable
private String fetchProperty(
String property, RuntimeResourceDefinition resourceDefinition, IBaseResource definition) {
Optional<IBase> propertyValue =
Expand All @@ -253,8 +257,8 @@ private boolean isABaseResource(FhirContext fhirContext, IBaseResource definitio
String type = fetchProperty("type", resourceDefinition, definition);
String baseDefinition = fetchProperty("baseDefinition", resourceDefinition, definition);
if (fhirContext.getResourceTypes().contains(type)
&& baseDefinition.equalsIgnoreCase(
"http://hl7.org/fhir/StructureDefinition/DomainResource")) {
&& "http://hl7.org/fhir/StructureDefinition/DomainResource"
.equalsIgnoreCase(baseDefinition)) {
return true;
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,7 @@ public static String recordNameFor(String elementPath) {
.splitToStream(elementPath)
.map(StringUtils::capitalize)
.reduce(String::concat)
.get();
.orElse("");
}

/**
Expand Down
Loading
Loading