Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add reindex of branch-review to elasticsearch upgrade guide #631

Open
wants to merge 14 commits into
base: develop
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
133 changes: 132 additions & 1 deletion docs/elasticsearch8-upgrade.md
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,7 @@ POST codesystem-version/_update_by_query
}
```

### Option 2 - Reindex codesystem-version to change the field type of importDate to long
### Option 2 - Reindex codesystem-version and branch-review to change the field types of importDate and lastUpdated to long

#### In Kibana create a new index for codesystem-version-tmp with updated mapping
```
Expand Down Expand Up @@ -135,6 +135,137 @@ POST _reindex
```
DELETE codesystem-version-tmp
```

#### Create a new index for branch-review-tmp with updated mapping
```
PUT branch-review-tmp
{
"mappings":{
"properties":{
"_class":{
"type":"text",
"fields":{
"keyword":{
"type":"keyword",
"ignore_above":256
}
}
},
"changedConcepts":{
"type":"long"
},
"id":{
"type":"keyword"
},
"lastUpdated":{
"type":"long"
},
"source":{
"type":"nested",
"properties":{
"baseTimestamp":{
"type":"long"
},
"headTimestamp":{
"type":"long"
},
"path":{
"type":"text",
"fields":{
"keyword":{
"type":"keyword",
"ignore_above":256
}
}
}
}
},
"sourceIsParent":{
"type":"boolean"
},
"status":{
"type":"keyword"
},
"target":{
"type":"nested",
"properties":{
"baseTimestamp":{
"type":"long"
},
"headTimestamp":{
"type":"long"
},
"path":{
"type":"text",
"fields":{
"keyword":{
"type":"keyword",
"ignore_above":256
}
}
}
}
}
}
}
}
```

#### Reindex branch-review to branch-review-tmp
```
POST _reindex
{
"source": {
"index": "branch-review"
},
"dest": {
"index": "branch-review-tmp"
},
"script": {
"source": """
if (ctx._source.containsKey('lastUpdated')) {
def value = ctx._source['lastUpdated'];
// lastUpdated was previously mapped as a date string; convert it to epoch millis for the new long mapping
try {
if (value instanceof String) {
// Parse the ISO-8601 string and store milliseconds since the epoch
ZonedDateTime zdt = ZonedDateTime.parse(value);
long milliSinceEpoch = zdt.toInstant().toEpochMilli();
ctx._source.lastUpdated = milliSinceEpoch;
}
} catch (Exception e) {
// Parsing failed: fall back to a placeholder (1 second after the epoch, 1970-01-01T00:00:01Z).
// WARNING: this is a debug value - choose an appropriate default before running against real data
ctx._source.lastUpdated = 1000;
}
}
""",
"lang": "painless"
}
}
```

#### Delete branch-review
```
DELETE branch-review
```

#### Reindex branch-review-tmp back to the original index name.
```
POST _reindex
{
"source": {
"index": "branch-review-tmp"
},
"dest": {
"index": "branch-review"
}
}
```

#### Delete branch-review-tmp
```
DELETE branch-review-tmp
```
Note: You can use curl for the above operations if you don't have Kibana installed. See the [Reindex API](https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-reindex.html) documentation for more details.

## Step two - Upgrade cluster to Elasticsearch 8
Expand Down
18 changes: 16 additions & 2 deletions pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -6,15 +6,17 @@
<description>SNOMED CT Terminology Server Using Elasticsearch</description>

<artifactId>snowstorm</artifactId>
<version>10.6.1</version>
<version>10.7.0-SNAPSHOT</version>
<parent>
<groupId>org.snomed</groupId>
<artifactId>snomed-parent-bom</artifactId>
<version>3.7.1</version>
<version>3.9.0-SNAPSHOT</version>
</parent>

<properties>
<packageName>${project.artifactId}</packageName>
<!-- BOM Override -->
<jena_version>4.9.0</jena_version>
<!--
Current Elasticsearch _Server_ version is 8.11.1
N.B. Remember to keep TestConfig.ELASTIC_SEARCH_SERVER_VERSION and getting-started guide updated.
Expand All @@ -27,6 +29,18 @@
</properties>

<dependencies>
<!-- BOM Override -->
<dependency>
<groupId>org.apache.jena</groupId>
<artifactId>jena-core</artifactId>
<version>${jena_version}</version>
</dependency>
<dependency>
<groupId>org.apache.jena</groupId>
<artifactId>jena-arq</artifactId>
<version>${jena_version}</version>
</dependency>

<dependency>
<!-- Branching and Version Control on top of Elasticsearch -->
<groupId>io.kaicode</groupId>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@ public class Concepts {
public static final String CORE_MODULE = "900000000000207008";
public static final String MODEL_MODULE = "900000000000012004";
public static final String ICD10_MODULE = "449080006";
public static final String ICD11_MODULE = "1204363008";
public static final String COMMON_FRENCH_MODULE = "11000241103";
public static final String MODULE = "900000000000443000";

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@
import io.kaicode.elasticvc.api.PathUtil;
import io.kaicode.elasticvc.domain.Branch;
import io.kaicode.elasticvc.domain.Metadata;
import org.apache.activemq.command.ActiveMQTopic;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.snomed.snowstorm.core.data.domain.CodeSystem;
Expand All @@ -14,6 +15,7 @@
import org.snomed.snowstorm.dailybuild.DailyBuildService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.jms.core.JmsTemplate;
import org.springframework.security.access.prepost.PreAuthorize;
import org.springframework.security.core.context.SecurityContext;
import org.springframework.security.core.context.SecurityContextHolder;
Expand All @@ -24,7 +26,8 @@
import java.util.concurrent.ExecutorService;

import static org.snomed.snowstorm.core.data.services.BranchMetadataHelper.INTERNAL_METADATA_KEY;
import static org.snomed.snowstorm.core.data.services.BranchMetadataKeys.*;
import static org.snomed.snowstorm.core.data.services.BranchMetadataKeys.DEPENDENCY_PACKAGE;
import static org.snomed.snowstorm.core.data.services.BranchMetadataKeys.DEPENDENCY_RELEASE;

@Service
public class CodeSystemUpgradeService {
Expand Down Expand Up @@ -55,9 +58,18 @@ public class CodeSystemUpgradeService {
@Autowired
private ExecutorService executorService;

@Autowired
private JmsTemplate jmsTemplate;

@Value("${snowstorm.rest-api.readonly}")
private boolean isReadOnly;

@Value("${snowstorm.codesystem-version.message.enabled}")
private boolean jmsMessageEnabled;

@Value("${jms.queue.prefix}")
private String jmsQueuePrefix;

private static final Map<String, CodeSystemUpgradeJob> upgradeJobMap = new HashMap<>();

private final Logger logger = LoggerFactory.getLogger(getClass());
Expand Down Expand Up @@ -202,6 +214,19 @@ public synchronized void upgrade(String id, CodeSystem codeSystem, Integer newDe
if (job != null) {
job.setStatus(CodeSystemUpgradeJob.UpgradeStatus.COMPLETED);
}

if (jmsMessageEnabled) {
Map<String, String> payload = new HashMap<>();
payload.put("codeSystemShortName", codeSystem.getShortName());
payload.put("codeSystemBranchPath", codeSystem.getBranchPath());
payload.put(DEPENDENCY_PACKAGE, newParentVersion.getReleasePackage());
payload.put(DEPENDENCY_RELEASE, String.valueOf(newParentVersion.getEffectiveDate()));

String topicDestination = jmsQueuePrefix + ".upgrade.complete";
logger.info("Sending JMS Topic - destination {}, payload {}...", topicDestination, payload);
jmsTemplate.convertAndSend(new ActiveMQTopic(topicDestination), payload);
}

upgradedSuccessfully = true;
} catch (Exception e) {
logger.error("Upgrade on {} failed", branchPath, e);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -870,6 +870,17 @@ public List<ConceptMini> copyConcepts(String ecl, String sourceBranchPath, Strin
final Branch sourceBranch = branchService.findBranchOrThrow(sourceBranchPath, true);
final Branch destinationBranch = branchService.findBranchOrThrow(destinationBranchPath, true);

CodeSystem codeSystem = codeSystemService.findClosestCodeSystemUsingAnyBranch(destinationBranchPath, false);
if (codeSystem != null) {
List<CodeSystemVersion> codeSystemVersions = codeSystemService.findAllVersions(codeSystem.getShortName(), true, true);
String branchPath = destinationBranch.getPath();
for (CodeSystemVersion codeSystemVersion : codeSystemVersions) {
if (Objects.equals(branchPath, codeSystemVersion.getBranchPath())) {
throw new ServiceException("Cannot donate concepts from " + sourceBranchPath + " to versioned " + destinationBranchPath);
}
}
}

if (getDefaultModuleId(sourceBranch).equals(getDefaultModuleId(destinationBranch))) {
throw new ServiceException("Cannot donate concepts from " + sourceBranchPath + " to " + destinationBranchPath + " as they are from the same module: " + getDefaultModuleId(sourceBranch));
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -943,7 +943,7 @@ private void addClause(Query queryClause, BoolQuery.Builder boolBuilder, boolean
}
}

private List<String> analyze(String text, StandardAnalyzer analyzer) {
public static List<String> analyze(String text, StandardAnalyzer analyzer) {
List<String> result = new ArrayList<>();
try {
TokenStream tokenStream = analyzer.tokenStream("contents", text);
Expand All @@ -953,12 +953,13 @@ private List<String> analyze(String text, StandardAnalyzer analyzer) {
result.add(attr.toString());
}
} catch (IOException e) {
logger.error("Failed to analyze text {}", text, e);
LoggerFactory.getLogger(DescriptionService.class)
.error("Failed to analyze text {}", text, e);
}
return result;
}

private String constructSimpleQueryString(String searchTerm) {
public static String constructSimpleQueryString(String searchTerm) {
	// Append a wildcard to every whitespace-separated token,
	// e.g. "heart att" -> "heart* att*", so each prefix matches.
	String withWildcards = searchTerm.trim().replace(" ", "* ") + "*";
	// Collapse any doubled wildcard so a term already ending in '*' is not duplicated.
	return withWildcards.replace("**", "*");
}

Expand Down Expand Up @@ -1000,7 +1001,7 @@ private String constructRegexQuery(String term) {
return regexBuilder.toString();
}

private String constructSearchTerm(List<String> tokens) {
public static String constructSearchTerm(List<String> tokens) {
StringBuilder builder = new StringBuilder();
for (String token : tokens) {
builder.append(token);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -42,7 +42,7 @@ public class ModuleDependencyService extends ComponentService {

public static final Set<String> CORE_MODULES = Set.of(Concepts.CORE_MODULE, Concepts.MODEL_MODULE);

public Set<String> SI_MODULES = new HashSet<>(Set.of(Concepts.CORE_MODULE, Concepts.MODEL_MODULE, Concepts.ICD10_MODULE));
public Set<String> SI_MODULES = new HashSet<>(Set.of(Concepts.CORE_MODULE, Concepts.MODEL_MODULE, Concepts.ICD10_MODULE, Concepts.ICD11_MODULE));

@Autowired
private BranchService branchService;
Expand Down Expand Up @@ -94,10 +94,13 @@ public synchronized void refreshCache() {
cacheValidAt = currentTime;
logger.info("MDR cache of International Modules refreshed for HEAD time: {}", currentTime);

//During unit tests, or in non-standard installations we might not see the ICD-10 Module
//During unit tests, or in non-standard installations we might not see the ICD-10 and ICD-11 Modules
if (!cachedInternationalModules.contains(Concepts.ICD10_MODULE)) {
SI_MODULES.remove(Concepts.ICD10_MODULE);
}
if (!cachedInternationalModules.contains(Concepts.ICD11_MODULE)) {
SI_MODULES.remove(Concepts.ICD11_MODULE);
}

derivativeModules = cachedInternationalModules.stream()
.filter(m -> !SI_MODULES.contains(m))
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,6 @@
import ca.uhn.fhir.jpa.entity.TermConcept;
import co.elastic.clients.elasticsearch._types.query_dsl.BoolQuery;
import com.google.common.collect.Iterables;

import org.hl7.fhir.r4.model.CodeSystem;
import org.hl7.fhir.r4.model.CodeType;
import org.slf4j.Logger;
Expand All @@ -19,10 +18,11 @@
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Pageable;
import org.springframework.data.elasticsearch.core.ElasticsearchOperations;
import org.springframework.data.elasticsearch.core.SearchHits;
import org.springframework.data.domain.Sort;
import org.springframework.data.elasticsearch.client.elc.NativeQuery;
import org.springframework.data.elasticsearch.client.elc.NativeQueryBuilder;
import org.springframework.data.elasticsearch.core.ElasticsearchOperations;
import org.springframework.data.elasticsearch.core.SearchHits;
import org.springframework.stereotype.Service;

import java.util.*;
Expand Down Expand Up @@ -182,16 +182,18 @@ public FHIRConcept findConcept(FHIRCodeSystemVersion systemVersion, String code)
public Page<FHIRConcept> findConcepts(BoolQuery.Builder fhirConceptQuery, PageRequest pageRequest) {
	// Build the query step by step; sort on display length then code
	// (ascending, so presumably shorter displays rank first - matches the added withSort in this change).
	NativeQueryBuilder queryBuilder = new NativeQueryBuilder();
	queryBuilder.withQuery(fhirConceptQuery.build()._toQuery());
	queryBuilder.withSort(Sort.by(FHIRConcept.Fields.DISPLAY_LENGTH, FHIRConcept.Fields.CODE));
	queryBuilder.withPageable(pageRequest);
	NativeQuery searchQuery = queryBuilder.build();
	// Report exact total hit counts instead of Elasticsearch's default capped count.
	searchQuery.setTrackTotalHits(true);
	// Apply search-after pagination state carried on the page request, then run the search.
	updateQueryWithSearchAfter(searchQuery, pageRequest);
	return toPage(elasticsearchOperations.search(searchQuery, FHIRConcept.class), pageRequest);
}

public SearchAfterPage<String> findConceptCodes(BoolQuery.Builder fhirConceptQuery, PageRequest pageRequest) {
public SearchAfterPage<String> findConceptCodes(BoolQuery fhirConceptQuery, PageRequest pageRequest) {
NativeQuery searchQuery = new NativeQueryBuilder()
.withQuery(fhirConceptQuery.build()._toQuery())
.withQuery(fhirConceptQuery._toQuery())
.withSort(Sort.by(FHIRConcept.Fields.CODE))
.withPageable(pageRequest)
.build();
searchQuery.setTrackTotalHits(true);
Expand Down
Loading