|
11 | 11 | import java.util.ArrayList;
|
12 | 12 | import java.util.Arrays;
|
13 | 13 | import java.util.List;
|
| 14 | +import java.util.Map; |
14 | 15 | import java.util.Properties;
|
15 | 16 |
|
16 | 17 | import io.kubernetes.client.custom.V1Patch;
|
| 18 | +import io.kubernetes.client.openapi.models.CoreV1Event; |
17 | 19 | import io.kubernetes.client.openapi.models.V1Container;
|
18 | 20 | import io.kubernetes.client.openapi.models.V1EnvVar;
|
19 | 21 | import io.kubernetes.client.openapi.models.V1LocalObjectReference;
|
|
22 | 24 | import io.kubernetes.client.openapi.models.V1SecretReference;
|
23 | 25 | import io.kubernetes.client.openapi.models.V1Volume;
|
24 | 26 | import io.kubernetes.client.openapi.models.V1VolumeMount;
|
| 27 | +import io.kubernetes.client.util.Yaml; |
25 | 28 | import oracle.weblogic.domain.AdminServer;
|
26 | 29 | import oracle.weblogic.domain.AdminService;
|
27 | 30 | import oracle.weblogic.domain.Channel;
|
|
58 | 61 | import static oracle.weblogic.kubernetes.actions.TestActions.deleteDomainCustomResource;
|
59 | 62 | import static oracle.weblogic.kubernetes.actions.TestActions.deletePersistentVolume;
|
60 | 63 | import static oracle.weblogic.kubernetes.actions.TestActions.deletePersistentVolumeClaim;
|
| 64 | +import static oracle.weblogic.kubernetes.actions.TestActions.getDomainCustomResource; |
61 | 65 | import static oracle.weblogic.kubernetes.actions.TestActions.getNextIntrospectVersion;
|
62 | 66 | import static oracle.weblogic.kubernetes.actions.TestActions.getServiceNodePort;
|
63 | 67 | import static oracle.weblogic.kubernetes.actions.TestActions.getServicePort;
|
64 | 68 | import static oracle.weblogic.kubernetes.actions.TestActions.now;
|
65 | 69 | import static oracle.weblogic.kubernetes.actions.TestActions.scaleClusterWithRestApi;
|
66 | 70 | import static oracle.weblogic.kubernetes.actions.impl.Domain.patchDomainCustomResource;
|
| 71 | +import static oracle.weblogic.kubernetes.assertions.TestAssertions.verifyRollingRestartOccurred; |
| 72 | +import static oracle.weblogic.kubernetes.utils.CommonPatchTestUtils.patchDomainResource; |
67 | 73 | import static oracle.weblogic.kubernetes.utils.CommonTestUtils.checkPodDoesNotExist;
|
68 | 74 | import static oracle.weblogic.kubernetes.utils.CommonTestUtils.checkPodExists;
|
69 | 75 | import static oracle.weblogic.kubernetes.utils.CommonTestUtils.checkPodReady;
|
|
78 | 84 | import static oracle.weblogic.kubernetes.utils.CommonTestUtils.createSecretWithUsernamePassword;
|
79 | 85 | import static oracle.weblogic.kubernetes.utils.CommonTestUtils.getExternalServicePodName;
|
80 | 86 | import static oracle.weblogic.kubernetes.utils.CommonTestUtils.getIntrospectJobName;
|
| 87 | +import static oracle.weblogic.kubernetes.utils.CommonTestUtils.getPodsWithTimeStamps; |
81 | 88 | import static oracle.weblogic.kubernetes.utils.CommonTestUtils.installAndVerifyOperator;
|
82 | 89 | import static oracle.weblogic.kubernetes.utils.CommonTestUtils.setPodAntiAffinity;
|
83 | 90 | import static oracle.weblogic.kubernetes.utils.CommonTestUtils.upgradeAndVerifyOperator;
|
|
89 | 96 | import static oracle.weblogic.kubernetes.utils.K8sEvents.DOMAIN_PROCESSING_FAILED;
|
90 | 97 | import static oracle.weblogic.kubernetes.utils.K8sEvents.DOMAIN_PROCESSING_RETRYING;
|
91 | 98 | import static oracle.weblogic.kubernetes.utils.K8sEvents.DOMAIN_PROCESSING_STARTING;
|
| 99 | +import static oracle.weblogic.kubernetes.utils.K8sEvents.DOMAIN_ROLL_COMPLETED; |
| 100 | +import static oracle.weblogic.kubernetes.utils.K8sEvents.DOMAIN_ROLL_STARTING; |
92 | 101 | import static oracle.weblogic.kubernetes.utils.K8sEvents.DOMAIN_VALIDATION_ERROR;
|
93 | 102 | import static oracle.weblogic.kubernetes.utils.K8sEvents.NAMESPACE_WATCHING_STARTED;
|
| 103 | +import static oracle.weblogic.kubernetes.utils.K8sEvents.POD_CYCLE_STARTING; |
94 | 104 | import static oracle.weblogic.kubernetes.utils.K8sEvents.checkDomainEvent;
|
95 | 105 | import static oracle.weblogic.kubernetes.utils.K8sEvents.checkDomainEventWatchingStopped;
|
96 | 106 | import static oracle.weblogic.kubernetes.utils.K8sEvents.checkDomainEventWithCount;
|
97 | 107 | import static oracle.weblogic.kubernetes.utils.K8sEvents.domainEventExists;
|
98 | 108 | import static oracle.weblogic.kubernetes.utils.K8sEvents.getDomainEventCount;
|
| 109 | +import static oracle.weblogic.kubernetes.utils.K8sEvents.getEvent; |
99 | 110 | import static oracle.weblogic.kubernetes.utils.K8sEvents.getEventCount;
|
100 | 111 | import static oracle.weblogic.kubernetes.utils.ThreadSafeLogger.getLogger;
|
101 | 112 | import static oracle.weblogic.kubernetes.utils.WLSTUtils.executeWLSTScript;
|
@@ -492,11 +503,161 @@ public void testDomainK8sEventsProcessingFailed() {
|
492 | 503 | }
|
493 | 504 | }
|
494 | 505 |
|
| 506 | + /** |
| 507 | + * The test modifies the logHome property and verifies the domain roll events are logged. |
| 508 | + */ |
| 509 | + @Order(10) |
| 510 | + @Test |
| 511 | + @DisplayName("Verify logHome property change rolls domain and relevant events are logged") |
| 512 | + public void testLogHomeChangeEvents() { |
| 513 | + |
| 514 | + OffsetDateTime timestamp = now(); |
| 515 | + |
| 516 | + // get the original domain resource before update |
| 517 | + Domain domain1 = assertDoesNotThrow(() -> getDomainCustomResource(domainUid, domainNamespace1), |
| 518 | + String.format("getDomainCustomResource failed with ApiException while trying to get domain %s in namespace %s", |
| 519 | + domainUid, domainNamespace1)); |
| 520 | + |
| 521 | + // get the map with server pods and their original creation timestamps |
| 522 | + Map<String, OffsetDateTime> podsWithTimeStamps = getPodsWithTimeStamps(domainNamespace1, |
| 523 | + adminServerPodName, managedServerPodNamePrefix, replicaCount); |
| 524 | + |
| 525 | + //print out the original logHome |
| 526 | + String logHome = domain1.getSpec().getLogHome(); |
| 527 | + logger.info("Currently the log home used by the domain is: {0}", logHome); |
| 528 | + |
| 529 | + //change logHome from /shared/logs to /shared/logs/logHome |
| 530 | + String patchStr = "[" |
| 531 | + + "{\"op\": \"replace\", \"path\": \"/spec/logHome\", \"value\": \"/shared/logs/logHome\"}" |
| 532 | + + "]"; |
| 533 | + logger.info("PatchStr for logHome: {0}", patchStr); |
| 534 | + |
| 535 | + assertTrue(patchDomainResource(domainUid, domainNamespace1, new StringBuffer(patchStr)), |
| 536 | + "patchDomainCustomResource(logHome) failed"); |
| 537 | + |
| 538 | + domain1 = assertDoesNotThrow(() -> getDomainCustomResource(domainUid, domainNamespace1), |
| 539 | + String.format("getDomainCustomResource failed with ApiException while trying to get domain %s in namespace %s", |
| 540 | + domainUid, domainNamespace1)); |
| 541 | + |
| 542 | + //print out logHome in the new patched domain |
| 543 | + logger.info("In the new patched domain logHome is: {0}", domain1.getSpec().getLogHome()); |
| 544 | + assertTrue(domain1.getSpec().getLogHome().equals("/shared/logs/logHome"), "logHome is not updated"); |
| 545 | + |
| 546 | + // verify the server pods are rolling restarted and back to ready state |
| 547 | + logger.info("Verifying rolling restart occurred for domain {0} in namespace {1}", |
| 548 | + domainUid, domainNamespace1); |
| 549 | + assertTrue(verifyRollingRestartOccurred(podsWithTimeStamps, 1, domainNamespace1), |
| 550 | + String.format("Rolling restart failed for domain %s in namespace %s", domainUid, domainNamespace1)); |
| 551 | + |
| 552 | + checkPodReadyAndServiceExists(adminServerPodName, domainUid, domainNamespace1); |
| 553 | + |
| 554 | + for (int i = 1; i <= replicaCount; i++) { |
| 555 | + logger.info("Checking managed server service {0} is created in namespace {1}", |
| 556 | + managedServerPodNamePrefix + i, domainNamespace1); |
| 557 | + checkPodReadyAndServiceExists(managedServerPodNamePrefix + i, domainUid, domainNamespace1); |
| 558 | + } |
| 559 | + |
| 560 | + //verify the logHome change causes the domain roll events to be logged |
| 561 | + logger.info("verify domain roll starting/pod cycle starting events are logged"); |
| 562 | + checkEvent(opNamespace, domainNamespace1, domainUid, DOMAIN_ROLL_STARTING, "Normal", timestamp); |
| 563 | + checkEvent(opNamespace, domainNamespace1, domainUid, POD_CYCLE_STARTING, "Normal", timestamp); |
| 564 | + |
| 565 | + CoreV1Event event = getEvent(opNamespace, domainNamespace1, |
| 566 | + domainUid, DOMAIN_ROLL_STARTING, "Normal", timestamp); |
| 567 | + logger.info(Yaml.dump(event)); |
| 568 | + logger.info("verify the event message contains the logHome changed messages is logged"); |
| 569 | + assertTrue(event.getMessage().contains("logHome")); |
| 570 | + |
| 571 | + event = getEvent(opNamespace, domainNamespace1, |
| 572 | + domainUid, POD_CYCLE_STARTING, "Normal", timestamp); |
| 573 | + logger.info(Yaml.dump(event)); |
| 574 | + logger.info("verify the event message contains the LOG_HOME changed messages is logged"); |
| 575 | + assertTrue(event.getMessage().contains("LOG_HOME")); |
| 576 | + |
| 577 | + checkEvent(opNamespace, domainNamespace1, domainUid, DOMAIN_ROLL_COMPLETED, "Normal", timestamp); |
| 578 | + } |
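
This test and the next one build the JSON Patch document inline as a string. A helper along these lines could factor that out; this is a minimal sketch under the assumption that each patch replaces a single field under `/spec`, and `buildSpecReplacePatch` is a hypothetical name, not part of this change:

```java
// Hypothetical helper (sketch only, not part of this PR): builds a one-op JSON Patch
// that replaces a single field under /spec of the Domain custom resource.
// The caller passes the value already JSON-encoded, e.g. "\"/shared/logs/logHome\"" or "false".
private static String buildSpecReplacePatch(String field, String jsonValue) {
  return "[{\"op\": \"replace\", \"path\": \"/spec/" + field + "\", \"value\": " + jsonValue + "}]";
}

// Usage matching the patches in these tests:
// patchDomainResource(domainUid, domainNamespace1,
//     new StringBuffer(buildSpecReplacePatch("logHome", "\"/shared/logs/logHome\"")));
// patchDomainResource(domainUid, domainNamespace1,
//     new StringBuffer(buildSpecReplacePatch("includeServerOutInPodLog", Boolean.toString(!includeLogInPod))));
```
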
| 579 | + |
| 580 | + |
| 581 | + /** |
| 582 | + * The test modifies the includeServerOutInPodLog property and verifies the domain roll events are logged. |
| 583 | + */ |
| 584 | + @Order(11) |
| 585 | + @Test |
| 586 | + @DisplayName("Verify includeServerOutInPodLog property change rolls domain and relevant events are logged") |
| 587 | + public void testIncludeServerOutInPodLog() { |
| 588 | + |
| 589 | + OffsetDateTime timestamp = now(); |
| 590 | + |
| 591 | + // get the original domain resource before update |
| 592 | + Domain domain1 = assertDoesNotThrow(() -> getDomainCustomResource(domainUid, domainNamespace1), |
| 593 | + String.format("getDomainCustomResource failed with ApiException while trying to get domain %s in namespace %s", |
| 594 | + domainUid, domainNamespace1)); |
| 595 | + |
| 596 | + // get the map with server pods and their original creation timestamps |
| 597 | + Map<String, OffsetDateTime> podsWithTimeStamps = getPodsWithTimeStamps(domainNamespace1, |
| 598 | + adminServerPodName, managedServerPodNamePrefix, replicaCount); |
| 599 | + |
| 600 | + //print out the original includeServerOutInPodLog value |
| 601 | + boolean includeLogInPod = domain1.getSpec().includeServerOutInPodLog(); |
| 602 | + logger.info("Currently the includeServerOutInPodLog used for the domain is: {0}", includeLogInPod); |
| 603 | + |
| 604 | + //change includeServerOutInPodLog |
| 605 | + String patchStr = "[" |
| 606 | + + "{\"op\": \"replace\", \"path\": \"/spec/includeServerOutInPodLog\", " |
| 607 | + + "\"value\": " + Boolean.toString(!includeLogInPod) + "}" |
| 608 | + + "]"; |
| 609 | + logger.info("PatchStr for includeServerOutInPodLog: {0}", patchStr); |
| 610 | + |
| 611 | + assertTrue(patchDomainResource(domainUid, domainNamespace1, new StringBuffer(patchStr)), |
| 612 | + "patchDomainCustomResource(includeServerOutInPodLog) failed"); |
| 613 | + |
| 614 | + domain1 = assertDoesNotThrow(() -> getDomainCustomResource(domainUid, domainNamespace1), |
| 615 | + String.format("getDomainCustomResource failed with ApiException while trying to get domain %s in namespace %s", |
| 616 | + domainUid, domainNamespace1)); |
| 617 | + |
| 618 | + //print out includeServerOutInPodLog in the new patched domain |
| 619 | + logger.info("In the new patched domain includeServerOutInPodLog is: {0}", |
| 620 | + domain1.getSpec().includeServerOutInPodLog()); |
| 621 | + assertTrue(domain1.getSpec().includeServerOutInPodLog() != includeLogInPod, |
| 622 | + "includeServerOutInPodLog is not updated"); |
| 623 | + |
| 624 | + // verify the server pods are rolling restarted and back to ready state |
| 625 | + logger.info("Verifying rolling restart occurred for domain {0} in namespace {1}", |
| 626 | + domainUid, domainNamespace1); |
| 627 | + assertTrue(verifyRollingRestartOccurred(podsWithTimeStamps, 1, domainNamespace1), |
| 628 | + String.format("Rolling restart failed for domain %s in namespace %s", domainUid, domainNamespace1)); |
| 629 | + |
| 630 | + checkPodReadyAndServiceExists(adminServerPodName, domainUid, domainNamespace1); |
| 631 | + |
| 632 | + for (int i = 1; i <= replicaCount; i++) { |
| 633 | + logger.info("Checking managed server service {0} is created in namespace {1}", |
| 634 | + managedServerPodNamePrefix + i, domainNamespace1); |
| 635 | + checkPodReadyAndServiceExists(managedServerPodNamePrefix + i, domainUid, domainNamespace1); |
| 636 | + } |
| 637 | + |
| 638 | + //verify the includeServerOutInPodLog change causes the domain roll events to be logged |
| 639 | + logger.info("verify domain roll starting/pod cycle starting events are logged"); |
| 640 | + checkEvent(opNamespace, domainNamespace1, domainUid, DOMAIN_ROLL_STARTING, "Normal", timestamp); |
| 641 | + checkEvent(opNamespace, domainNamespace1, domainUid, POD_CYCLE_STARTING, "Normal", timestamp); |
| 642 | + |
| 643 | + CoreV1Event event = getEvent(opNamespace, domainNamespace1, |
| 644 | + domainUid, DOMAIN_ROLL_STARTING, "Normal", timestamp); |
| 645 | + logger.info(Yaml.dump(event)); |
| 646 | + logger.info("verify the event message contains the includeServerOutInPodLog changed messages is logged"); |
| 647 | + assertTrue(event.getMessage().contains("isIncludeServerOutInPodLog")); |
| 648 | + |
| 649 | + event = getEvent(opNamespace, domainNamespace1, domainUid, POD_CYCLE_STARTING, "Normal", timestamp); |
| 650 | + logger.info(Yaml.dump(event)); |
| 651 | + logger.info("verify the event message contains the SERVER_OUT_IN_POD_LOG changed messages is logged"); |
| 652 | + assertTrue(event.getMessage().contains("SERVER_OUT_IN_POD_LOG")); |
| 653 | + |
| 654 | + checkEvent(opNamespace, domainNamespace1, domainUid, DOMAIN_ROLL_COMPLETED, "Normal", timestamp); |
| 655 | + } |
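
The checkEvent and getEvent calls above come from this repository's K8sEvents utilities; conceptually they scan CoreV1Event objects in the domain namespace for a matching reason, type, and timestamp. A minimal sketch of that filtering step, assuming the event list has already been retrieved (hasMatchingEvent is a hypothetical name, not the repository's API):

```java
// Sketch of the filtering the K8sEvents utilities roughly perform (assumed shape,
// not this repository's actual implementation): true if any event in the list has
// the expected reason and type and was observed after the given timestamp.
private static boolean hasMatchingEvent(List<CoreV1Event> events, String reason,
    String type, OffsetDateTime timestamp) {
  for (CoreV1Event event : events) {
    if (reason.equals(event.getReason())
        && type.equals(event.getType())
        && event.getLastTimestamp() != null
        && event.getLastTimestamp().isAfter(timestamp)) {
      return true;
    }
  }
  return false;
}
```
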
495 | 656 |
|
496 | 657 | /**
|
497 | 658 | * Test DomainDeleted event is logged when domain resource is deleted.
|
498 | 659 | */
|
499 |
| - @Order(10) |
| 660 | + @Order(13) |
500 | 661 | @Test
|
501 | 662 | @DisplayName("Test domain events for various domain life cycle changes")
|
502 | 663 | public void testDomainK8SEventsDelete() {
|
@@ -529,7 +690,7 @@ public void testDomainK8SEventsDelete() {
|
529 | 690 | * </pre>
|
530 | 691 | * </p>
|
531 | 692 | */
|
532 |
| - @Order(11) |
| 693 | + @Order(14) |
533 | 694 | @ParameterizedTest
|
534 | 695 | @ValueSource(booleans = { true, false })
|
535 | 696 | public void testK8SEventsStartStopWatchingNS(boolean enableClusterRoleBinding) {
|
@@ -580,7 +741,7 @@ public void testK8SEventsStartStopWatchingNS(boolean enableClusterRoleBinding) {
|
580 | 741 | * </pre>
|
581 | 742 | * </p>
|
582 | 743 | */
|
583 |
| - @Order(13) |
| 744 | + @Order(15) |
584 | 745 | @ParameterizedTest
|
585 | 746 | @ValueSource(booleans = { true, false })
|
586 | 747 | public void testK8SEventsStartStopWatchingNSWithLabelSelector(boolean enableClusterRoleBinding) {
|
@@ -646,7 +807,7 @@ public void testK8SEventsStartStopWatchingNSWithLabelSelector(boolean enableClus
|
646 | 807 | * </pre>
|
647 | 808 | * </p>
|
648 | 809 | */
|
649 |
| - @Order(15) |
| 810 | + @Order(16) |
650 | 811 | @ParameterizedTest
|
651 | 812 | @ValueSource(booleans = { true, false })
|
652 | 813 | public void testK8SEventsStartStopWatchingNSWithRegExp(boolean enableClusterRoleBinding) {
|
|