This is an automated email from the ASF dual-hosted git repository.
sai_boorlagadda pushed a commit to branch develop
in repository https://gitbox.apache.org/repos/asf/geode.git
The following commit(s) were added to refs/heads/develop by this push:
new af430d39e2 GEODE-10434: Migrate to Github action (#7870)
af430d39e2 is described below
commit af430d39e2f610bb887d0cfa500f0e4d61a46f18
Author: Sai Boorlagadda <[email protected]>
AuthorDate: Sat Jan 28 19:00:01 2023 -0800
GEODE-10434: Migrate to Github action (#7870)
* GEODE-10434: Migrate to Github Action
* Added a build step for Java 8 on Ubuntu
* Added a unit-test step for Ubuntu that builds with JDK 8 and runs the tests on JDK 8, 11 and 17
* Added apiCheck step
---
.github/workflows/gradle.yml | 140 +++++++++++++++++++++
.../internal/DistributionIntegrationTest.java} | 2 +-
.../apache/geode/internal/cache/DiskStoreImpl.java | 26 +++-
.../geode/internal/cache/PersistentOplogSet.java | 2 +-
.../org/apache/geode/internal/tcp/Connection.java | 7 ++
.../geode/internal/cache/DiskStoreImplTest.java | 54 ++++++++
.../OplogEntryIdSetDrfHashSetThresholdTest.java | 5 +-
.../apache/geode/internal/tcp/ConnectionTest.java | 6 +
.../internal/tcp/ConnectionTransmissionTest.java | 6 +
9 files changed, 236 insertions(+), 12 deletions(-)
diff --git a/.github/workflows/gradle.yml b/.github/workflows/gradle.yml
new file mode 100644
index 0000000000..1503bf8891
--- /dev/null
+++ b/.github/workflows/gradle.yml
@@ -0,0 +1,140 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+name: develop
+
+on:
+ push:
+ branches: [ "develop" ]
+ pull_request:
+ branches: [ "develop" ]
+
+permissions:
+ contents: read
+
+jobs:
+ build:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v3
+ - name: Set up JDK 8
+ uses: actions/setup-java@v3
+ with:
+ java-version: '8'
+ distribution: 'liberica'
+ - name: Run 'build install javadoc spotlessCheck rat checkPom
resolveDependencies pmdMain' with Gradle
+ uses: gradle/gradle-build-action@v2
+ with:
+ arguments: --console=plain --no-daemon build install javadoc
spotlessCheck rat checkPom resolveDependencies pmdMain -x test
+
+ apiCheck:
+ needs: build
+ strategy:
+ fail-fast: false
+ matrix:
+ os: [ubuntu-latest]
+ distribution: [ 'liberica' ]
+ java: ['11']
+ runs-on: ${{ matrix.os }}
+ steps:
+ - uses: actions/checkout@v3
+ - name: Set up JDK (include all 3 JDKs in the env)
+ uses: actions/setup-java@v3
+ with:
+ distribution: ${{ matrix.distribution }}
+ java-version: |
+ 8
+ 11
+ 17
+ - name: Set JAVA_TEST_PATH to 11
+ run: |
+ echo "JAVA_TEST_PATH=${JAVA_HOME_11_X64}" >> $GITHUB_ENV
+ if: matrix.java == '11'
+ - name: Java API Check
+ run: |
+ GRADLE_JVM_PATH=${JAVA_HOME_8_X64}
+ JAVA_BUILD_PATH=${JAVA_HOME_8_X64}
+ JAVA_BUILD_VERSION=8 # Use jdk 8 for build
+ JAVA_TEST_VERSION=${{ matrix.java }}
+ cp gradlew gradlewStrict
+ sed -e 's/JAVA_HOME/GRADLE_JVM/g' -i.back gradlewStrict
+ GRADLE_JVM=${GRADLE_JVM_PATH} JAVA_TEST_PATH=${JAVA_TEST_PATH}
./gradlewStrict \
+ -PcompileJVM=${JAVA_BUILD_PATH} \
+ -PcompileJVMVer=${JAVA_BUILD_VERSION} \
+ -PtestJVM=${JAVA_TEST_PATH} \
+ -PtestJVMVer=${JAVA_TEST_VERSION} \
+ -PtestJava8Home=${JAVA_HOME_8_X64} \
+ -PtestJava11Home=${JAVA_HOME_11_X64} \
+ -PtestJava17Home=${JAVA_HOME_17_X64} \
+ japicmp --console=plain --no-daemon
+
+ unitTest:
+ needs: apiCheck
+ strategy:
+ fail-fast: false
+ matrix:
+ os: [ubuntu-latest]
+ distribution: ['liberica']
+ java: ['8', '11', '17']
+ runs-on: ${{ matrix.os }}
+ steps:
+ - uses: actions/checkout@v3
+ - name: Set up JDK (include all 3 JDKs in env)
+ uses: actions/setup-java@v3
+ with:
+ distribution: ${{ matrix.distribution }}
+ java-version: |
+ 8
+ 11
+ 17
+ - name: Setup Gradle
+ uses: gradle/gradle-build-action@v2
+ - name: Set JAVA_TEST_PATH to 8
+ run: |
+ echo "JAVA_TEST_PATH=${JAVA_HOME_8_X64}" >> $GITHUB_ENV
+ if: matrix.java == '8'
+ - name: Set JAVA_TEST_PATH to 11
+ run: |
+ echo "JAVA_TEST_PATH=${JAVA_HOME_11_X64}" >> $GITHUB_ENV
+ if: matrix.java == '11'
+ - name: Set JAVA_TEST_PATH to 17
+ run: |
+ echo "JAVA_TEST_PATH=${JAVA_HOME_17_X64}" >> $GITHUB_ENV
+ if: matrix.java == '17'
+ - name: Run unit tests
+ run: |
+ GRADLE_JVM_PATH=${JAVA_HOME_8_X64}
+ JAVA_BUILD_PATH=${JAVA_HOME_8_X64}
+ JAVA_BUILD_VERSION=8 # Use jdk 8 for build
+ JAVA_TEST_VERSION=${{ matrix.java }}
+ cp gradlew gradlewStrict
+ sed -e 's/JAVA_HOME/GRADLE_JVM/g' -i.back gradlewStrict
+ GRADLE_JVM=${GRADLE_JVM_PATH} JAVA_TEST_PATH=${JAVA_TEST_PATH}
./gradlewStrict \
+ -PcompileJVM=${JAVA_BUILD_PATH} \
+ -PcompileJVMVer=${JAVA_BUILD_VERSION} \
+ -PtestJVM=${JAVA_TEST_PATH} \
+ -PtestJVMVer=${JAVA_TEST_VERSION} \
+ -PtestJava8Home=${JAVA_HOME_8_X64} \
+ -PtestJava11Home=${JAVA_HOME_11_X64} \
+ -PtestJava17Home=${JAVA_HOME_17_X64} \
+ test --console=plain --no-daemon
+ - uses: actions/upload-artifact@v3
+ if: failure()
+ with:
+ name: unit-test-reports-${{ matrix.os }}-${{ matrix.java }}
+ path: build/reports
+ retention-days: 5
\ No newline at end of file
diff --git
a/geode-core/src/test/java/org/apache/geode/distributed/internal/DistributionTest.java
b/geode-core/src/integrationTest/java/org/apache/geode/distributed/internal/DistributionIntegrationTest.java
similarity index 99%
rename from
geode-core/src/test/java/org/apache/geode/distributed/internal/DistributionTest.java
rename to
geode-core/src/integrationTest/java/org/apache/geode/distributed/internal/DistributionIntegrationTest.java
index f3844ce47b..0c2e610623 100644
---
a/geode-core/src/test/java/org/apache/geode/distributed/internal/DistributionTest.java
+++
b/geode-core/src/integrationTest/java/org/apache/geode/distributed/internal/DistributionIntegrationTest.java
@@ -56,7 +56,7 @@ import
org.apache.geode.internal.admin.remote.AlertListenerMessage;
import org.apache.geode.internal.admin.remote.RemoteTransportConfig;
import org.apache.geode.internal.tcp.ConnectExceptions;
-public class DistributionTest {
+public class DistributionIntegrationTest {
private DirectChannel dc;
diff --git
a/geode-core/src/main/java/org/apache/geode/internal/cache/DiskStoreImpl.java
b/geode-core/src/main/java/org/apache/geode/internal/cache/DiskStoreImpl.java
index 8415a00b34..dd2bb2c300 100644
---
a/geode-core/src/main/java/org/apache/geode/internal/cache/DiskStoreImpl.java
+++
b/geode-core/src/main/java/org/apache/geode/internal/cache/DiskStoreImpl.java
@@ -71,6 +71,7 @@ import it.unimi.dsi.fastutil.ints.IntOpenHashSet;
import it.unimi.dsi.fastutil.longs.LongOpenHashSet;
import org.apache.commons.io.FileUtils;
import org.apache.logging.log4j.Logger;
+import org.jetbrains.annotations.TestOnly;
import org.apache.geode.CancelCriterion;
import org.apache.geode.CancelException;
@@ -198,7 +199,7 @@ public class DiskStoreImpl implements DiskStore {
GeodeGlossary.GEMFIRE_PREFIX + "disk.recoverLruValues";
static final long DRF_HASHMAP_OVERFLOW_THRESHOLD_DEFAULT = 805306368;
- static final long DRF_HASHMAP_OVERFLOW_THRESHOLD =
+ final long DRF_HASHMAP_OVERFLOW_THRESHOLD =
Long.getLong(DRF_HASHMAP_OVERFLOW_THRESHOLD_NAME,
DRF_HASHMAP_OVERFLOW_THRESHOLD_DEFAULT);
boolean RECOVER_VALUES =
getBoolean(DiskStoreImpl.RECOVER_VALUE_PROPERTY_NAME, true);
@@ -3532,18 +3533,26 @@ public class DiskStoreImpl implements DiskStore {
private final List<LongOpenHashSet> allLongs;
private final AtomicReference<IntOpenHashSet> currentInts;
private final AtomicReference<LongOpenHashSet> currentLongs;
+ private final long drfHashMapOverFlowThreashold;
// For testing purposes only.
@VisibleForTesting
- OplogEntryIdSet(List<IntOpenHashSet> allInts, List<LongOpenHashSet>
allLongs) {
+ OplogEntryIdSet(List<IntOpenHashSet> allInts, List<LongOpenHashSet>
allLongs,
+ long drfHashMapOverflowThreshold) {
this.allInts = allInts;
currentInts = new AtomicReference<>(this.allInts.get(0));
this.allLongs = allLongs;
currentLongs = new AtomicReference<>(this.allLongs.get(0));
+ this.drfHashMapOverFlowThreashold = drfHashMapOverflowThreshold;
}
- public OplogEntryIdSet() {
+ @TestOnly
+ OplogEntryIdSet(List<IntOpenHashSet> allInts, List<LongOpenHashSet>
allLongs) {
+ this(allInts, allLongs, DRF_HASHMAP_OVERFLOW_THRESHOLD_DEFAULT);
+ }
+
+ public OplogEntryIdSet(long drfHashMapOverflowThreshold) {
IntOpenHashSet intHashSet = new IntOpenHashSet((int) INVALID_ID);
allInts = new ArrayList<>();
allInts.add(intHashSet);
@@ -3553,6 +3562,11 @@ public class DiskStoreImpl implements DiskStore {
allLongs = new ArrayList<>();
allLongs.add(longHashSet);
currentLongs = new AtomicReference<>(longHashSet);
+ this.drfHashMapOverFlowThreashold = drfHashMapOverflowThreshold;
+ }
+
+ public OplogEntryIdSet() {
+ this(DRF_HASHMAP_OVERFLOW_THRESHOLD_DEFAULT);
}
public void add(long id) {
@@ -3580,14 +3594,14 @@ public class DiskStoreImpl implements DiskStore {
boolean shouldOverflow(final long id) {
if (id > 0 && id <= 0x00000000FFFFFFFFL) {
- return currentInts.get().size() == DRF_HASHMAP_OVERFLOW_THRESHOLD;
+ return currentInts.get().size() == drfHashMapOverFlowThreashold;
} else {
- return currentLongs.get().size() == DRF_HASHMAP_OVERFLOW_THRESHOLD;
+ return currentLongs.get().size() == drfHashMapOverFlowThreashold;
}
}
void overflowToNewHashMap(final long id) {
- if (DRF_HASHMAP_OVERFLOW_THRESHOLD ==
DRF_HASHMAP_OVERFLOW_THRESHOLD_DEFAULT) {
+ if (drfHashMapOverFlowThreashold ==
DRF_HASHMAP_OVERFLOW_THRESHOLD_DEFAULT) {
logger.warn(
"There is a large number of deleted entries within the disk-store,
please execute an offline compaction.");
}
diff --git
a/geode-core/src/main/java/org/apache/geode/internal/cache/PersistentOplogSet.java
b/geode-core/src/main/java/org/apache/geode/internal/cache/PersistentOplogSet.java
index 67291651a3..80a9378cfd 100644
---
a/geode-core/src/main/java/org/apache/geode/internal/cache/PersistentOplogSet.java
+++
b/geode-core/src/main/java/org/apache/geode/internal/cache/PersistentOplogSet.java
@@ -440,7 +440,7 @@ public class PersistentOplogSet implements OplogSet {
}
private long recoverOplogs(long byteCount) {
- OplogEntryIdSet deletedIds = new OplogEntryIdSet();
+ OplogEntryIdSet deletedIds = new
OplogEntryIdSet(parent.DRF_HASHMAP_OVERFLOW_THRESHOLD);
TreeSet<Oplog> oplogSet = getSortedOplogs();
if (!getAlreadyRecoveredOnce().get()) {
diff --git
a/geode-core/src/main/java/org/apache/geode/internal/tcp/Connection.java
b/geode-core/src/main/java/org/apache/geode/internal/tcp/Connection.java
index 9e921d7d03..191c991b89 100644
--- a/geode-core/src/main/java/org/apache/geode/internal/tcp/Connection.java
+++ b/geode-core/src/main/java/org/apache/geode/internal/tcp/Connection.java
@@ -55,6 +55,7 @@ import javax.net.ssl.SSLHandshakeException;
import org.apache.commons.validator.routines.InetAddressValidator;
import org.apache.logging.log4j.Logger;
import org.jetbrains.annotations.NotNull;
+import org.jetbrains.annotations.TestOnly;
import org.apache.geode.CancelException;
import org.apache.geode.SerializationException;
@@ -552,6 +553,12 @@ public class Connection implements Runnable {
return P2P_CONNECT_TIMEOUT;
}
+ @TestOnly
+ static void clearP2PConnectTimeout() {
+ IS_P2P_CONNECT_TIMEOUT_INITIALIZED = false;
+ P2P_CONNECT_TIMEOUT = 0;
+ }
+
/**
* @return true if this thread is a reader thread, otherwise false
*/
diff --git
a/geode-core/src/test/java/org/apache/geode/internal/cache/DiskStoreImplTest.java
b/geode-core/src/test/java/org/apache/geode/internal/cache/DiskStoreImplTest.java
new file mode 100644
index 0000000000..af025938e8
--- /dev/null
+++
b/geode-core/src/test/java/org/apache/geode/internal/cache/DiskStoreImplTest.java
@@ -0,0 +1,54 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
contributor license
+ * agreements. See the NOTICE file distributed with this work for additional
information regarding
+ * copyright ownership. The ASF licenses this file to You under the Apache
License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the
License. You may obtain a
+ * copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
distributed under the License
+ * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express
+ * or implied. See the License for the specific language governing permissions
and limitations under
+ * the License.
+ */
+package org.apache.geode.internal.cache;
+
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
+import java.io.File;
+
+import org.assertj.core.api.Assertions;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.io.TempDir;
+import org.junitpioneer.jupiter.SetSystemProperty;
+
+import org.apache.geode.Statistics;
+import org.apache.geode.distributed.internal.InternalDistributedSystem;
+import org.apache.geode.internal.statistics.StatisticsManager;
+
+class DiskStoreImplTest {
+ @Test
+ @SetSystemProperty(key = "gemfire.disk.drfHashMapOverflowThreshold", value =
"10")
+ public void testDrfHashMapOverflowThresholdSystemPropertyIsUsed(@TempDir
File dir1,
+ @TempDir File dir2) {
+ InternalCache cache = mock(InternalCache.class);
+ InternalDistributedSystem internalDistributedSystem =
mock(InternalDistributedSystem.class);
+ DiskStoreAttributes diskStoreAttributes = mock(DiskStoreAttributes.class);
+ StatisticsManager statisticsManager = mock(StatisticsManager.class);
+
+
when(internalDistributedSystem.getStatisticsManager()).thenReturn(statisticsManager);
+
when(cache.getInternalDistributedSystem()).thenReturn(internalDistributedSystem);
+ when(diskStoreAttributes.getDiskDirs()).thenReturn(
+ new File[] {dir1, dir2});
+ when(diskStoreAttributes.getDiskDirSizes()).thenReturn(new int[] {1, 1});
+
when(diskStoreAttributes.getDiskDirSizesUnit()).thenReturn(DiskDirSizesUnit.MEGABYTES);
+ when(statisticsManager.createStatistics(any(),
any())).thenReturn(mock(Statistics.class));
+
+ DiskStoreImpl diskStore = new DiskStoreImpl(cache, diskStoreAttributes);
+
+
Assertions.assertThat(diskStore.DRF_HASHMAP_OVERFLOW_THRESHOLD).isEqualTo(10);
+ }
+}
diff --git
a/geode-core/src/test/java/org/apache/geode/internal/cache/OplogEntryIdSetDrfHashSetThresholdTest.java
b/geode-core/src/test/java/org/apache/geode/internal/cache/OplogEntryIdSetDrfHashSetThresholdTest.java
index ff7e43e7b5..e4b3c56f36 100644
---
a/geode-core/src/test/java/org/apache/geode/internal/cache/OplogEntryIdSetDrfHashSetThresholdTest.java
+++
b/geode-core/src/test/java/org/apache/geode/internal/cache/OplogEntryIdSetDrfHashSetThresholdTest.java
@@ -25,7 +25,6 @@ import java.util.stream.LongStream;
import it.unimi.dsi.fastutil.ints.IntOpenHashSet;
import it.unimi.dsi.fastutil.longs.LongOpenHashSet;
import org.junit.jupiter.api.Test;
-import org.junitpioneer.jupiter.SetSystemProperty;
import org.apache.geode.internal.cache.DiskStoreImpl.OplogEntryIdSet;
@@ -34,9 +33,7 @@ import
org.apache.geode.internal.cache.DiskStoreImpl.OplogEntryIdSet;
*/
public class OplogEntryIdSetDrfHashSetThresholdTest {
@Test
- @SetSystemProperty(key = "gemfire.disk.drfHashMapOverflowThreshold", value =
"10")
public void addMethodOverflowBasedOnDrfOverflowThresholdParameters() {
-
int testEntries = 41;
IntOpenHashSet intOpenHashSet = new IntOpenHashSet();
LongOpenHashSet longOpenHashSet = new LongOpenHashSet();
@@ -46,7 +43,7 @@ public class OplogEntryIdSetDrfHashSetThresholdTest {
List<LongOpenHashSet> longOpenHashSets =
new ArrayList<>(Collections.singletonList(longOpenHashSet));
- OplogEntryIdSet oplogEntryIdSet = new OplogEntryIdSet(intOpenHashSets,
longOpenHashSets);
+ OplogEntryIdSet oplogEntryIdSet = new OplogEntryIdSet(intOpenHashSets,
longOpenHashSets, 10);
IntStream.range(1, testEntries).forEach(oplogEntryIdSet::add);
LongStream.range(0x00000000FFFFFFFFL + 1, 0x00000000FFFFFFFFL +
testEntries)
.forEach(oplogEntryIdSet::add);
diff --git
a/geode-core/src/test/java/org/apache/geode/internal/tcp/ConnectionTest.java
b/geode-core/src/test/java/org/apache/geode/internal/tcp/ConnectionTest.java
index dcbb8e7a4d..9bb757b4b3 100644
--- a/geode-core/src/test/java/org/apache/geode/internal/tcp/ConnectionTest.java
+++ b/geode-core/src/test/java/org/apache/geode/internal/tcp/ConnectionTest.java
@@ -35,6 +35,7 @@ import java.net.UnknownHostException;
import java.nio.ByteBuffer;
import java.nio.channels.SocketChannel;
+import org.junit.After;
import org.junit.Test;
import org.junit.experimental.categories.Category;
@@ -54,6 +55,11 @@ import org.apache.geode.test.junit.categories.MembershipTest;
@Category(MembershipTest.class)
public class ConnectionTest {
+ @After
+ public void tearDown() {
+ Connection.clearP2PConnectTimeout();
+ }
+
@SuppressWarnings("ConstantConditions")
@Test
public void canBeMocked() throws Exception {
diff --git
a/geode-core/src/test/java/org/apache/geode/internal/tcp/ConnectionTransmissionTest.java
b/geode-core/src/test/java/org/apache/geode/internal/tcp/ConnectionTransmissionTest.java
index 906a021dec..1b98e7f4f5 100644
---
a/geode-core/src/test/java/org/apache/geode/internal/tcp/ConnectionTransmissionTest.java
+++
b/geode-core/src/test/java/org/apache/geode/internal/tcp/ConnectionTransmissionTest.java
@@ -39,6 +39,7 @@ import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
+import org.junit.After;
import org.junit.Test;
import org.junit.experimental.categories.Category;
@@ -59,6 +60,11 @@ import org.apache.geode.test.junit.categories.MembershipTest;
@Category(MembershipTest.class)
public class ConnectionTransmissionTest {
+ @After
+ public void tearDown() {
+ Connection.clearP2PConnectTimeout();
+ }
+
/**
* Create a sender connection and a receiver connection and pass data from
* one to the other.