This is an automated email from the ASF dual-hosted git repository. markt pushed a commit to branch 10.1.x in repository https://gitbox.apache.org/repos/asf/tomcat.git
commit 53da9862c9ef8b8663b1f234f6feb11b59bca366 Author: Mark Thomas <ma...@apache.org> AuthorDate: Wed Jan 3 10:13:33 2024 +0000 Refactor tests that require large heap to new category These tests are still disabled by default but: - do not appear as skipped tests in a standard test run - can be enabled with a build property rather than a code change --- test/org/apache/tomcat/util/buf/TestByteChunk.java | 36 ------------- .../tomcat/util/buf/TestByteChunkLargeHeap.java | 60 ++++++++++++++++++++++ test/org/apache/tomcat/util/buf/TestCharChunk.java | 31 ----------- .../tomcat/util/buf/TestCharChunkLargeHeap.java | 55 ++++++++++++++++++++ 4 files changed, 115 insertions(+), 67 deletions(-) diff --git a/test/org/apache/tomcat/util/buf/TestByteChunk.java b/test/org/apache/tomcat/util/buf/TestByteChunk.java index 0299e891e1..ce8234230e 100644 --- a/test/org/apache/tomcat/util/buf/TestByteChunk.java +++ b/test/org/apache/tomcat/util/buf/TestByteChunk.java @@ -18,20 +18,15 @@ package org.apache.tomcat.util.buf; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; -import java.io.IOException; import java.io.ObjectInputStream; import java.io.ObjectOutputStream; import java.io.UnsupportedEncodingException; -import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.util.Arrays; import org.junit.Assert; -import org.junit.Ignore; import org.junit.Test; -import org.apache.tomcat.util.buf.ByteChunk.ByteOutputChannel; - /** * Test cases for {@link ByteChunk}. 
*/ @@ -172,37 +167,6 @@ public class TestByteChunk { } - @Ignore // Requires a 6GB heap (on markt's desktop - YMMV) - @Test - public void testAppend() throws Exception { - ByteChunk bc = new ByteChunk(); - bc.setByteOutputChannel(new Sink()); - // Defaults to no limit - - byte data[] = new byte[32 * 1024 * 1024]; - - for (int i = 0; i < 100; i++) { - bc.append(data, 0, data.length); - } - - Assert.assertEquals(AbstractChunk.ARRAY_MAX_SIZE, bc.getBuffer().length); - } - - - public static class Sink implements ByteOutputChannel { - - @Override - public void realWriteBytes(byte[] cbuf, int off, int len) throws IOException { - // NO-OP - } - - @Override - public void realWriteBytes(ByteBuffer from) throws IOException { - // NO-OP - } - } - - @Test public void testToString() { ByteChunk bc = new ByteChunk(); diff --git a/test/org/apache/tomcat/util/buf/TestByteChunkLargeHeap.java b/test/org/apache/tomcat/util/buf/TestByteChunkLargeHeap.java new file mode 100644 index 0000000000..e0b0f2f605 --- /dev/null +++ b/test/org/apache/tomcat/util/buf/TestByteChunkLargeHeap.java @@ -0,0 +1,60 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.tomcat.util.buf; + +import java.io.IOException; +import java.nio.ByteBuffer; + +import org.junit.Assert; +import org.junit.Test; + +import org.apache.tomcat.util.buf.ByteChunk.ByteOutputChannel; + +/** + * Test cases for {@link ByteChunk} that require a large heap. + */ +public class TestByteChunkLargeHeap { + + @Test + public void testAppend() throws Exception { + ByteChunk bc = new ByteChunk(); + bc.setByteOutputChannel(new Sink()); + // Defaults to no limit + + byte data[] = new byte[32 * 1024 * 1024]; + + for (int i = 0; i < 100; i++) { + bc.append(data, 0, data.length); + } + + Assert.assertEquals(AbstractChunk.ARRAY_MAX_SIZE, bc.getBuffer().length); + } + + + public static class Sink implements ByteOutputChannel { + + @Override + public void realWriteBytes(byte[] cbuf, int off, int len) throws IOException { + // NO-OP + } + + @Override + public void realWriteBytes(ByteBuffer from) throws IOException { + // NO-OP + } + } +} diff --git a/test/org/apache/tomcat/util/buf/TestCharChunk.java b/test/org/apache/tomcat/util/buf/TestCharChunk.java index b540ee8baf..c2182af8f1 100644 --- a/test/org/apache/tomcat/util/buf/TestCharChunk.java +++ b/test/org/apache/tomcat/util/buf/TestCharChunk.java @@ -16,14 +16,9 @@ */ package org.apache.tomcat.util.buf; -import java.io.IOException; - import org.junit.Assert; -import org.junit.Ignore; import org.junit.Test; -import org.apache.tomcat.util.buf.CharChunk.CharOutputChannel; - /** * Test cases for {@link CharChunk}. 
*/ @@ -69,32 +64,6 @@ public class TestCharChunk { } - @Ignore // Requires an 11GB heap (on markt's desktop - YMMV) - @Test - public void testAppend() throws Exception { - CharChunk cc = new CharChunk(); - cc.setCharOutputChannel(new Sink()); - // Defaults to no limit - - char data[] = new char[32 * 1024 * 1024]; - - for (int i = 0; i < 100; i++) { - cc.append(data, 0, data.length); - } - - Assert.assertEquals(AbstractChunk.ARRAY_MAX_SIZE, cc.getBuffer().length); - } - - - public static class Sink implements CharOutputChannel { - - @Override - public void realWriteChars(char[] cbuf, int off, int len) throws IOException { - // NO-OP - } - } - - @Test public void testToString() { CharChunk cc = new CharChunk(); diff --git a/test/org/apache/tomcat/util/buf/TestCharChunkLargeHeap.java b/test/org/apache/tomcat/util/buf/TestCharChunkLargeHeap.java new file mode 100644 index 0000000000..fd6820b563 --- /dev/null +++ b/test/org/apache/tomcat/util/buf/TestCharChunkLargeHeap.java @@ -0,0 +1,55 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.tomcat.util.buf; + +import java.io.IOException; + +import org.junit.Assert; +import org.junit.Test; + +import org.apache.tomcat.util.buf.CharChunk.CharOutputChannel; + +/** + * Test cases for {@link CharChunk} that require a large heap. + */ +public class TestCharChunkLargeHeap { + + @Test + public void testAppend() throws Exception { + CharChunk cc = new CharChunk(); + cc.setCharOutputChannel(new Sink()); + // Defaults to no limit + + char data[] = new char[32 * 1024 * 1024]; + + for (int i = 0; i < 100; i++) { + cc.append(data, 0, data.length); + } + + Assert.assertEquals(AbstractChunk.ARRAY_MAX_SIZE, cc.getBuffer().length); + } + + + public static class Sink implements CharOutputChannel { + + @Override + public void realWriteChars(char[] cbuf, int off, int len) throws IOException { + // NO-OP + } + } + +} --------------------------------------------------------------------- To unsubscribe, e-mail: dev-unsubscr...@tomcat.apache.org For additional commands, e-mail: dev-h...@tomcat.apache.org