diff --git a/agent/src/main/java/io/keploy/advice/redis/jedis/JedisPoolResource_Advice.java b/agent/src/main/java/io/keploy/advice/redis/jedis/JedisPoolResource_Advice.java deleted file mode 100644 index 4f469520..00000000 --- a/agent/src/main/java/io/keploy/advice/redis/jedis/JedisPoolResource_Advice.java +++ /dev/null @@ -1,60 +0,0 @@ -package io.keploy.advice.redis.jedis; - -import io.keploy.regression.Mode; -import io.keploy.regression.context.Context; -import io.keploy.regression.context.Kcontext; -import net.bytebuddy.asm.Advice; -import org.slf4j.LoggerFactory; -import org.slf4j.Logger; -import redis.clients.jedis.Jedis; -import redis.clients.jedis.JedisPool; - -import java.util.Objects; - -import static net.bytebuddy.implementation.bytecode.assign.Assigner.Typing.DYNAMIC; - -/** - * Class {@link JedisPoolResource_Advice} is used for intercepting method {@link JedisPool#getResource()} and returning - * {@link Jedis} object when Keploy is in TEST_MODE. - * - * @author charankamarapu - */ -public class JedisPoolResource_Advice { - - /** - * This method gets executed before the method {@link JedisPool#getResource()}. Based on the mode of Kelpoy it skips - * the invocation of the method {@link JedisPool#getResource()} - * - * @skipOn {@link Advice.OnNonDefaultValue} - this indicates that if any other value except default value of the - * return type is returned then skip method invocation of intercepting method i.e. - * {@link JedisPool#getResource()} - * @return Boolean - Default value false - */ - @Advice.OnMethodEnter(skipOn = Advice.OnNonDefaultValue.class) - static boolean enterMethods() { - final Logger logger = LoggerFactory.getLogger(JedisPoolResource_Advice.class); - Kcontext kCtx = Context.getCtx(); - if (Objects.isNull(kCtx)) { - logger.debug("Keploy context is null"); - return false; - } else { - return kCtx.getMode().equals(Mode.ModeType.MODE_TEST); - } - } - - /** - * This method gets executed after intercepting method {@link JedisPool#getResource()} irrespective of invocation of - * intercepting method. Based on the return value of the {@link JedisPoolResource_Advice#enterMethods()} it provides - * {@link Jedis} object as return value to the intercepting method. 
- * - * @param returned - the return object for intercepting method - * @param enter - the value returned from {@link JedisPoolResource_Advice#enterMethods()} - */ - @Advice.OnMethodExit() - static void enterMethods(@Advice.Return(readOnly = false, typing = DYNAMIC) Object returned, - @Advice.Enter boolean enter ) { - if(enter){ - returned = new Jedis(); - } - } -} diff --git a/dedupData.yaml/dedupData.yaml b/dedupData.yaml/dedupData.yaml new file mode 100644 index 00000000..e69de29b diff --git a/integration/pom.xml b/integration/pom.xml index 198a6115..447be554 100644 --- a/integration/pom.xml +++ b/integration/pom.xml @@ -23,6 +23,16 @@ 1.0.0-SNAPSHOT compile + + org.jline + jline + 3.20.0 + + + me.tongfei + progressbar + 0.5.5 + io.keploy common @@ -60,11 +70,11 @@ 2.1.214 provided - - - - - + + org.yaml + snakeyaml + 1.28 + org.mariadb.jdbc mariadb-java-client @@ -82,7 +92,6 @@ okhttp 3.14.9 - provided org.apache.httpcomponents @@ -103,6 +112,11 @@ 1.11.857 provided + + org.jacoco + org.jacoco.core + 0.8.7 + com.google.maps google-maps-services @@ -115,15 +129,19 @@ 2.11.0 - com.google.code.gson - gson - 2.8.9 + io.btrace + btrace-client + 2.2.3 + + + io.btrace + btrace-agent + 2.2.3 - - redis.clients - jedis - 2.9.3 + io.btrace + btrace-boot + 2.2.3 diff --git a/integration/src/main/java/io/keploy/redis/jedis/KConnection.java b/integration/src/main/java/io/keploy/redis/jedis/KConnection.java deleted file mode 100644 index 4aa2a15e..00000000 --- a/integration/src/main/java/io/keploy/redis/jedis/KConnection.java +++ /dev/null @@ -1,550 +0,0 @@ -package io.keploy.redis.jedis; - -import com.google.gson.Gson; -import com.google.gson.reflect.TypeToken; -import io.keploy.grpc.stubs.Service; -import io.keploy.regression.KeployInstance; -import io.keploy.regression.Mock; -import io.keploy.regression.Mode; -import io.keploy.regression.context.Context; -import io.keploy.regression.context.Kcontext; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import redis.clients.jedis.Connection; -import redis.clients.jedis.Protocol; -import redis.clients.util.SafeEncoder; - -import javax.net.ssl.HostnameVerifier; -import javax.net.ssl.SSLParameters; -import javax.net.ssl.SSLSocketFactory; -import java.lang.reflect.Type; -import java.net.Socket; -import java.util.*; - -/** - * KConnection is a class which extends Connection class and wraps it. KConnection records data in record mode and sends - * data in test mode. - */ -public class KConnection extends Connection { - - static final Logger logger = LoggerFactory.getLogger(KConnection.class); - private final Mode.ModeType keployMode = Context.getCtx().getMode().getModeFromContext(); - private Map meta = new HashMap() { - { - put("name", "redis"); - put("type", "NoSqlDB"); - } - }; - private static final RedisCustomSerializer redisCustomSerializer = new RedisCustomSerializer(); - private static final Gson gson = new Gson(); - private static final String CROSS = new String(Character.toChars(0x274C)); - private static final byte[][] EMPTY_ARGS = new byte[0][]; - - public KConnection() { - super(); - // fill data in Mock object into meta if application is in test mode. - if (keployMode == Mode.ModeType.MODE_TEST) { - fillMock(); - } - } - - public KConnection(String host) { - super(host); - // fill data in Mock object into meta if application is in test mode. 
- if (keployMode == Mode.ModeType.MODE_TEST) { - fillMock(); - } - } - - public KConnection(String host, int port) { - super(host, port); - // fill data in Mock object into meta if application is in test mode. - if (keployMode == Mode.ModeType.MODE_TEST) { - fillMock(); - } - } - - public KConnection(String host, int port, boolean ssl) { - super(host, port, ssl); - // fill data in Mock object into meta if application is in test mode. - if (keployMode == Mode.ModeType.MODE_TEST) { - fillMock(); - } - } - - public KConnection(String host, int port, boolean ssl, SSLSocketFactory sslSocketFactory, SSLParameters sslParameters, HostnameVerifier hostnameVerifier) { - super(host, port, ssl, sslSocketFactory, sslParameters, hostnameVerifier); - // fill data in Mock object into meta if application is in test mode. - if (keployMode == Mode.ModeType.MODE_TEST) { - fillMock(); - } - } - - @Override - public Socket getSocket() { - return super.getSocket(); - } - - @Override - public int getConnectionTimeout() { - return super.getConnectionTimeout(); - } - - @Override - public int getSoTimeout() { - return super.getSoTimeout(); - } - - @Override - public void setConnectionTimeout(int connectionTimeout) { - super.setConnectionTimeout(connectionTimeout); - } - - @Override - public void setSoTimeout(int soTimeout) { - super.setSoTimeout(soTimeout); - } - - @Override - public void setTimeoutInfinite() { - super.setTimeoutInfinite(); - } - - @Override - public void rollbackTimeout() { - super.rollbackTimeout(); - } - - @Override - protected Connection sendCommand(Protocol.Command cmd, String... args) { - switch (keployMode) { - case MODE_RECORD: - /* - if the request has reached this function it means that request did not send byte data instead request - sent objects. So Redis uses its default serializer to serialize the data. - */ - meta.put("serializer", SerializationType.REDIS_SERIALIZATION.toString()); - // capturing request data - meta.put("command", cmd.toString()); - int argCount = 1; - for (String arg : args) { - meta.put("arg".concat(Integer.toString(argCount)), arg); - argCount++; - } - return super.sendCommand(cmd, args); - case MODE_TEST: - /* - Implementing super class logic and calling function of this class. So the flow doesn't divert - completely to Connection class. - */ - byte[][] bargs = new byte[args.length][]; - for (int i = 0; i < args.length; ++i) { - bargs[i] = SafeEncoder.encode(args[i]); - } - return this.sendCommand(cmd, bargs); - default: - return super.sendCommand(cmd, args); - } - } - - @Override - protected Connection sendCommand(Protocol.Command cmd) { - /* - Implementing super class logic and calling function of this class. So the flow doesn't divert - completely to Connection class. - */ - return this.sendCommand(cmd, EMPTY_ARGS); - } - - @Override - protected Connection sendCommand(Protocol.Command cmd, byte[]... args) { - switch (keployMode) { - case MODE_RECORD: - /* - Checking if serializer is already set if not that means request sent bytes data i.e. before reaching - redis client serialization is done. As REDIS_CUSTOM_SERIALIZATION is the most used serializer using this - serializer. 
- */ - if (!meta.containsKey("serializer") || !Objects.equals(meta.get("serializer"), SerializationType.REDIS_SERIALIZATION.toString())) { - meta.put("serializer", SerializationType.REDIS_CUSTOM_SERIALIZATION.toString()); - // capturing data - meta.put("command", cmd.toString()); - int argCount = 1; - for (byte[] arg : args) { - Object deserializedObject = redisCustomSerializer.deserialize(arg); - meta.put("arg".concat(Integer.toString(argCount)), gson.toJson(deserializedObject)); - argCount++; - } - } - return super.sendCommand(cmd, args); - case MODE_TEST: - /* - Returning this class instead of Connection - */ - return this; - default: - return super.sendCommand(cmd, args); - } - } - - @Override - public String getHost() { - return super.getHost(); - } - - @Override - public void setHost(String host) { - super.setHost(host); - } - - @Override - public int getPort() { - return super.getPort(); - } - - @Override - public void setPort(int port) { - super.setPort(port); - } - - @Override - public void connect() { - switch (keployMode) { - case MODE_TEST: - // does nothing - break; - default: - super.connect(); - } - } - - @Override - public void close() { - this.disconnect(); - } - - @Override - public void disconnect() { - switch (keployMode) { - case MODE_TEST: - break; - // does nothing - default: - super.disconnect(); - } - } - - @Override - public boolean isConnected() { - return super.isConnected(); - } - - @Override - public String getStatusCodeReply() { - switch (keployMode) { - case MODE_RECORD: - // capturing data - String statusCodeReply = super.getStatusCodeReply(); - meta.put("response", statusCodeReply); - sendToServer(); - return statusCodeReply; - case MODE_TEST: - // returning recorded data - return meta.get("response"); - default: - return super.getStatusCodeReply(); - } - } - - @Override - public String getBulkReply() { - switch (keployMode) { - case MODE_RECORD: - // capturing data - String bulkReply = super.getBulkReply(); - meta.put("response", bulkReply); - sendToServer(); - return bulkReply; - case MODE_TEST: - // returning recorded data - return meta.get("response"); - default: - return super.getBulkReply(); - } - } - - @Override - public byte[] getBinaryBulkReply() { - switch (keployMode) { - case MODE_RECORD: - /* - Checking if serializer is already set if not that means request sent bytes data i.e. before reaching - redis client serialization is done. As REDIS_CUSTOM_SERIALIZATION is the most used serializer using this - serializer. 
- */ - if (Objects.equals(meta.get("serializer"), SerializationType.REDIS_SERIALIZATION.toString())) { - return super.getBinaryBulkReply(); - } else { - // capturing data - byte[] binaryBulkReply = super.getBinaryBulkReply(); - Object deserializedObject = redisCustomSerializer.deserialize(binaryBulkReply); - meta.put("response", gson.toJson(deserializedObject)); - sendToServer(); - return binaryBulkReply; - } - case MODE_TEST: - // returning recorded data based on serializer - if (!Objects.equals(meta.get("serializer"), SerializationType.REDIS_SERIALIZATION.toString())) { - return redisCustomSerializer.serialize(gson.fromJson(meta.get("response"), Object.class)); - } - return super.getBinaryBulkReply(); - default: - return super.getBinaryBulkReply(); - } - } - - @Override - public Long getIntegerReply() { - switch (keployMode) { - case MODE_RECORD: - // recording data - Long integerReply = super.getIntegerReply(); - meta.put("response", integerReply.toString()); - sendToServer(); - return integerReply; - case MODE_TEST: - // sending recorded data - return Long.parseLong(meta.get("response")); - default: - return super.getIntegerReply(); - } - } - - @Override - public List getMultiBulkReply() { - switch (keployMode) { - case MODE_RECORD: - // recording data - List multiBulkReply = super.getMultiBulkReply(); - meta.put("response", multiBulkReply.toString()); - sendToServer(); - return multiBulkReply; - case MODE_TEST: - // sending recorded data - return new ArrayList(Arrays.asList(meta.get("response").split(","))); - default: - return super.getMultiBulkReply(); - } - } - - @Override - public List getBinaryMultiBulkReply() { - switch (keployMode) { - case MODE_RECORD: - /* - Checking if serializer is already set if not that means request sent bytes data i.e. before reaching - redis client serialization is done. As REDIS_CUSTOM_SERIALIZATION is the most used serializer ,using this - serializer. 
- */ - if (Objects.equals(meta.get("serializer"), SerializationType.REDIS_SERIALIZATION.toString())) { - return super.getBinaryMultiBulkReply(); - } else { - // recording data - List binaryMultiBulkReply = super.getBinaryMultiBulkReply(); - List response = new ArrayList<>(); - for (byte[] i : binaryMultiBulkReply) { - Object deserializedObject = redisCustomSerializer.deserialize(i); - response.add(deserializedObject); - } - meta.put("response", gson.toJson(response)); - sendToServer(); - return binaryMultiBulkReply; - } - case MODE_TEST: - // sending recorded data - List response = new ArrayList<>(); - Type listOfObject = new TypeToken>() { - }.getType(); - List lObj = gson.fromJson(meta.get("response"), listOfObject); - for (Object i : lObj) { - response.add(redisCustomSerializer.serialize(i)); - } - return response; - default: - return super.getBinaryMultiBulkReply(); - } - } - - @Override - public void resetPipelinedCount() { - super.resetPipelinedCount(); - } - - @Override - public List getRawObjectMultiBulkReply() { - switch (keployMode) { - case MODE_RECORD: - // recording data - List rawObjectMultiBulkReply = super.getRawObjectMultiBulkReply(); - meta.put("response", gson.toJson(rawObjectMultiBulkReply)); - sendToServer(); - return rawObjectMultiBulkReply; - case MODE_TEST: - // sending recorded data - Type listOfObject = new TypeToken>() { - }.getType(); - return gson.fromJson(meta.get("response"), listOfObject); - default: - return super.getRawObjectMultiBulkReply(); - } - } - - @Override - public List getObjectMultiBulkReply() { - switch (keployMode) { - case MODE_RECORD: - // recording data - List objectMultiBulkReply = super.getObjectMultiBulkReply(); - meta.put("response", gson.toJson(objectMultiBulkReply)); - sendToServer(); - return objectMultiBulkReply; - case MODE_TEST: - // sending recorded data - Type listOfObject = new TypeToken>() { - }.getType(); - return gson.fromJson(meta.get("response"), listOfObject); - default: - return super.getObjectMultiBulkReply(); - } - } - - @Override - public List getIntegerMultiBulkReply() { - switch (keployMode) { - case MODE_RECORD: - // recording data - List integerMultiBulkReply = super.getIntegerMultiBulkReply(); - meta.put("response", gson.toJson(integerMultiBulkReply)); - sendToServer(); - return integerMultiBulkReply; - case MODE_TEST: - // sending recorded data - Type listOfLong = new TypeToken>() { - }.getType(); - return gson.fromJson(meta.get("response"), listOfLong); - default: - return super.getIntegerMultiBulkReply(); - } - } - - @Override - public List getAll() { - switch (keployMode) { - case MODE_RECORD: - // recording data - List getAll = super.getAll(); - meta.put("response", gson.toJson(getAll)); - sendToServer(); - return getAll; - case MODE_TEST: - // sending recorded data - Type listOfObject = new TypeToken>() { - }.getType(); - return gson.fromJson(meta.get("response"), listOfObject); - default: - return super.getAll(); - } - } - - @Override - public List getAll(int except) { - switch (keployMode) { - case MODE_RECORD: - // recording data - List getAll = super.getAll(except); - meta.put("response", gson.toJson(getAll)); - sendToServer(); - return getAll; - case MODE_TEST: - // sending recorded data - Type listOfObject = new TypeToken>() { - }.getType(); - return gson.fromJson(meta.get("response"), listOfObject); - default: - return super.getAll(except); - } - } - - @Override - public Object getOne() { - switch (keployMode) { - case MODE_RECORD: - // recording data - Object getOne = super.getOne(); - 
meta.put("response", gson.toJson(getOne)); - sendToServer(); - return getOne; - case MODE_TEST: - // sending recorded data - return gson.fromJson(meta.get("response"), Object.class); - default: - return super.getOne(); - } - } - - @Override - public boolean isBroken() { - return super.isBroken(); - } - - @Override - protected void flush() { - super.flush(); - } - - @Override - protected Object readProtocolWithCheckingBroken() { - return super.readProtocolWithCheckingBroken(); - } - - // method to send data to server - private void sendToServer() { - Kcontext kctx = Context.getCtx(); - logger.debug("meta:{}", meta.toString()); - if (Objects.equals(meta.get("command"), Protocol.Command.PING.toString()) || - Objects.equals(meta.get("command"), Protocol.Command.QUIT.toString())) { - return; - } - Service.Mock.SpecSchema specSchema = Service.Mock.SpecSchema.newBuilder() - .putAllMetadata(meta) - .build(); - Service.Mock redisMock = Service.Mock.newBuilder() - .setVersion(Mock.Version.V1_BETA1.value) - .setKind(Mock.Kind.GENERIC_EXPORT.value) - .setSpec(specSchema) - .build(); - kctx.getMock().add(redisMock); - } - - // method to fill meta with the mock - private void fillMock() { - Kcontext kctx = Context.getCtx(); - if (kctx.getMock().size() > 0 && kctx.getMock().get(0).getKind().equals(Mock.Kind.GENERIC_EXPORT.value)) { - List mocks = kctx.getMock(); - meta = mocks.get(0).getSpec().getMetadataMap(); - mocks.remove(0); - } else { - logger.error(CROSS + " mocks not present in " + KeployInstance.getInstance().getKeploy().getCfg().getApp().getMockPath() + " directory."); - throw new RuntimeException("unable to read mocks from keploy context"); - } - } - - public enum SerializationType { - REDIS_SERIALIZATION, - REDIS_CUSTOM_SERIALIZATION; - - SerializationType() { - - } - } -} diff --git a/integration/src/main/java/io/keploy/redis/jedis/RedisCustomSerializer.java b/integration/src/main/java/io/keploy/redis/jedis/RedisCustomSerializer.java deleted file mode 100644 index bb655149..00000000 --- a/integration/src/main/java/io/keploy/redis/jedis/RedisCustomSerializer.java +++ /dev/null @@ -1,32 +0,0 @@ -package io.keploy.redis.jedis; - -import lombok.NoArgsConstructor; - -import java.io.*; - -@NoArgsConstructor -public class RedisCustomSerializer { - - public byte[] serialize(T obj) { - try { - ByteArrayOutputStream byteStream = new ByteArrayOutputStream(); - ObjectOutputStream objectStream = new ObjectOutputStream(byteStream); - objectStream.writeObject(obj); - objectStream.flush(); - return byteStream.toByteArray(); - } catch (Exception e) { - throw new RuntimeException("Cannot serialize", e); - } - } - - public T deserialize(byte[] bytes) { - try { - ByteArrayInputStream byteStream = new ByteArrayInputStream(bytes); - ObjectInputStream objectStream = new ObjectInputStream(byteStream); - Object obj = objectStream.readObject(); - return (T) obj; - } catch (Exception e) { - throw new RuntimeException("Cannot deserialize", e); - } - } -} diff --git a/integration/src/main/java/io/keploy/servlet/Experiments.java b/integration/src/main/java/io/keploy/servlet/Experiments.java new file mode 100644 index 00000000..2bb248f0 --- /dev/null +++ b/integration/src/main/java/io/keploy/servlet/Experiments.java @@ -0,0 +1,114 @@ +package io.keploy.servlet; + +import me.tongfei.progressbar.ProgressBar; +import org.jacoco.core.analysis.Analyzer; +import org.jacoco.core.analysis.CoverageBuilder; +import org.jacoco.core.analysis.IClassCoverage; +import org.jacoco.core.analysis.ICounter; +import 
org.jacoco.core.tools.ExecFileLoader; + +import java.io.File; +import java.io.IOException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import static java.lang.System.out; + +public class Experiments { + private static String getColor(final int status) { + switch (status) { + case ICounter.NOT_COVERED: + return "red"; + case ICounter.PARTLY_COVERED: + return "yellow"; + case ICounter.FULLY_COVERED: + return "green"; + } + return ""; + } + + private static void execReader() throws IOException { + // Together with the original class definition we can calculate coverage + // information: + out.println("------------------------------------------"); + String Line_Path = ""; + ExecFileLoader loader = new ExecFileLoader(); + // ExecutionDataWriter executionDataWriter = new ExecutionDataWriter(null); + // ExecutionDataReader reader = new ExecutionDataReader(null); + // reader.read(); + List> dataList = new ArrayList<>(); + // Load the coverage data file + File coverageFile = new File( + "/Users/sarthak_1/Documents/Keploy/trash/samples-java/target/jacoco-clienttest-188.exec"); + loader.load(coverageFile); + File binDir = new File( + "/Users/sarthak_1/Documents/Keploy/trash/samples-java/target/classes"); + final CoverageBuilder coverageBuilder = new CoverageBuilder(); + final Analyzer analyzer = new Analyzer(loader.getExecutionDataStore(), coverageBuilder); + analyzer.analyzeAll(binDir); + int x = 0; + Map> executedLinesByFile = new HashMap<>(); + + for (final IClassCoverage cc : coverageBuilder.getClasses()) { + // out.printf("Coverage of class %s%n", cc.getName()); + String ClassName = cc.getName(); // base64Encode(cc.getName()); + System.out.println(cc.getMethods()); + java.util.Collection method = cc.getMethods(); + + cc.getInstructionCounter().getTotalCount(); + List ls = new ArrayList<>(); + for (int i = cc.getFirstLine(); i <= cc.getLastLine(); i++) { + // out.printf("Line %s: %s%n", Integer.valueOf(i), + // getColor(cc.getLine(i).getStatus())); + if (getColor(cc.getLine(i).getStatus()).equals("green")) { + Line_Path += ClassName + i + ","; + out.println("LINE PATH " + Line_Path); + ls.add(i); + } + } + if (ls.size() != 0) { + executedLinesByFile.put(ClassName, ls); + } + + } + + System.out.println("Line_Path: " + Line_Path); + +// Map testData = new HashMap<>(); +// testData.put("id", keploy_test_id); +// // Map test1 = createTestData("test-1",testData); +// testData.put("executedLinesByFile", executedLinesByFile); +// +// dataList.add(testData); + +// List> existingData = readYamlFile("dedupData.yaml"); +// // Append new data to the existing data +// existingData.addAll(dataList); +// +// // Write data to YAML file +// writeYamlFile(existingData, "dedupData.yaml"); + } + public static void main(String[] args) throws IOException { +// int totalTasks = 1000; +// +// ProgressBar progressBar = new ProgressBar("Progress", totalTasks); +// progressBar.start(); +// +// for (int i = 0; i <= totalTasks; i++) { +// // Simulate some task +// try { +// Thread.sleep(50); +// } catch (InterruptedException e) { +// e.printStackTrace(); +// } +// +// // Increment the progress bar +// progressBar.step(); +// } +// +// progressBar.stop(); + execReader(); + } +} diff --git a/integration/src/main/java/io/keploy/servlet/KeployMiddleware.java b/integration/src/main/java/io/keploy/servlet/KeployMiddleware.java index 1b011f31..3e5a43b7 100644 --- a/integration/src/main/java/io/keploy/servlet/KeployMiddleware.java +++ 
b/integration/src/main/java/io/keploy/servlet/KeployMiddleware.java @@ -1,201 +1,65 @@ package io.keploy.servlet; import io.grpc.netty.shaded.io.netty.util.internal.InternalThreadLocalMap; -import io.keploy.grpc.stubs.Service; -import io.keploy.regression.KeployInstance; -import io.keploy.regression.context.Context; -import io.keploy.regression.context.Kcontext; -import io.keploy.regression.keploy.AppConfig; -import io.keploy.regression.keploy.Config; -import io.keploy.regression.keploy.Keploy; -import io.keploy.regression.keploy.ServerConfig; -import io.keploy.regression.Mode; -import io.keploy.service.GrpcService; -import io.keploy.utils.*; -import lombok.SneakyThrows; -import org.apache.commons.io.IOUtils; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import org.jacoco.core.analysis.Analyzer; +import org.jacoco.core.analysis.CoverageBuilder; +import org.jacoco.core.analysis.IClassCoverage; +import org.jacoco.core.analysis.ICounter; +import org.jacoco.core.data.ExecutionDataWriter; +import org.jacoco.core.runtime.RemoteControlReader; +import org.jacoco.core.runtime.RemoteControlWriter; +import org.jacoco.core.tools.ExecFileLoader; +import org.yaml.snakeyaml.DumperOptions; +import org.yaml.snakeyaml.Yaml; +import org.yaml.snakeyaml.reader.UnicodeReader; import javax.servlet.*; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; -import javax.servlet.http.Part; -import java.io.IOException; -import java.io.InputStream; -import java.io.UnsupportedEncodingException; -import java.nio.charset.StandardCharsets; -import java.nio.file.Path; -import java.nio.file.Paths; +import java.io.*; +import java.net.InetAddress; +import java.net.Socket; import java.util.*; -import java.util.concurrent.CountDownLatch; -import java.util.concurrent.TimeUnit; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.Future; +import java.util.concurrent.atomic.AtomicInteger; + +import static java.lang.System.out; -/** - * KeployMiddleware is a filter. 
This is where according to the keploy mode the service methods to capture test cases and - * to run tests - */ public class KeployMiddleware implements Filter { + private final ExecutorService executorService = Executors.newFixedThreadPool(10); + public static int Lines_covered = 0; + public static int Branch_covered = 0; + public static int Lines_total = 0; + public static int Branch_total = 0; + public static int Methods_covered = 0; + public static int Methods_total = 0; + public static int Classes_covered = 0; + public static int Classes_total = 0; + public static String Line_Path = ""; + private static final Logger logger = LogManager.getLogger(KeployMiddleware.class); private static final String CROSS = new String(Character.toChars(0x274C)); + public static ArrayList stackTraceArr = new ArrayList<>(); + private static boolean EnableDedup = false; + public static AtomicInteger metCount = new AtomicInteger(0); + public static AtomicInteger reqCount = new AtomicInteger(0); + public static AtomicInteger cnt = new AtomicInteger(0); + // public static AtomicInteger linesCovered = new AtomicInteger(0); - @Override - public void init(FilterConfig filterConfig) { - //just like wait groups, used in testfile - CountDownLatch countDownLatch = HaltThread.getInstance().getCountDownLatch(); - - logger.debug("initializing keploy"); - KeployInstance ki = KeployInstance.getInstance(); - Keploy kp = ki.getKeploy(); - Config cfg = new Config(); - AppConfig appConfig = new AppConfig(); - if (System.getenv("APP_NAME") != null) { - String app_name = System.getenv("APP_NAME").trim(); - appConfig.setName(app_name); - } - if (System.getenv("APP_PORT") != null) { - String app_port = System.getenv("APP_PORT").trim(); - appConfig.setPort(app_port); - } + // private static final String DESTFILE = "jacoco-client.exec"; - //Path for exported tests - String kpath = System.getenv("KEPLOY_TEST_PATH"); - Path path = Paths.get(""); - if (kpath != null && kpath.length() > 0 && !Paths.get(kpath).isAbsolute()) { - kpath = kpath.trim(); - Path effectivePath = path.resolve(kpath).toAbsolutePath(); - String absolutePath = effectivePath.normalize().toString(); - appConfig.setTestPath(absolutePath); - } else if (kpath == null || kpath.length() == 0) { - String currDir = System.getProperty("user.dir") + "/src/test/e2e/keploy-tests"; - appConfig.setTestPath(currDir); - } else { - //if user gives the path - appConfig.setTestPath(kpath); - } + private static final String ADDRESS = "localhost"; - logger.debug("test path: {}", appConfig.getTestPath()); - - //Path for exported mocks - String mpath = System.getenv("KEPLOY_MOCK_PATH"); - - if (mpath != null && mpath.length() > 0 && !Paths.get(mpath).isAbsolute()) { - mpath = mpath.trim(); - Path effectivePath = path.resolve(mpath).toAbsolutePath(); - String absolutePath = effectivePath.normalize().toString(); - appConfig.setMockPath(absolutePath); - } else if (mpath == null || mpath.length() == 0) { - String currDir = System.getProperty("user.dir") + "/src/test/e2e/mocks"; - appConfig.setMockPath(currDir); - } else { - //if user gives the path - appConfig.setMockPath(mpath); - } - - logger.debug("mock path: {}", appConfig.getMockPath()); - - - //Path for exported assets - String apath = System.getenv("KEPLOY_ASSET_PATH"); - - if (apath != null && apath.length() > 0 && !Paths.get(apath).isAbsolute()) { - apath = apath.trim(); - Path effectivePath = path.resolve(apath).toAbsolutePath(); - String absolutePath = effectivePath.normalize().toString(); - appConfig.setAssetPath(absolutePath); - 
} else if (mpath == null || mpath.length() == 0) { - String currDir = System.getProperty("user.dir") + "/src/test/e2e/assets"; - appConfig.setAssetPath(currDir); - } else { - //if user gives the path - appConfig.setAssetPath(mpath); - } - - logger.debug("asset path: {}", appConfig.getAssetPath()); - - ServerConfig serverConfig = new ServerConfig(); - - if (System.getenv("DENOISE") != null) { - String denoise = System.getenv("DENOISE").trim(); - serverConfig.setDenoise(Boolean.parseBoolean(denoise)); - } - - if (System.getenv("KEPLOY_URL") != null) { - String keploy_url = System.getenv("KEPLOY_URL").trim(); - serverConfig.setURL(keploy_url); - } - - logger.info("fetching filter from env variables"); - // read a list from env variable then store them in filter of this keploy instance - String[] acceptUrlRegexList = System.getenv().get("ACCEPT_URL_REGEX_LIST") != null ? - System.getenv().get("ACCEPT_URL_REGEX_LIST").split(",") : null; - - String[] acceptHeaderRegexList = System.getenv().get("ACCEPT_HEADER_REGEX_LIST") != null ? - System.getenv().get("ACCEPT_HEADER_REGEX_LIST").split(",") : null; - - String[] rejectUrlRegexList = System.getenv().get("REJECT_URL_REGEX_LIST") != null ? - System.getenv().get("REJECT_URL_REGEX_LIST").split(",") : null; - - String[] rejectHeaderRegexList = System.getenv().get("REJECT_HEADER_REGEX_LIST") != null ? - System.getenv().get("REJECT_HEADER_REGEX_LIST").split(",") : null; - - boolean isFilterNull = (acceptHeaderRegexList == null) && (acceptUrlRegexList == null) && (rejectHeaderRegexList == null) && (rejectUrlRegexList == null); - if (!isFilterNull) { - io.keploy.regression.keploy.Filter filter = new io.keploy.regression.keploy.Filter(acceptUrlRegexList, acceptHeaderRegexList, rejectHeaderRegexList, rejectUrlRegexList); - appConfig.setFilter(filter); - } - cfg.setApp(appConfig); - cfg.setServer(serverConfig); - kp.setCfg(cfg); + private static final int PORT = 36320; - // its mere purpose is to call the constructor to initialize some fields - new GrpcService(); - - final Mode.ModeType KEPLOY_MODE = Mode.getMode(); - - if (KEPLOY_MODE != null && KEPLOY_MODE.equals(Mode.ModeType.MODE_TEST)) { - new Thread(() -> { - try { - logger.debug("starting tests"); - GrpcService.Test(); - } catch (Exception e) { - logger.error(CROSS + " failed to run tests", e); - } - //to stop after running all tests - countDownLatch.countDown(); // when running tests using cmd - - // to avoid memory leak - Context.cleanup(); - InternalThreadLocalMap.remove(); - try { - GrpcService.channel.shutdown(); - GrpcService.channel.awaitTermination(1, TimeUnit.MINUTES); - GrpcService.channel.shutdownNow(); - } catch (InterruptedException e) { - logger.error(CROSS + " failed to shut grpc connection properly... ", e); - } - - try { - Thread.sleep(10000); - System.exit(0); - } catch (InterruptedException e) { - logger.error(CROSS + " failed to shut test run properly... 
", e); - } - - }).start(); - } - - String runTestBeforeRecord = System.getenv("RUN_TEST_BEFORE_RECORD"); - boolean runTests = false; - if (runTestBeforeRecord != null) { - runTests = Boolean.parseBoolean(runTestBeforeRecord); - } - - - if (KEPLOY_MODE != null && KEPLOY_MODE.equals(Mode.ModeType.MODE_RECORD) && runTests) { - new Thread(this::handleExistingTests).start(); - } + @Override + public void init(FilterConfig filterConfig) { + logger.debug("Keploy Middleware initialized"); } @@ -206,237 +70,255 @@ private static String bold(String str) { return (SET_BOLD_TEXT + str + SET_PLAIN_TEXT); } - @SneakyThrows - private void handleExistingTests() { - - Thread.sleep(2000); - - final String WARN = "\uD83D\uDEA8"; - - System.out.println("--------------------------------------------------------------------------------------------\n"); - String startTest = WARN + " Executing existing test cases to maintain the same state, " + - "kindly do not record any new test cases till these tests get completed."; - System.out.println(bold(startTest)); - System.out.println("\n--------------------------------------------------------------------------------------------"); - - GrpcService.Test(); - } - @Override - public void doFilter(ServletRequest servletRequest, ServletResponse servletResponse, FilterChain filterChain) throws IOException, ServletException { + public void doFilter(ServletRequest servletRequest, ServletResponse servletResponse, FilterChain filterChain) + throws IOException, ServletException { HttpServletRequest request = (HttpServletRequest) servletRequest; HttpServletResponse response = (HttpServletResponse) servletResponse; - - KeployInstance ki = KeployInstance.getInstance(); - Keploy k = ki.getKeploy(); - - logger.debug("inside middleware: incoming request"); - - logger.debug("mode: {}", Mode.getMode()); - - if (k == null || Mode.getMode() != null && (Mode.getMode()).equals(Mode.ModeType.MODE_OFF)) { - filterChain.doFilter(request, response); - return; + String keploy_test_id = request.getHeader("KEPLOY-TEST-ID"); + // logger.debug("KEPLOY-TEST-ID: {}", keploy_test_id); + filterChain.doFilter(request, response); + if (System.getenv("ENABLE_DEDUP") != null) { + String bool = System.getenv("ENABLE_DEDUP").trim(); + EnableDedup = bool.equals("true"); + } + // check if dedup is disabled then what should be the goal may be we can extract from header if dedup is enabled or not + if (keploy_test_id != null && EnableDedup) { + + // Run getCoverage in a separate thread +// Thread coverageThread = new Thread(() -> { + try { + getCoverage(keploy_test_id); + } catch (InterruptedException | IOException e) { + throw new RuntimeException(e); + } +// }); + +// coverageThread.start(); +// try { +// Thread.sleep(500); +// } catch (InterruptedException e) { +// throw new RuntimeException(e); +// } } - //setting context - Kcontext kctx = new Kcontext(); - kctx.setRequest(request); - - Context.setCtx(kctx); + } - String keploy_test_id = request.getHeader("KEPLOY_TEST_ID"); - logger.debug("KEPLOY_TEST_ID: {}", keploy_test_id); + @Override + public void destroy() { + InternalThreadLocalMap.destroy(); + } - if (keploy_test_id != null) { - kctx.setTestId(keploy_test_id); - kctx.setMode(Mode.ModeType.MODE_TEST); - List mocks = k.getMocks().get(keploy_test_id); - if (mocks != null) { - kctx.getMock().addAll(mocks); - } + public void execWriter(String keploy_test_id) throws IOException { + File directory = new File( + System.getProperty("user.dir") + "/target"); + File file = new File(directory, "jacoco-client" + 
keploy_test_id + ".exec"); +// File file = new File(directory, "jacoco-client.exec"); + + final FileOutputStream localFile = new FileOutputStream(file); + + final ExecutionDataWriter localWriter = new ExecutionDataWriter( + localFile); + + // Open a socket to the coverage agent: + final Socket socket = new Socket(InetAddress.getByName(ADDRESS), PORT); + final RemoteControlWriter writer = new RemoteControlWriter( + socket.getOutputStream()); + final RemoteControlReader reader = new RemoteControlReader( + socket.getInputStream()); + reader.setSessionInfoVisitor(localWriter); + reader.setExecutionDataVisitor(localWriter); + + // Send a dump command and read the response: + writer.visitDumpCommand(true, true); + if (!reader.read()) { + throw new IOException("Socket closed unexpectedly."); } + socket.close(); + localFile.close(); + } - GenericRequestWrapper requestWrapper = new GenericRequestWrapper(request); - GenericResponseWrapper responseWrapper = new GenericResponseWrapper(response); + public synchronized void execWriter2(String keploy_test_id) throws IOException { + File directory = new File(System.getProperty("user.dir")+"/target"); + File file = new File(directory, "jacoco-client" + keploy_test_id + ".exec"); - Map> formData = new HashMap<>(); - if (request.getContentType() != null && request.getContentType().startsWith("multipart/form-data")) { - formData = processMultipart(request); - } + FileOutputStream localFile = null; + ExecutionDataWriter localWriter = null; + Socket socket = null; + RemoteControlWriter writer = null; + RemoteControlReader reader = null; + try { + localFile = new FileOutputStream(file); + BufferedOutputStream bufferedLocalFile = new BufferedOutputStream(localFile); + localWriter = new ExecutionDataWriter(bufferedLocalFile); + socket = new Socket(InetAddress.getByName(ADDRESS), PORT); + writer = new RemoteControlWriter(socket.getOutputStream()); + reader = new RemoteControlReader(socket.getInputStream()); - filterChain.doFilter(requestWrapper, responseWrapper); + reader.setSessionInfoVisitor(localWriter); + reader.setExecutionDataVisitor(localWriter); - byte[] reqArr = requestWrapper.getData(); - byte[] resArr = responseWrapper.getData(); + // Send a dump command and read the response: + writer.visitDumpCommand(true, true); - String reqEncoding = (request.getCharacterEncoding() == null) ? "UTF-8" : request.getCharacterEncoding(); - String resEncoding = (response.getCharacterEncoding() == null) ? 
"ISO-8859-1" : response.getCharacterEncoding(); + if (!reader.read()) { + throw new IOException("Socket closed unexpectedly."); + } + } finally { + // Close resources in a finally block to ensure they are closed even if an exception occurs - String requestBody = this.getStringValue(reqArr, reqEncoding); - String responseBody = this.getStringValue(resArr, resEncoding); - String resContentType = response.getContentType(); + if (socket != null && !socket.isClosed()) { + socket.close(); + } - if (resContentType != null && isBinaryFile(resContentType)) { - logger.debug("request contains binary file"); - responseBody = ""; + if (localFile != null) { + localFile.close(); + } } + } - logger.debug("request body inside middleware: {}", requestBody); - logger.debug("response body inside middleware: {}", responseBody); - - String statusMsg = HttpStatusReasons.getStatusMsg(responseWrapper.getStatus()); - String protocolType = requestWrapper.getProtocol(); - int protoMajor = Character.getNumericValue(protocolType.charAt(protocolType.length() - 3)); - int protoMinor = Character.getNumericValue(protocolType.charAt(protocolType.length() - 1)); - - - Map simResponseHeaderMap = getResponseHeaderMap(responseWrapper); - - Service.HttpResp simulateResponse = Service.HttpResp.newBuilder() - .setStatusCode(responseWrapper.getStatus()) - .setBody(responseBody) - .setStatusMessage(statusMsg) - .setProtoMajor(protoMajor) - .setProtoMinor(protoMinor) - .putAllHeader(simResponseHeaderMap).build(); - - logger.debug("simulate response inside middleware: {}", simulateResponse); - - if (keploy_test_id != null) { - k.getResp().put(keploy_test_id, simulateResponse); - Context.cleanup(); - InternalThreadLocalMap.remove(); - logger.debug("response in keploy resp map: {}", k.getResp().get(keploy_test_id)); - } else { - Mode.ModeType mode = Mode.getMode(); - // to prevent recording testcases in test mode. - if (mode != null && mode.equals(Mode.ModeType.MODE_TEST)) { - return; - } + public void getCoverage(String keploy_test_id) throws IOException, InterruptedException { - Map urlParams = setUrlParams(requestWrapper.getParameterMap()); - - Service.HttpResp.Builder builder = Service.HttpResp.newBuilder(); - Map headerMap = getResponseHeaderMap(responseWrapper); - Service.HttpResp httpResp = builder - .setStatusCode(responseWrapper.getStatus()) - .setBody(responseBody) - .setStatusMessage(statusMsg) - .setProtoMajor(protoMajor) - .setProtoMinor(protoMinor) - .putAllHeader(headerMap).build(); - - try { - GrpcService.CaptureTestCases(requestBody, urlParams, httpResp, protocolType, formData); - } catch (Exception e) { - logger.error(CROSS + " failed to capture testCases", e); - } + try { + execWriter(keploy_test_id); + } catch (IOException e) { + e.printStackTrace(); } - responseWrapper.flushBuffer(); + try { + execReader(keploy_test_id); + } catch (IOException e) { + e.printStackTrace(); // Example: print the stack trace + } - // doing this will save thread-local from memory leak. 
- Context.cleanup(); - InternalThreadLocalMap.remove(); - logger.debug("inside middleware: outgoing response"); } - private boolean isBinaryFile(String resContentType) { - - switch (resContentType) { - case "application/octet-stream": - case "application/pdf": - case "image/jpeg": - case "image/jpg": - case "image/png": - case "image/gif": - case "text/plain": - case "text/html": - return true; - default: - return false; - } + public void shutdownExecutor() { + executorService.shutdown(); } - private Map> processMultipart(HttpServletRequest request) throws IOException, ServletException { - Map> data = new HashMap<>(); - Collection parts = request.getParts(); - for (Part part : parts) { - final String partName = part.getName(); - logger.debug("partName:{}", partName); - - if (part.getContentType() != null) { - // read the content of the "file" part and store it in a request attribute - InputStream inputStream = part.getInputStream(); - byte[] content = IOUtils.toByteArray(inputStream); - String fileName = part.getSubmittedFileName(); - - MultipartContent multipartContent = new MultipartContent(fileName, content); - data.computeIfAbsent(partName, x -> new ArrayList<>()).add(multipartContent); - - request.setAttribute("fileContent", content); - } else { - InputStream inputStream = part.getInputStream(); - byte[] content = IOUtils.toByteArray(inputStream); - - MultipartContent multipartContent = new MultipartContent(null, content); - data.computeIfAbsent(partName, x -> new ArrayList<>()).add(multipartContent); - logger.debug("non-file body:{}", getStringValue(content, String.valueOf(StandardCharsets.UTF_8))); + private void execReader(String keploy_test_id) throws IOException { + // Together with the original class definition we can calculate coverage + // information: + out.println("------------------------------------------"); + Line_Path = ""; + ExecFileLoader loader = new ExecFileLoader(); + + List> dataList = new ArrayList<>(); + // Load the coverage data file + File coverageFile = new File( + System.getProperty("user.dir") + + "/target/jacoco-client" + keploy_test_id + ".exec"); +// File coverageFile = new File( +// System.getProperty("user.dir") + +// "/target/jacoco-client.exec"); + loader.load(coverageFile); + File binDir = new File( + System.getProperty("user.dir")+ "/target/classes"); + final CoverageBuilder coverageBuilder = new CoverageBuilder(); + final Analyzer analyzer = new Analyzer(loader.getExecutionDataStore(), coverageBuilder); + analyzer.analyzeAll(binDir); + int x = 0; + Map> executedLinesByFile = new HashMap<>(); + + for (final IClassCoverage cc : coverageBuilder.getClasses()) { + // out.printf("Coverage of class %s%n", cc.getName()); + String ClassName = cc.getName(); // base64Encode(cc.getName()); + // System.out.println(cc.getMethods()); + java.util.Collection method = cc.getMethods(); + + cc.getInstructionCounter().getTotalCount(); + List ls = new ArrayList<>(); + for (int i = cc.getFirstLine(); i <= cc.getLastLine(); i++) { + // out.printf("Line %s: %s%n", Integer.valueOf(i), + // getColor(cc.getLine(i).getStatus())); + if (getColor(cc.getLine(i).getStatus()).equals("green")) { + Line_Path += ClassName + i + ","; + ls.add(i); + } } + if (ls.size() != 0) { + executedLinesByFile.put(ClassName, ls); + } + } - return data; - } - private Map getResponseHeaderMap(GenericResponseWrapper responseWrapper) { +// System.out.println("Line_Path: " + Line_Path); - Map map = new HashMap<>(); + Map testData = new HashMap<>(); + testData.put("id", keploy_test_id); + // Map test1 = 
createTestData("test-1",testData); + testData.put("executedLinesByFile", executedLinesByFile); - List headerNames = new ArrayList<>(responseWrapper.getHeaderNames()); + dataList.add(testData); - for (String name : headerNames) { + List> existingData = readYamlFile("dedupData.yaml"); + // Append new data to the existing data + existingData.addAll(dataList); - if (name == null) continue; + // Write data to YAML file + writeYamlFile(existingData, "dedupData.yaml"); + } - List values = new ArrayList<>(responseWrapper.getHeaders(name)); - Service.StrArr.Builder builder = Service.StrArr.newBuilder(); + private void printCounter(final String unit, final ICounter counter) { + final Integer missed = Integer.valueOf(counter.getMissedCount()); + final Integer total = Integer.valueOf(counter.getTotalCount()); + out.printf("%s of %s %s missed%n", missed, total, unit); + Lines_covered = total - missed; + System.out.println("Lines covered: " + Lines_covered); + Lines_total = total; + System.out.println("Lines total: " + Lines_total); - for (String s : values) { - builder.addValue(s); - } - Service.StrArr value = builder.build(); + } - map.put(name, value); + private String getColor(final int status) { + switch (status) { + case ICounter.NOT_COVERED: + return "red"; + case ICounter.PARTLY_COVERED: + return "yellow"; + case ICounter.FULLY_COVERED: + return "green"; } - return map; + return ""; } - private Map setUrlParams(Map param) { - Map urlParams = new HashMap<>(); + private static List> readYamlFile(String fileName) { + List> existingData = new ArrayList<>(); + + try (InputStream input = new FileInputStream(fileName); + UnicodeReader reader = new UnicodeReader(input)) { + + Yaml yaml = new Yaml(); + existingData = yaml.load(reader); - for (String key : param.keySet()) { - //taking only value of the parameter - String value = param.get(key)[0]; - if (key == null || value == null) continue; - urlParams.put(key, value); + } catch (IOException e) { + e.printStackTrace(); } - return urlParams; + + return existingData != null ? 
existingData : new ArrayList<>(); } - private String getStringValue(byte[] contentAsByteArray, String characterEncoding) { - try { - return new String(contentAsByteArray, 0, contentAsByteArray.length, characterEncoding); - } catch (UnsupportedEncodingException e) { + public static String base64Encode(String input) { + byte[] encodedBytes = Base64.getEncoder().encode(input.getBytes()); + return new String(encodedBytes); + } + + private static void writeYamlFile(List> dataList, String fileName) { + DumperOptions options = new DumperOptions(); + options.setDefaultFlowStyle(DumperOptions.FlowStyle.BLOCK); + + Yaml yaml = new Yaml(options); + + try (FileWriter writer = new FileWriter(fileName)) { + yaml.dump(dataList, writer); + System.out.println("YAML file updated successfully: " + fileName); + } catch (IOException e) { e.printStackTrace(); } - return ""; } - @Override - public void destroy() { - InternalThreadLocalMap.destroy(); - } } \ No newline at end of file diff --git a/keploy-logs.txt b/keploy-logs.txt new file mode 100644 index 00000000..1e3151d9 --- /dev/null +++ b/keploy-logs.txt @@ -0,0 +1,17 @@ +🐰 Keploy: 2024-01-30T19:43:08Z INFO cmd/test.go:264 Keploy config not found, continuing without configuration +🐰 Keploy: 2024-01-30T19:43:08Z INFO cmd/test.go:363 {"keploy test and mock path": "/Users/sarthak_1/Documents/Keploy/v2/java-sdk/keploy", "keploy testReport path": "/Users/sarthak_1/Documents/Keploy/v2/java-sdk/keploy/testReports/test-run-1"} +🐰 Keploy: 2024-01-30T19:43:10Z INFO hooks/loader.go:828 keploy initialized and probes added to the kernel. +🐰 Keploy: 2024-01-30T19:43:10Z INFO proxy/proxy.go:270 Java detected and CA already exists {"path": "/usr/lib/jvm/java-17-openjdk-arm64/lib/security/cacerts"} +🐰 Keploy: 2024-01-30T19:43:10Z INFO proxy/proxy.go:270 Java detected and CA already exists {"path": "/usr/lib/jvm/java-17-openjdk-arm64/lib/security/cacerts"} +🐰 Keploy: 2024-01-30T19:43:11Z ERROR proxy/proxy.go:344 Failed to update the CA store {"error": "signal: interrupt"} +🐰 Keploy: 2024-01-30T19:43:11Z INFO proxy/proxy.go:429 Keploy has hijacked the DNS resolution mechanism, your application may misbehave in keploy test mode if you have provided wrong domain name in your application code. +🐰 Keploy: 2024-01-30T19:43:11Z INFO proxy/proxy.go:443 Proxy started at port:16789 +🐰 Keploy: 2024-01-30T19:43:11Z INFO proxy/proxy.go:600 starting DNS server at addr :16789 +🐰 Keploy: 2024-01-30T19:43:11Z INFO test/test.go:121 test run completed {"passed overall": true} +🐰 Keploy: 2024-01-30T19:43:11Z INFO test/test.go:124 Keploy is looking for duplicate testcases 🥳🪞 +🐰 Keploy: 2024-01-30T19:43:11Z INFO hooks/loader.go:460 Received signal to exit keploy program.. +🐰 Keploy: 2024-01-30T19:43:11Z INFO hooks/loader.go:404 keploy has initiated the shutdown of the user application. +🐰 Keploy: 2024-01-30T19:43:11Z ERROR deduplication/dedup.go:100 Error reading file {"error": "read dedupData.yaml: is a directory"} +🐰 Keploy: 2024-01-30T19:43:11Z INFO hooks/loader.go:513 eBPF resources released successfully... +🐰 Keploy: 2024-01-30T19:43:11Z INFO proxy/proxy.go:1015 Dns server stopped +🐰 Keploy: 2024-01-30T19:43:11Z INFO proxy/proxy.go:1017 proxy stopped... 
diff --git a/v2/src/main/java/io.keploy.cli/KeployCLI.java b/v2/src/main/java/io.keploy.cli/KeployCLI.java index 2cc0550f..ad66b8a5 100644 --- a/v2/src/main/java/io.keploy.cli/KeployCLI.java +++ b/v2/src/main/java/io.keploy.cli/KeployCLI.java @@ -68,7 +68,7 @@ public enum TestRunStatus { } public static void StartUserApplication(String runCmd) throws IOException { - + System.out.println("Starting user application:" + runCmd); runCmd = attachJacocoAgent(runCmd); // Split the runCmd string into command parts @@ -116,7 +116,6 @@ public static void FindCoverage(String testSet) throws IOException, InterruptedE String runCmd = "java -jar " + getJacococliPath() + " dump --address localhost --port 36320 --destfile " + dest + ".exec"; - // Split the runCmd string into command parts String[] command = runCmd.split(" "); @@ -172,7 +171,7 @@ private static boolean deleteFile(String filePath) { // Attempt to delete the file if (file.delete()) { - logger.debug("File deleted successfully:",filePath); + logger.debug("File deleted successfully:", filePath); // System.out.println("File deleted successfully: " + filePath); return true; } else { @@ -420,4 +419,73 @@ private static int getCurrentPid() { String processName = ManagementFactory.getRuntimeMXBean().getName(); return Integer.parseInt(processName.split("@")[0]); } + + public static void runTestsAndCoverage(String jarPath, String[] testSets) { + for (String testSet : testSets) { + String testRunId = KeployCLI.RunTestSet(testSet); + startUserApplication(jarPath); + waitForTestRunCompletion(testRunId); + + try { + KeployCLI.FindCoverage(testSet); + Thread.sleep(5000); + } catch (Exception e) { + // TODO: handle exception + e.printStackTrace(); + } + stopUserApplication(); + } + } + + private static void startUserApplication(String jarPath) { + String[] command = { "java", "-jar", jarPath }; + String userCmd = String.join(" ", command); + try { + KeployCLI.StartUserApplication(userCmd); + System.out.println("Application started "); + } catch (IOException e) { + System.err.println("Failed to start user application: " + e.getMessage()); + } + } + + private static void waitForTestRunCompletion(String testRunId) { + // Implement the logic to wait for test run completion using KeployCLI + long MAX_TIMEOUT = 6000000; // 1m + long startTime = System.currentTimeMillis(); + + try { + KeployCLI.TestRunStatus testRunStatus; + + while (true) { + Thread.sleep(2000); + testRunStatus = KeployCLI.FetchTestSetStatus(testRunId); + + if (testRunStatus == KeployCLI.TestRunStatus.RUNNING) { + System.out.println("Test run still in progress"); + + if (System.currentTimeMillis() - startTime > MAX_TIMEOUT) { + System.out.println("Timeout reached, exiting loop"); + break; + } + + continue; + } + + break; + } + + if (testRunStatus == KeployCLI.TestRunStatus.FAILED + || testRunStatus == KeployCLI.TestRunStatus.RUNNING) { + System.out.println("Test run failed"); + } else if (testRunStatus == KeployCLI.TestRunStatus.PASSED) { + System.out.println("Test run passed"); + } + } catch (InterruptedException e) { + System.err.println("Error waiting for test run completion: " + e.getMessage()); + } + } + + private static void stopUserApplication() { + KeployCLI.StopUserApplication(); + } }
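
Note: as context for the new public helper introduced in the KeployCLI.java hunk above, the following is a minimal, hypothetical sketch of how runTestsAndCoverage might be driven from a standalone runner. The runner class name, jar path, and test-set names are placeholders and not part of this change; it assumes KeployCLI resolves under the io.keploy.cli package (as the source path suggests) and that the Keploy/JaCoCo agent setup matches the defaults wired into StartUserApplication and FindCoverage.

    import io.keploy.cli.KeployCLI;

    public class DedupCoverageRunner {
        public static void main(String[] args) {
            // Placeholder values for illustration only; substitute real paths and test-set names.
            String jarPath = "target/my-app.jar";
            String[] testSets = {"test-set-0", "test-set-1"};

            // For each test set, this triggers the set via RunTestSet, starts the app jar
            // (with the JaCoCo agent attached by StartUserApplication), waits for the run
            // to complete, dumps a per-set coverage .exec file via FindCoverage, and then
            // stops the application.
            KeployCLI.runTestsAndCoverage(jarPath, testSets);
        }
    }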