From 51d84907b96af1091fdc017165a4f3c94bc9c090 Mon Sep 17 00:00:00 2001 From: ayushaga14 Date: Tue, 27 Aug 2024 17:24:31 +0530 Subject: [PATCH 01/14] add runtime traffic metrics insertion and fetch logic --- .../traffic_metrics/TrafficMetricsAction.java | 68 +++++++++++++++ apps/dashboard/src/main/resources/struts.xml | 22 +++++ .../main/java/com/akto/action/DbAction.java | 19 +++++ .../src/main/resources/struts.xml | 11 +++ libs/dao/src/main/java/com/akto/DaoInit.java | 6 +- .../traffic_metrics/RuntimeMetricsDao.java | 82 +++++++++++++++++++ .../dto/traffic_metrics/RuntimeMetrics.java | 52 ++++++++++++ .../java/com/akto/data_actor/ClientActor.java | 18 ++++ .../java/com/akto/data_actor/DataActor.java | 5 ++ .../java/com/akto/data_actor/DbActor.java | 5 ++ .../java/com/akto/data_actor/DbLayer.java | 32 ++++++++ .../java/com/akto/metrics/AllMetrics.java | 30 +++++++ 12 files changed, 349 insertions(+), 1 deletion(-) create mode 100644 libs/dao/src/main/java/com/akto/dao/traffic_metrics/RuntimeMetricsDao.java create mode 100644 libs/dao/src/main/java/com/akto/dto/traffic_metrics/RuntimeMetrics.java diff --git a/apps/dashboard/src/main/java/com/akto/action/traffic_metrics/TrafficMetricsAction.java b/apps/dashboard/src/main/java/com/akto/action/traffic_metrics/TrafficMetricsAction.java index e85ca6b47f..95ef4bdfb4 100644 --- a/apps/dashboard/src/main/java/com/akto/action/traffic_metrics/TrafficMetricsAction.java +++ b/apps/dashboard/src/main/java/com/akto/action/traffic_metrics/TrafficMetricsAction.java @@ -1,13 +1,18 @@ package com.akto.action.traffic_metrics; import com.akto.action.UserAction; +import com.akto.dao.traffic_metrics.RuntimeMetricsDao; import com.akto.dao.traffic_metrics.TrafficMetricsDao; +import com.akto.dto.traffic_metrics.RuntimeMetrics; import com.akto.dto.traffic_metrics.TrafficMetrics; import com.mongodb.BasicDBObject; import com.mongodb.client.MongoCursor; import com.mongodb.client.model.Accumulators; import com.mongodb.client.model.Aggregates; import com.mongodb.client.model.Filters; +import com.mongodb.client.model.Projections; +import com.mongodb.client.model.Sorts; + import org.bson.Document; import org.bson.conversions.Bson; @@ -16,11 +21,16 @@ public class TrafficMetricsAction extends UserAction { private int startTimestamp; private int endTimestamp; + private String instanceId; + private List names; private String groupBy; private String host; private int vxlanID; + List runtimeMetrics; + List instanceIds; + public static final String ID = "_id."; @@ -113,6 +123,39 @@ public String execute() { return SUCCESS.toUpperCase(); } + public String fetchRuntimeInstances() { + instanceIds = new ArrayList<>(); + Bson filters = RuntimeMetricsDao.buildFilters(startTimestamp, endTimestamp); + runtimeMetrics = RuntimeMetricsDao.instance.findAll(filters, 0, 0, Sorts.descending("timestamp"), Projections.include("instanceId")); + for (RuntimeMetrics metric: runtimeMetrics) { + instanceIds.add(metric.getInstanceId()); + } + + return SUCCESS.toUpperCase(); + } + + public String fetchRuntimeMetrics() { + Bson filters = RuntimeMetricsDao.buildFilters(startTimestamp, endTimestamp, instanceId); + // runtimeMetrics = RuntimeMetricsDao.instance.findAll(filters, 0, 0, Sorts.descending("timestamp")); + runtimeMetrics = new ArrayList<>(); + + try (MongoCursor cursor = RuntimeMetricsDao.instance.getMCollection().aggregate( + Arrays.asList( + Aggregates.match(filters), + Aggregates.sort(Sorts.descending("timestamp")), + Aggregates.group(new BasicDBObject("name", "$name"), 
Accumulators.first("latestDoc", "$$ROOT")) + ), BasicDBObject.class + ).cursor()) { + while (cursor.hasNext()) { + BasicDBObject basicDBObject = cursor.next(); + BasicDBObject latestDoc = (BasicDBObject) basicDBObject.get("latestDoc"); + runtimeMetrics.add(new RuntimeMetrics(latestDoc.getString("name"), 0, instanceId, latestDoc.getDouble("val"))); + } + } + + return SUCCESS.toUpperCase(); + } + public String fetchTrafficMetricsDesciptions(){ names = Arrays.asList(TrafficMetrics.Name.values()); return SUCCESS.toUpperCase(); @@ -150,4 +193,29 @@ public void setHost(String host) { public void setVxlanID(int vxlanID) { this.vxlanID = vxlanID; } + + public List getRuntimeMetrics() { + return runtimeMetrics; + } + + public void setRuntimeMetrics(List runtimeMetrics) { + this.runtimeMetrics = runtimeMetrics; + } + + public List getInstanceIds() { + return instanceIds; + } + + public void setInstanceIds(List instanceIds) { + this.instanceIds = instanceIds; + } + + public String getInstanceId() { + return instanceId; + } + + public void setInstanceId(String instanceId) { + this.instanceId = instanceId; + } + } diff --git a/apps/dashboard/src/main/resources/struts.xml b/apps/dashboard/src/main/resources/struts.xml index 7a3e924065..631cab2cf2 100644 --- a/apps/dashboard/src/main/resources/struts.xml +++ b/apps/dashboard/src/main/resources/struts.xml @@ -2849,6 +2849,28 @@ ^actionErrors.* + + + + + + + 422 + false + ^actionErrors.* + + + + + + + + + 422 + false + ^actionErrors.* + + diff --git a/apps/database-abstractor/src/main/java/com/akto/action/DbAction.java b/apps/database-abstractor/src/main/java/com/akto/action/DbAction.java index e0eab40889..52d2bb5f11 100644 --- a/apps/database-abstractor/src/main/java/com/akto/action/DbAction.java +++ b/apps/database-abstractor/src/main/java/com/akto/action/DbAction.java @@ -213,6 +213,8 @@ public void setIssuesIds(BasicDBList issuesIds) { String endpointLogicalGroupId; DataControlSettings dataControlSettings; + BasicDBList metricsData; + public String fetchDataControlSettings() { try { String prevCommand = ""; @@ -1604,6 +1606,15 @@ public String fetchLatestEndpointsForTesting() { return Action.SUCCESS.toUpperCase(); } + public String insertRuntimeMetricsData() { + try { + DbLayer.insertRuntimeMetricsData(metricsData); + } catch (Exception e) { + return Action.ERROR.toUpperCase(); + } + return Action.SUCCESS.toUpperCase(); + } + public List getCustomDataTypes() { return customDataTypes; } @@ -2482,4 +2493,12 @@ public void setNewEps(List newEps) { this.newEps = newEps; } + public BasicDBList getMetricsData() { + return metricsData; + } + + public void setMetricsData(BasicDBList metricsData) { + this.metricsData = metricsData; + } + } diff --git a/apps/database-abstractor/src/main/resources/struts.xml b/apps/database-abstractor/src/main/resources/struts.xml index e8c3a6b49b..04f7049996 100644 --- a/apps/database-abstractor/src/main/resources/struts.xml +++ b/apps/database-abstractor/src/main/resources/struts.xml @@ -1113,6 +1113,17 @@ + + + + + + 422 + false + ^actionErrors.* + + + diff --git a/libs/dao/src/main/java/com/akto/DaoInit.java b/libs/dao/src/main/java/com/akto/DaoInit.java index fa21968747..72388801df 100644 --- a/libs/dao/src/main/java/com/akto/DaoInit.java +++ b/libs/dao/src/main/java/com/akto/DaoInit.java @@ -8,6 +8,7 @@ import com.akto.dao.testing.TestingRunResultDao; import com.akto.dao.testing.TestingRunResultSummariesDao; import com.akto.dao.testing_run_findings.TestingRunIssuesDao; +import com.akto.dao.traffic_metrics.RuntimeMetricsDao; 
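
For context before the DAO wiring below: the new `/insertRuntimeMetricsData` endpoint added above reads the batch from a `metricsData` key, and `ClientActor` later in this patch posts exactly that shape. A minimal, self-contained sketch of building the payload — only the field names come from this patch; the metric values here are illustrative, not real data:

import com.mongodb.BasicDBList;
import com.mongodb.BasicDBObject;

// Hypothetical payload builder mirroring what ClientActor posts to
// /insertRuntimeMetricsData: {"metricsData": [{metric_id, instance_id, ...}]}
public class MetricsPayloadSketch {
    public static void main(String[] args) {
        BasicDBList list = new BasicDBList();
        BasicDBObject metric = new BasicDBObject();
        metric.put("metric_id", "RT_KAFKA_RECORD_COUNT"); // metric name from AllMetrics
        metric.put("instance_id", "demo-instance");       // illustrative value
        metric.put("timestamp", 1724760000);              // epoch seconds, as stamped by AllMetrics
        metric.put("val", 42.0);
        list.add(metric);
        System.out.println(new BasicDBObject("metricsData", list).toJson());
    }
}
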
 import com.akto.dao.traffic_metrics.TrafficMetricsDao;
 import com.akto.dao.usage.UsageMetricsDao;
 import com.akto.dto.*;
@@ -36,6 +37,7 @@
 import com.akto.dto.third_party_access.Credential;
 import com.akto.dto.third_party_access.ThirdPartyAccess;
 import com.akto.dto.traffic.SampleData;
+import com.akto.dto.traffic_metrics.RuntimeMetrics;
 import com.akto.dto.traffic_metrics.TrafficMetrics;
 import com.akto.dto.traffic_metrics.TrafficMetricsAlert;
 import com.akto.dto.type.SingleTypeInfo;
@@ -246,6 +248,7 @@ public static CodecRegistry createCodecRegistry(){
         ClassModel<CodeAnalysisApiInfo.CodeAnalysisApiInfoKey> codeAnalysisApiInfoKeyClassModel = ClassModel.builder(CodeAnalysisApiInfo.CodeAnalysisApiInfoKey.class).enableDiscriminator(true).build();
         ClassModel<RiskScoreTestingEndpoints> riskScoreTestingEndpointsClassModel = ClassModel.builder(RiskScoreTestingEndpoints.class).enableDiscriminator(true).build();
         ClassModel<OrganizationFlags> OrganizationFlagsClassModel = ClassModel.builder(OrganizationFlags.class).enableDiscriminator(true).build();
+        ClassModel<RuntimeMetrics> RuntimeMetricsClassModel = ClassModel.builder(RuntimeMetrics.class).enableDiscriminator(true).build();

         CodecRegistry pojoCodecRegistry = fromProviders(PojoCodecProvider.builder().register(
                 configClassModel, signupInfoClassModel, apiAuthClassModel, attempResultModel, urlTemplateModel,
@@ -275,7 +278,7 @@ public static CodecRegistry createCodecRegistry(){
                 yamlNodeDetails, multiExecTestResultClassModel, workflowTestClassModel, dependencyNodeClassModel, paramInfoClassModel,
                 nodeClassModel, connectionClassModel, edgeClassModel, replaceDetailClassModel, modifyHostDetailClassModel,
                 fileUploadClassModel ,fileUploadLogClassModel, codeAnalysisCollectionClassModel, codeAnalysisApiLocationClassModel, codeAnalysisApiInfoClassModel, codeAnalysisApiInfoKeyClassModel,
-                riskScoreTestingEndpointsClassModel, OrganizationFlagsClassModel).automatic(true).build());
+                riskScoreTestingEndpointsClassModel, OrganizationFlagsClassModel, RuntimeMetricsClassModel).automatic(true).build());

         final CodecRegistry customEnumCodecs = CodecRegistries.fromCodecs(
                 new EnumCodec<>(Conditions.Operator.class),
@@ -374,6 +377,7 @@ public static void createIndices() {
         DependencyFlowNodesDao.instance.createIndicesIfAbsent();
         CodeAnalysisCollectionDao.instance.createIndicesIfAbsent();
         CodeAnalysisApiInfoDao.instance.createIndicesIfAbsent();
+        RuntimeMetricsDao.instance.createIndicesIfAbsent();
     }
 }
diff --git a/libs/dao/src/main/java/com/akto/dao/traffic_metrics/RuntimeMetricsDao.java b/libs/dao/src/main/java/com/akto/dao/traffic_metrics/RuntimeMetricsDao.java
new file mode 100644
index 0000000000..e8bb16ae9a
--- /dev/null
+++ b/libs/dao/src/main/java/com/akto/dao/traffic_metrics/RuntimeMetricsDao.java
@@ -0,0 +1,78 @@
+package com.akto.dao.traffic_metrics;
+
+import java.util.ArrayList;
+
+import org.bson.conversions.Bson;
+
+import com.akto.dao.AccountsContextDao;
+import com.akto.dao.MCollection;
+import com.akto.dao.context.Context;
+import com.akto.dto.traffic_metrics.RuntimeMetrics;
+import com.akto.dto.type.URLMethods;
+import com.akto.util.DbMode;
+import com.mongodb.client.MongoDatabase;
+import com.mongodb.client.model.CreateCollectionOptions;
+import com.mongodb.client.model.Filters;
+import com.mongodb.client.model.WriteModel;
+
+public class RuntimeMetricsDao extends AccountsContextDao<RuntimeMetrics> {
+
+    public static final RuntimeMetricsDao instance = new RuntimeMetricsDao();
+    public static final int maxDocuments = 100_000;
+    public static final int sizeInBytes = 100_000_000;
+
+    @Override
+    public String getCollName() {
+        return "runtime_metrics";
+    }
+
+    @Override
+    public Class<RuntimeMetrics> getClassT() {
+        return RuntimeMetrics.class;
+    }
+
+    public void createIndicesIfAbsent() {
+        boolean exists = false;
+        String dbName = Context.accountId.get()+"";
+        MongoDatabase db = clients[0].getDatabase(dbName);
+        for (String col: db.listCollectionNames()){
+            if (getCollName().equalsIgnoreCase(col)){
+                exists = true;
+                break;
+            }
+        }
+
+        if (!exists) {
+            if (DbMode.allowCappedCollections()) {
+                db.createCollection(getCollName(), new CreateCollectionOptions().capped(true).maxDocuments(maxDocuments).sizeInBytes(sizeInBytes));
+            } else {
+                db.createCollection(getCollName());
+            }
+        }
+
+        MCollection.createIndexIfAbsent(getDBName(), getCollName(),
+                new String[] { "timestamp" }, true);
+        MCollection.createIndexIfAbsent(getDBName(), getCollName(),
+                new String[] { "timestamp", "instanceId" }, true);
+    }
+
+    public static void bulkInsertMetrics(ArrayList<WriteModel<RuntimeMetrics>> bulkUpdates) {
+        RuntimeMetricsDao.instance.getMCollection().bulkWrite(bulkUpdates);
+    }
+
+    public static Bson buildFilters(int startTs, int endTs) {
+        return Filters.and(
+            Filters.gte("timestamp", startTs),
+            Filters.lte("timestamp", endTs)
+        );
+    }
+
+    public static Bson buildFilters(int startTs, int endTs, String instanceId) {
+        return Filters.and(
+            Filters.gte("timestamp", startTs),
+            Filters.lte("timestamp", endTs),
+            Filters.eq("instanceId", instanceId)
+        );
+    }
+
+}
diff --git a/libs/dao/src/main/java/com/akto/dto/traffic_metrics/RuntimeMetrics.java b/libs/dao/src/main/java/com/akto/dto/traffic_metrics/RuntimeMetrics.java
new file mode 100644
index 0000000000..53899ac03d
--- /dev/null
+++ b/libs/dao/src/main/java/com/akto/dto/traffic_metrics/RuntimeMetrics.java
@@ -0,0 +1,52 @@
+package com.akto.dto.traffic_metrics;
+
+public class RuntimeMetrics {
+
+    private String name;
+    private int timestamp;
+    private String instanceId;
+    private Double val;
+
+    public RuntimeMetrics() {
+    }
+
+    public RuntimeMetrics(String name, int timestamp, String instanceId, Double val) {
+        this.name = name;
+        this.timestamp = timestamp;
+        this.instanceId = instanceId;
+        this.val = val;
+    }
+
+    public String getName() {
+        return name;
+    }
+
+    public void setName(String name) {
+        this.name = name;
+    }
+
+    public int getTimestamp() {
+        return timestamp;
+    }
+
+    public void setTimestamp(int timestamp) {
+        this.timestamp = timestamp;
+    }
+
+    public String getInstanceId() {
+        return instanceId;
+    }
+
+    public void setInstanceId(String instanceId) {
+        this.instanceId = instanceId;
+    }
+
+    public Double getVal() {
+        return val;
+    }
+
+    public void setVal(Double val) {
+        this.val = val;
+    }
+
+}
diff --git a/libs/utils/src/main/java/com/akto/data_actor/ClientActor.java b/libs/utils/src/main/java/com/akto/data_actor/ClientActor.java
index b1451f8043..f3eaa224cd 100644
--- a/libs/utils/src/main/java/com/akto/data_actor/ClientActor.java
+++ b/libs/utils/src/main/java/com/akto/data_actor/ClientActor.java
@@ -2974,4 +2974,22 @@ public List fetchLatestEndpointsForTesting(int startTimestam
         return respList;
     }

+    public void insertRuntimeMetricsData(BasicDBList metricsData) {
+        Map<String, List<String>> headers = buildHeaders();
+        BasicDBObject obj = new BasicDBObject();
+        obj.put("metricsData", metricsData);
+        OriginalHttpRequest request = new OriginalHttpRequest(url + "/insertRuntimeMetricsData", "", "POST", obj.toString(), headers, "");
+        try {
+            OriginalHttpResponse response = ApiExecutor.sendRequest(request, true, null, false, null);
+            String responsePayload = response.getBody();
+            if (response.getStatusCode() != 
200 || responsePayload == null) { + System.out.println("non 2xx response in insertRuntimeMetricsData"); + return; + } + } catch (Exception e) { + System.out.println("error in insertRuntimeMetricsData" + e); + return; + } + } + } diff --git a/libs/utils/src/main/java/com/akto/data_actor/DataActor.java b/libs/utils/src/main/java/com/akto/data_actor/DataActor.java index 434935be21..ace030954d 100644 --- a/libs/utils/src/main/java/com/akto/data_actor/DataActor.java +++ b/libs/utils/src/main/java/com/akto/data_actor/DataActor.java @@ -23,6 +23,7 @@ import com.akto.dto.traffic.TrafficInfo; import com.akto.dto.type.SingleTypeInfo; import com.akto.dto.type.URLMethods; +import com.mongodb.BasicDBList; import com.mongodb.client.model.WriteModel; import java.util.List; @@ -223,5 +224,9 @@ public abstract class DataActor { public abstract DataControlSettings fetchDataControlSettings(String prevResult, String prevCommand); public abstract void bulkWriteDependencyNodes(List dependencyNodeList); + public abstract List fetchLatestEndpointsForTesting(int startTimestamp, int endTimestamp, int apiCollectionId); + + public abstract void insertRuntimeMetricsData(BasicDBList metricsData); + } diff --git a/libs/utils/src/main/java/com/akto/data_actor/DbActor.java b/libs/utils/src/main/java/com/akto/data_actor/DbActor.java index f9c49f1b1c..f98bea4545 100644 --- a/libs/utils/src/main/java/com/akto/data_actor/DbActor.java +++ b/libs/utils/src/main/java/com/akto/data_actor/DbActor.java @@ -26,6 +26,7 @@ import com.akto.dto.type.SingleTypeInfo; import com.akto.dto.type.URLMethods; import com.akto.dto.type.URLMethods.Method; +import com.mongodb.BasicDBList; import com.mongodb.client.model.WriteModel; import java.util.ArrayList; @@ -457,4 +458,8 @@ public List fetchLatestEndpointsForTesting(int startTimestam return DbLayer.fetchLatestEndpointsForTesting(startTimestamp, endTimestamp, apiCollectionId); } + public void insertRuntimeMetricsData(BasicDBList metricsData) { + DbLayer.insertRuntimeMetricsData(metricsData); + } + } diff --git a/libs/utils/src/main/java/com/akto/data_actor/DbLayer.java b/libs/utils/src/main/java/com/akto/data_actor/DbLayer.java index 3cb420a423..43f90ac595 100644 --- a/libs/utils/src/main/java/com/akto/data_actor/DbLayer.java +++ b/libs/utils/src/main/java/com/akto/data_actor/DbLayer.java @@ -16,6 +16,7 @@ import com.akto.dependency_analyser.DependencyAnalyserUtils; import com.akto.dto.*; import com.akto.dto.settings.DataControlSettings; +import com.mongodb.BasicDBList; import com.mongodb.client.model.*; import org.bson.conversions.Bson; import org.bson.types.ObjectId; @@ -36,6 +37,7 @@ import com.akto.dao.testing.WorkflowTestsDao; import com.akto.dao.testing.sources.TestSourceConfigsDao; import com.akto.dao.testing_run_findings.TestingRunIssuesDao; +import com.akto.dao.traffic_metrics.RuntimeMetricsDao; import com.akto.dao.traffic_metrics.TrafficMetricsDao; import com.akto.dto.ApiInfo.ApiInfoKey; import com.akto.dto.billing.Organization; @@ -58,6 +60,7 @@ import com.akto.dto.testing.sources.TestSourceConfig; import com.akto.dto.traffic.SampleData; import com.akto.dto.traffic.TrafficInfo; +import com.akto.dto.traffic_metrics.RuntimeMetrics; import com.akto.dto.traffic_metrics.TrafficMetrics; import com.akto.dto.type.SingleTypeInfo; import com.akto.dto.type.URLMethods; @@ -833,4 +836,33 @@ public static void bulkWriteDependencyNodes(List dependencyNodeL public static List fetchLatestEndpointsForTesting(int startTimestamp, int endTimestamp, int apiCollectionId) { return 
SingleTypeInfoDao.fetchLatestEndpointsForTesting(startTimestamp, endTimestamp, apiCollectionId); } + + public static void insertRuntimeMetricsData(BasicDBList metricsData) { + + ArrayList> bulkUpdates = new ArrayList<>(); + RuntimeMetrics runtimeMetrics; + for (Object metrics: metricsData) { + try { + Map obj = (Map) metrics; + String name = (String) obj.get("metric_id"); + String instanceId = (String) obj.get("instance_id"); + Long tsVal = (Long) obj.get("timestamp"); + int ts = tsVal.intValue(); + Double valDouble = (Double) obj.get("val"); + int val = valDouble.intValue(); + if (name == null || name.length() == 0) { + continue; + } + runtimeMetrics = new RuntimeMetrics(name, ts, instanceId, val); + bulkUpdates.add(new InsertOneModel<>(runtimeMetrics)); + } catch (Exception e) { + loggerMaker.errorAndAddToDb("error writing bulk update " + e.getMessage()); + } + } + + if (bulkUpdates.size() > 0) { + loggerMaker.infoAndAddToDb("insertRuntimeMetricsData bulk write size " + metricsData.size()); + RuntimeMetricsDao.bulkInsertMetrics(bulkUpdates); + } + } } diff --git a/libs/utils/src/main/java/com/akto/metrics/AllMetrics.java b/libs/utils/src/main/java/com/akto/metrics/AllMetrics.java index e7e79c88ee..8c9625e06c 100644 --- a/libs/utils/src/main/java/com/akto/metrics/AllMetrics.java +++ b/libs/utils/src/main/java/com/akto/metrics/AllMetrics.java @@ -1,6 +1,7 @@ package com.akto.metrics; import com.akto.dao.context.Context; +import com.akto.data_actor.DataActor; import com.akto.data_actor.DataActorFactory; import com.akto.dto.billing.Organization; import com.akto.log.LoggerMaker; @@ -19,6 +20,7 @@ public class AllMetrics { + public static final DataActor dataActor = DataActorFactory.fetchInstance(); public void init(){ int accountId = Context.accountId.get(); @@ -83,11 +85,13 @@ public void init(){ metricsData.put("org_id", m.orgId); metricsData.put("instance_id", instance_id); metricsData.put("account_id", m.accountId); + metricsData.put("timestamp", Context.now()); list.add(metricsData); } if(!list.isEmpty()) { sendDataToAkto(list); + dataActor.insertRuntimeMetricsData(list); } } catch (Exception e){ loggerMaker.errorAndAddToDb("Error while sending metrics to akto: " + e.getMessage(), LoggerMaker.LogDb.RUNTIME); @@ -431,4 +435,30 @@ public static void sendDataToAkto(BasicDBList list){ loggerMaker.infoAndAddToDb("Traffic_metrics not sent", LoggerMaker.LogDb.RUNTIME); } } + + public static void sendData(BasicDBList list){ + + MediaType mediaType = MediaType.parse("application/json"); + RequestBody body = RequestBody.create(new BasicDBObject("data", list).toJson(), mediaType); + Request request = new Request.Builder() + .url(URL) + .method("POST", body) + .addHeader("Content-Type", "application/json") + .build(); + Response response = null; + try { + response = client.newCall(request).execute(); + } catch (IOException e) { + loggerMaker.errorAndAddToDb("Error while executing request " + request.url() + ": " + e.getMessage(), LoggerMaker.LogDb.RUNTIME); + } finally { + if (response != null) { + response.close(); + } + } + if (response!= null && response.isSuccessful()) { + loggerMaker.infoAndAddToDb("Updated traffic_metrics", LoggerMaker.LogDb.RUNTIME); + } else { + loggerMaker.infoAndAddToDb("Traffic_metrics not sent", LoggerMaker.LogDb.RUNTIME); + } + } } From 434ea2081b7a33cc3b52650c5f5a2be99b90daf8 Mon Sep 17 00:00:00 2001 From: ayushaga14 Date: Wed, 28 Aug 2024 12:40:45 +0530 Subject: [PATCH 02/14] fix --- libs/utils/src/main/java/com/akto/data_actor/DbLayer.java | 3 +-- 1 file changed, 1 
insertion(+), 2 deletions(-) diff --git a/libs/utils/src/main/java/com/akto/data_actor/DbLayer.java b/libs/utils/src/main/java/com/akto/data_actor/DbLayer.java index 43f90ac595..8a3c09a930 100644 --- a/libs/utils/src/main/java/com/akto/data_actor/DbLayer.java +++ b/libs/utils/src/main/java/com/akto/data_actor/DbLayer.java @@ -848,8 +848,7 @@ public static void insertRuntimeMetricsData(BasicDBList metricsData) { String instanceId = (String) obj.get("instance_id"); Long tsVal = (Long) obj.get("timestamp"); int ts = tsVal.intValue(); - Double valDouble = (Double) obj.get("val"); - int val = valDouble.intValue(); + Double val = (Double) obj.get("val"); if (name == null || name.length() == 0) { continue; } From 6807ca8ef89ecbd33175f751892cb2dcd6f3bff7 Mon Sep 17 00:00:00 2001 From: ayushaga14 Date: Thu, 29 Aug 2024 16:00:39 +0530 Subject: [PATCH 03/14] write instanceid to file and add creds in kafka --- .../java/com/akto/hybrid_runtime/Main.java | 39 +++++++++++++++++++ 1 file changed, 39 insertions(+) diff --git a/apps/mini-runtime/src/main/java/com/akto/hybrid_runtime/Main.java b/apps/mini-runtime/src/main/java/com/akto/hybrid_runtime/Main.java index 39605bdba7..e87a92534f 100644 --- a/apps/mini-runtime/src/main/java/com/akto/hybrid_runtime/Main.java +++ b/apps/mini-runtime/src/main/java/com/akto/hybrid_runtime/Main.java @@ -1,5 +1,10 @@ package com.akto.hybrid_runtime; +import java.io.BufferedReader; +import java.io.File; +import java.io.FileReader; +import java.io.FileWriter; +import java.io.IOException; import java.time.Duration; import java.util.*; import java.util.concurrent.Executors; @@ -23,6 +28,8 @@ import com.akto.data_actor.DataActorFactory; import com.akto.util.DashboardMode; import com.google.gson.Gson; +import com.mongodb.BasicDBObject; + import org.apache.kafka.clients.consumer.*; import org.apache.kafka.common.Metric; import org.apache.kafka.common.MetricName; @@ -109,6 +116,37 @@ private static void buildKafka() { } } + private static void insertCredsRecordInKafka(String brokerUrl) { + File f = new File("creds.txt"); + String instanceId = UUID.randomUUID().toString(); + if (f.exists()) { + try (FileReader reader = new FileReader(f); + BufferedReader bufferedReader = new BufferedReader(reader)) { + String line; + while ((line = bufferedReader.readLine()) != null) { + instanceId = line; + } + } catch (IOException e) { + loggerMaker.errorAndAddToDb("Error reading instanceId from file: " + e.getMessage()); + } + } else { + try (FileWriter writer = new FileWriter(f)) { + writer.write(instanceId); + } catch (IOException e) { + loggerMaker.errorAndAddToDb("Error writing instanceId to file: " + e.getMessage()); + } + } + + int batchSize = Integer.parseInt(System.getenv("AKTO_KAFKA_PRODUCER_BATCH_SIZE")); + int kafkaLingerMS = Integer.parseInt(System.getenv("AKTO_KAFKA_PRODUCER_LINGER_MS")); + kafkaProducer = new Kafka(brokerUrl, kafkaLingerMS, batchSize); + BasicDBObject creds = new BasicDBObject(); + creds.put("id", instanceId); + creds.put("token", System.getenv("DATABASE_ABSTRACTOR_SERVICE_TOKEN")); + creds.put("url", System.getenv("DATABASE_ABSTRACTOR_SERVICE_URL")); + kafkaProducer.send(creds.toJson(), "credentials"); + } + public static final ScheduledExecutorService scheduler = Executors.newScheduledThreadPool(2); public static class AccountInfo { @@ -171,6 +209,7 @@ public static void main(String[] args) { fetchAllSTI = false; } int maxPollRecordsConfig = Integer.parseInt(System.getenv("AKTO_KAFKA_MAX_POLL_RECORDS_CONFIG")); + insertCredsRecordInKafka(kafkaBrokerUrl); 
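
A note on the `credentials` topic used above: the record is a single JSON document with `id`, `token`, and `url` fields, and whatever consumes it is outside this patch series. A minimal consumer sketch — the group id and offset policy are assumptions, the broker address and topic name come from this file:

import java.time.Duration;
import java.util.Collections;
import java.util.Properties;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.serialization.StringDeserializer;
import com.mongodb.BasicDBObject;

public class CredsConsumerSketch {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "kafka1:19092"); // broker from Main
        props.put(ConsumerConfig.GROUP_ID_CONFIG, "creds-reader");          // hypothetical group id
        props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");     // assumption
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props)) {
            consumer.subscribe(Collections.singletonList("credentials"));
            for (ConsumerRecord<String, String> record : consumer.poll(Duration.ofSeconds(5))) {
                // fields written by insertCredsRecordInKafka: id, token, url
                BasicDBObject creds = BasicDBObject.parse(record.value());
                System.out.println("instanceId=" + creds.getString("id"));
            }
        }
    }
}
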
AccountSettings aSettings = dataActor.fetchAccountSettings(); if (aSettings == null) { From 15ca6ff947b1b7cf9a5b3a302fae0dddaca41a94 Mon Sep 17 00:00:00 2001 From: ayushaga14 Date: Sat, 31 Aug 2024 17:01:45 +0530 Subject: [PATCH 04/14] modify prod yml --- .github/workflows/prod.yml | 19 ++++--------------- 1 file changed, 4 insertions(+), 15 deletions(-) diff --git a/.github/workflows/prod.yml b/.github/workflows/prod.yml index 064df6898d..1cf8a890b7 100644 --- a/.github/workflows/prod.yml +++ b/.github/workflows/prod.yml @@ -58,19 +58,13 @@ jobs: ECR_REGISTRY: ${{ steps.login-ecr.outputs.registry }} ECR_REPOSITORY: akto-api-security REGISTRY_ALIAS: p7q3h0z2 - IMAGE_TAG: latest - IMAGE_TAG1: testruntime - IMAGE_TAG2: local - IMAGE_TAG3: 1.41.18_local + IMAGE_TAG: traffictest run: | docker buildx create --use # Build a docker container and push it to DockerHub cd apps/mini-runtime - docker buildx build --platform linux/arm64/v8,linux/amd64 -t $ECR_REGISTRY/$REGISTRY_ALIAS/akto-api-security-mini-runtime:$IMAGE_TAG -t $ECR_REGISTRY/$REGISTRY_ALIAS/akto-api-security-mini-runtime:$IMAGE_TAG1 -t $ECR_REGISTRY/$REGISTRY_ALIAS/akto-api-security-mini-runtime:$IMAGE_TAG2 -t $ECR_REGISTRY/$REGISTRY_ALIAS/akto-api-security-mini-runtime:$IMAGE_TAG3 . --push + docker buildx build --platform linux/arm64/v8,linux/amd64 -t $ECR_REGISTRY/$REGISTRY_ALIAS/akto-api-security-mini-runtime:$IMAGE_TAG . --push echo "::set-output name=image::$ECR_REGISTRY/akto-api-security-mini-runtime:$IMAGE_TAG" - cd ../mini-testing - docker buildx build --platform linux/arm64/v8,linux/amd64 -t $ECR_REGISTRY/$REGISTRY_ALIAS/akto-api-security-mini-testing:$IMAGE_TAG3 . --push - echo "::set-output name=image::$ECR_REGISTRY/akto-api-security-mini-testing:$IMAGE_TAG3" - name: DockerHub login env: @@ -83,15 +77,10 @@ jobs: env: ECR_REGISTRY: aktosecurity ECR_REPOSITORY: akto-api-security - IMAGE_TAG: latest - IMAGE_TAG1: testruntime - IMAGE_TAG2: local - IMAGE_TAG3: 1.41.18_local + IMAGE_TAG: traffictest run: | echo $IMAGE_TAG >> $GITHUB_STEP_SUMMARY docker buildx create --use # Build a docker container and push it to DockerHub cd apps/mini-runtime - docker buildx build --platform linux/arm64/v8,linux/amd64 -t $ECR_REGISTRY/mini-runtime:$IMAGE_TAG -t $ECR_REGISTRY/mini-runtime:$IMAGE_TAG1 -t $ECR_REGISTRY/mini-runtime:$IMAGE_TAG2 -t $ECR_REGISTRY/mini-runtime:$IMAGE_TAG3 . --push - cd ../mini-testing - docker buildx build --platform linux/arm64/v8,linux/amd64 -t $ECR_REGISTRY/mini-testing:$IMAGE_TAG3 . --push \ No newline at end of file + docker buildx build --platform linux/arm64/v8,linux/amd64 -t $ECR_REGISTRY/mini-runtime:$IMAGE_TAG . 
--push \ No newline at end of file From 8dea0d6d6db98548fd15d1fc57c24c477b5222b5 Mon Sep 17 00:00:00 2001 From: ayushaga14 Date: Tue, 10 Sep 2024 11:39:33 +0530 Subject: [PATCH 05/14] use instanceid from file --- .../java/com/akto/hybrid_runtime/Main.java | 19 ++++++++++++------- .../java/com/akto/metrics/AllMetrics.java | 18 ++++++++++++++---- 2 files changed, 26 insertions(+), 11 deletions(-) diff --git a/apps/mini-runtime/src/main/java/com/akto/hybrid_runtime/Main.java b/apps/mini-runtime/src/main/java/com/akto/hybrid_runtime/Main.java index e87a92534f..21c8e5f3ff 100644 --- a/apps/mini-runtime/src/main/java/com/akto/hybrid_runtime/Main.java +++ b/apps/mini-runtime/src/main/java/com/akto/hybrid_runtime/Main.java @@ -116,7 +116,7 @@ private static void buildKafka() { } } - private static void insertCredsRecordInKafka(String brokerUrl) { + private static String insertCredsRecordInKafka(String brokerUrl) { File f = new File("creds.txt"); String instanceId = UUID.randomUUID().toString(); if (f.exists()) { @@ -144,7 +144,12 @@ private static void insertCredsRecordInKafka(String brokerUrl) { creds.put("id", instanceId); creds.put("token", System.getenv("DATABASE_ABSTRACTOR_SERVICE_TOKEN")); creds.put("url", System.getenv("DATABASE_ABSTRACTOR_SERVICE_URL")); - kafkaProducer.send(creds.toJson(), "credentials"); + try { + kafkaProducer.send(creds.toJson(), "credentials"); + } catch (Exception e) { + loggerMaker.errorAndAddToDb("Error inserting creds record in kafka: " + e.getMessage()); + } + return instanceId; } public static final ScheduledExecutorService scheduler = Executors.newScheduledThreadPool(2); @@ -191,7 +196,7 @@ public static void main(String[] args) { //String mongoURI = System.getenv("AKTO_MONGO_CONN");; String configName = System.getenv("AKTO_CONFIG_NAME"); String topicName = getTopicName(); - String kafkaBrokerUrl = "kafka1:19092"; //System.getenv("AKTO_KAFKA_BROKER_URL"); + String kafkaBrokerUrl = "localhost:29092"; //System.getenv("AKTO_KAFKA_BROKER_URL"); String isKubernetes = System.getenv("IS_KUBERNETES"); if (isKubernetes != null && isKubernetes.equalsIgnoreCase("true")) { loggerMaker.infoAndAddToDb("is_kubernetes: true", LogDb.RUNTIME); @@ -209,7 +214,7 @@ public static void main(String[] args) { fetchAllSTI = false; } int maxPollRecordsConfig = Integer.parseInt(System.getenv("AKTO_KAFKA_MAX_POLL_RECORDS_CONFIG")); - insertCredsRecordInKafka(kafkaBrokerUrl); + String instanceId = insertCredsRecordInKafka(kafkaBrokerUrl); AccountSettings aSettings = dataActor.fetchAccountSettings(); if (aSettings == null) { @@ -227,7 +232,7 @@ public static void main(String[] args) { dataActor.modifyHybridSaasSetting(RuntimeMode.isHybridDeployment()); - initializeRuntime(); + initializeRuntime(instanceId); String centralKafkaTopicName = AccountSettings.DEFAULT_CENTRAL_KAFKA_TOPIC_NAME; @@ -560,12 +565,12 @@ public static void changeTargetCollection(Map> apiC } } - public static void initializeRuntime(){ + public static void initializeRuntime(String instanceId){ Account account = dataActor.fetchActiveAccount(); Context.accountId.set(account.getId()); - AllMetrics.instance.init(); + AllMetrics.instance.init(instanceId); loggerMaker.infoAndAddToDb("All metrics initialized", LogDb.RUNTIME); Setup setup = dataActor.fetchSetup(); diff --git a/libs/utils/src/main/java/com/akto/metrics/AllMetrics.java b/libs/utils/src/main/java/com/akto/metrics/AllMetrics.java index 8c9625e06c..0a7b48d54e 100644 --- a/libs/utils/src/main/java/com/akto/metrics/AllMetrics.java +++ 
b/libs/utils/src/main/java/com/akto/metrics/AllMetrics.java @@ -13,7 +13,6 @@ import java.io.IOException; import java.util.Arrays; import java.util.List; -import java.util.UUID; import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.TimeUnit; @@ -21,8 +20,11 @@ public class AllMetrics { public static final DataActor dataActor = DataActorFactory.fetchInstance(); - public void init(){ + private String instanceId; + + public void init(String instanceId){ int accountId = Context.accountId.get(); + this.setInstanceId(instanceId); Organization organization = DataActorFactory.fetchInstance().fetchOrganization(accountId); String orgId = organization.getId(); @@ -83,7 +85,7 @@ public void init(){ metricsData.put("metric_id", m.metricId); metricsData.put("val", metric); metricsData.put("org_id", m.orgId); - metricsData.put("instance_id", instance_id); + metricsData.put("instance_id", this.getInstanceId()); metricsData.put("account_id", m.accountId); metricsData.put("timestamp", Context.now()); list.add(metricsData); @@ -114,7 +116,6 @@ private AllMetrics(){} private final static LoggerMaker loggerMaker = new LoggerMaker(AllMetrics.class); - private static final String instance_id = UUID.randomUUID().toString(); private Metric runtimeKafkaRecordCount; private Metric runtimeKafkaRecordSize; private Metric runtimeProcessLatency = null; @@ -461,4 +462,13 @@ public static void sendData(BasicDBList list){ loggerMaker.infoAndAddToDb("Traffic_metrics not sent", LoggerMaker.LogDb.RUNTIME); } } + + public String getInstanceId() { + return instanceId; + } + + public void setInstanceId(String instanceId) { + this.instanceId = instanceId; + } + } From afeeb33077647181282ce3344f34f7d6c565f288 Mon Sep 17 00:00:00 2001 From: ayushaga14 Date: Tue, 10 Sep 2024 11:48:05 +0530 Subject: [PATCH 06/14] revert kafka broker url val --- .../src/main/java/com/akto/hybrid_runtime/Main.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apps/mini-runtime/src/main/java/com/akto/hybrid_runtime/Main.java b/apps/mini-runtime/src/main/java/com/akto/hybrid_runtime/Main.java index 21c8e5f3ff..c71daa5d3d 100644 --- a/apps/mini-runtime/src/main/java/com/akto/hybrid_runtime/Main.java +++ b/apps/mini-runtime/src/main/java/com/akto/hybrid_runtime/Main.java @@ -196,7 +196,7 @@ public static void main(String[] args) { //String mongoURI = System.getenv("AKTO_MONGO_CONN");; String configName = System.getenv("AKTO_CONFIG_NAME"); String topicName = getTopicName(); - String kafkaBrokerUrl = "localhost:29092"; //System.getenv("AKTO_KAFKA_BROKER_URL"); + String kafkaBrokerUrl = "kafka1:19092"; //System.getenv("AKTO_KAFKA_BROKER_URL"); String isKubernetes = System.getenv("IS_KUBERNETES"); if (isKubernetes != null && isKubernetes.equalsIgnoreCase("true")) { loggerMaker.infoAndAddToDb("is_kubernetes: true", LogDb.RUNTIME); From a4d700b49993313e2db9a2419fcbe6ad9bf9f59d Mon Sep 17 00:00:00 2001 From: TangoBeeAkto Date: Thu, 19 Sep 2024 11:18:10 +0530 Subject: [PATCH 07/14] feat: adding version and instanceId for traffic processor --- .../java/com/akto/hybrid_runtime/Main.java | 59 +++++++++++++++---- .../akto/hybrid_runtime/RuntimeVersion.java | 3 +- .../java/com/akto/metrics/AllMetrics.java | 28 ++++++++- 3 files changed, 76 insertions(+), 14 deletions(-) diff --git a/apps/mini-runtime/src/main/java/com/akto/hybrid_runtime/Main.java b/apps/mini-runtime/src/main/java/com/akto/hybrid_runtime/Main.java index b2401ed941..dab3f20d37 100644 --- 
a/apps/mini-runtime/src/main/java/com/akto/hybrid_runtime/Main.java +++ b/apps/mini-runtime/src/main/java/com/akto/hybrid_runtime/Main.java @@ -1,5 +1,6 @@ package com.akto.hybrid_runtime; +import java.io.*; import java.time.Duration; import java.util.*; import java.util.concurrent.Executors; @@ -23,6 +24,7 @@ import com.akto.data_actor.DataActorFactory; import com.akto.util.DashboardMode; import com.google.gson.Gson; +import com.mongodb.BasicDBObject; import org.apache.kafka.clients.consumer.*; import org.apache.kafka.common.Metric; import org.apache.kafka.common.MetricName; @@ -109,6 +111,41 @@ private static void buildKafka() { } } + private static String insertCredsRecordInKafka(String brokerUrl) { + File f = new File("creds.txt"); + String instanceId = UUID.randomUUID().toString(); + if (f.exists()) { + try (FileReader reader = new FileReader(f); + BufferedReader bufferedReader = new BufferedReader(reader)) { + String line; + while ((line = bufferedReader.readLine()) != null) { + instanceId = line; + } + } catch (IOException e) { + loggerMaker.errorAndAddToDb("Error reading instanceId from file: " + e.getMessage()); + } + } else { + try (FileWriter writer = new FileWriter(f)) { + writer.write(instanceId); + } catch (IOException e) { + loggerMaker.errorAndAddToDb("Error writing instanceId to file: " + e.getMessage()); + } + } + int batchSize = Integer.parseInt(System.getenv("AKTO_KAFKA_PRODUCER_BATCH_SIZE")); + int kafkaLingerMS = Integer.parseInt(System.getenv("AKTO_KAFKA_PRODUCER_LINGER_MS")); + kafkaProducer = new Kafka(brokerUrl, kafkaLingerMS, batchSize); + BasicDBObject creds = new BasicDBObject(); + creds.put("id", instanceId); + creds.put("token", System.getenv("DATABASE_ABSTRACTOR_SERVICE_TOKEN")); + creds.put("url", System.getenv("DATABASE_ABSTRACTOR_SERVICE_URL")); + try { + kafkaProducer.send(creds.toJson(), "credentials"); + } catch (Exception e) { + loggerMaker.errorAndAddToDb("Error inserting creds record in kafka: " + e.getMessage()); + } + return instanceId; + } + public static final ScheduledExecutorService scheduler = Executors.newScheduledThreadPool(2); public static class AccountInfo { @@ -171,6 +208,7 @@ public static void main(String[] args) { fetchAllSTI = false; } int maxPollRecordsConfig = Integer.parseInt(System.getenv("AKTO_KAFKA_MAX_POLL_RECORDS_CONFIG")); + String instanceId = insertCredsRecordInKafka(kafkaBrokerUrl); AccountSettings aSettings = dataActor.fetchAccountSettings(); if (aSettings == null) { @@ -188,7 +226,7 @@ public static void main(String[] args) { dataActor.modifyHybridSaasSetting(RuntimeMode.isHybridDeployment()); - initializeRuntime(); + initializeRuntime(instanceId); String centralKafkaTopicName = AccountSettings.DEFAULT_CENTRAL_KAFKA_TOPIC_NAME; @@ -510,12 +548,20 @@ public static void changeTargetCollection(Map> apiC } } - public static void initializeRuntime(){ + public static void initializeRuntime(String instanceId){ Account account = dataActor.fetchActiveAccount(); Context.accountId.set(account.getId()); - AllMetrics.instance.init(); + String version = ""; + RuntimeVersion runtimeVersion = new RuntimeVersion(); + try { + version = runtimeVersion.updateVersion(AccountSettings.API_RUNTIME_VERSION, dataActor); + } catch (Exception e) { + loggerMaker.errorAndAddToDb("error while updating dashboard version: " + e.getMessage(), LogDb.RUNTIME); + } + + AllMetrics.instance.init(instanceId, version); loggerMaker.infoAndAddToDb("All metrics initialized", LogDb.RUNTIME); Setup setup = dataActor.fetchSetup(); @@ -526,13 +572,6 @@ public static 
void initializeRuntime(){ } isOnprem = dashboardMode.equalsIgnoreCase(DashboardMode.ON_PREM.name()); - - RuntimeVersion runtimeVersion = new RuntimeVersion(); - try { - runtimeVersion.updateVersion(AccountSettings.API_RUNTIME_VERSION, dataActor); - } catch (Exception e) { - loggerMaker.errorAndAddToDb("error while updating dashboard version: " + e.getMessage(), LogDb.RUNTIME); - } initFromRuntime(account.getId()); diff --git a/apps/mini-runtime/src/main/java/com/akto/hybrid_runtime/RuntimeVersion.java b/apps/mini-runtime/src/main/java/com/akto/hybrid_runtime/RuntimeVersion.java index 2c37e49551..53a4037374 100644 --- a/apps/mini-runtime/src/main/java/com/akto/hybrid_runtime/RuntimeVersion.java +++ b/apps/mini-runtime/src/main/java/com/akto/hybrid_runtime/RuntimeVersion.java @@ -9,7 +9,7 @@ public class RuntimeVersion { - public void updateVersion(String fieldName, DataActor dataActor) throws Exception { + public String updateVersion(String fieldName, DataActor dataActor) throws Exception { try (InputStream in = getClass().getResourceAsStream("/version.txt")) { if (in != null) { BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(in)); @@ -18,6 +18,7 @@ public void updateVersion(String fieldName, DataActor dataActor) throws Exceptio String version = imageTag + " - " + buildTime; dataActor.updateRuntimeVersion(fieldName, version); + return version; } else { throw new Exception("Input stream null"); } diff --git a/libs/utils/src/main/java/com/akto/metrics/AllMetrics.java b/libs/utils/src/main/java/com/akto/metrics/AllMetrics.java index e7e79c88ee..fa43f6bacb 100644 --- a/libs/utils/src/main/java/com/akto/metrics/AllMetrics.java +++ b/libs/utils/src/main/java/com/akto/metrics/AllMetrics.java @@ -19,8 +19,13 @@ public class AllMetrics { - public void init(){ + private String instanceId; + private String version; + + public void init(String instanceId, String version){ int accountId = Context.accountId.get(); + this.setInstanceId(instanceId); + this.setVersion(version); Organization organization = DataActorFactory.fetchInstance().fetchOrganization(accountId); String orgId = organization.getId(); @@ -81,8 +86,10 @@ public void init(){ metricsData.put("metric_id", m.metricId); metricsData.put("val", metric); metricsData.put("org_id", m.orgId); - metricsData.put("instance_id", instance_id); + metricsData.put("instance_id", this.getInstanceId()); + metricsData.put("version", this.getVersion()); metricsData.put("account_id", m.accountId); + metricsData.put("timestamp", Context.now()); list.add(metricsData); } @@ -110,7 +117,6 @@ private AllMetrics(){} private final static LoggerMaker loggerMaker = new LoggerMaker(AllMetrics.class); - private static final String instance_id = UUID.randomUUID().toString(); private Metric runtimeKafkaRecordCount; private Metric runtimeKafkaRecordSize; private Metric runtimeProcessLatency = null; @@ -431,4 +437,20 @@ public static void sendDataToAkto(BasicDBList list){ loggerMaker.infoAndAddToDb("Traffic_metrics not sent", LoggerMaker.LogDb.RUNTIME); } } + + public String getInstanceId() { + return instanceId; + } + + public void setInstanceId(String instanceId) { + this.instanceId = instanceId; + } + + public String getVersion() { + return version; + } + + public void setVersion(String version) { + this.version = version; + } } From ee0a7332d1b9e86da71b6026fd1dda37b8144d63 Mon Sep 17 00:00:00 2001 From: Avneesh Hota Date: Thu, 19 Sep 2024 17:52:31 +0530 Subject: [PATCH 08/14] changed env vars and updated prod.yml --- .github/workflows/prod.yml | 19 
++++--------------- .../java/com/akto/hybrid_runtime/Main.java | 4 ++-- 2 files changed, 6 insertions(+), 17 deletions(-) diff --git a/.github/workflows/prod.yml b/.github/workflows/prod.yml index 03de907457..e8be1a3358 100644 --- a/.github/workflows/prod.yml +++ b/.github/workflows/prod.yml @@ -58,19 +58,13 @@ jobs: ECR_REGISTRY: ${{ steps.login-ecr.outputs.registry }} ECR_REPOSITORY: akto-api-security REGISTRY_ALIAS: p7q3h0z2 - IMAGE_TAG: latest - IMAGE_TAG1: testruntime - IMAGE_TAG2: local - IMAGE_TAG3: 1.41.20_local + IMAGE_TAG: collector_testing run: | docker buildx create --use # Build a docker container and push it to DockerHub cd apps/mini-runtime - docker buildx build --platform linux/arm64/v8,linux/amd64 -t $ECR_REGISTRY/$REGISTRY_ALIAS/akto-api-security-mini-runtime:$IMAGE_TAG -t $ECR_REGISTRY/$REGISTRY_ALIAS/akto-api-security-mini-runtime:$IMAGE_TAG1 -t $ECR_REGISTRY/$REGISTRY_ALIAS/akto-api-security-mini-runtime:$IMAGE_TAG2 -t $ECR_REGISTRY/$REGISTRY_ALIAS/akto-api-security-mini-runtime:$IMAGE_TAG3 . --push + docker buildx build --platform linux/arm64/v8,linux/amd64 -t $ECR_REGISTRY/$REGISTRY_ALIAS/akto-api-security-mini-runtime:$IMAGE_TAG -t . --push echo "::set-output name=image::$ECR_REGISTRY/akto-api-security-mini-runtime:$IMAGE_TAG" - cd ../mini-testing - docker buildx build --platform linux/arm64/v8,linux/amd64 -t $ECR_REGISTRY/$REGISTRY_ALIAS/akto-api-security-mini-testing:$IMAGE_TAG -t $ECR_REGISTRY/$REGISTRY_ALIAS/akto-api-security-mini-testing:$IMAGE_TAG1 -t $ECR_REGISTRY/$REGISTRY_ALIAS/akto-api-security-mini-testing:$IMAGE_TAG2 -t $ECR_REGISTRY/$REGISTRY_ALIAS/akto-api-security-mini-testing:$IMAGE_TAG3 . --push - echo "::set-output name=image::$ECR_REGISTRY/akto-api-security-mini-testing:$IMAGE_TAG3" - name: DockerHub login env: @@ -83,15 +77,10 @@ jobs: env: ECR_REGISTRY: aktosecurity ECR_REPOSITORY: akto-api-security - IMAGE_TAG: latest - IMAGE_TAG1: testruntime - IMAGE_TAG2: local - IMAGE_TAG3: 1.41.20_local + IMAGE_TAG: collector_testing run: | echo $IMAGE_TAG >> $GITHUB_STEP_SUMMARY docker buildx create --use # Build a docker container and push it to DockerHub cd apps/mini-runtime - docker buildx build --platform linux/arm64/v8,linux/amd64 -t $ECR_REGISTRY/mini-runtime:$IMAGE_TAG -t $ECR_REGISTRY/mini-runtime:$IMAGE_TAG1 -t $ECR_REGISTRY/mini-runtime:$IMAGE_TAG2 -t $ECR_REGISTRY/mini-runtime:$IMAGE_TAG3 . --push - cd ../mini-testing - docker buildx build --platform linux/arm64/v8,linux/amd64 -t $ECR_REGISTRY/mini-testing:$IMAGE_TAG -t $ECR_REGISTRY/mini-testing:$IMAGE_TAG1 -t $ECR_REGISTRY/mini-testing:$IMAGE_TAG2 -t $ECR_REGISTRY/mini-testing:$IMAGE_TAG3 . --push \ No newline at end of file + docker buildx build --platform linux/arm64/v8,linux/amd64 -t $ECR_REGISTRY/mini-runtime:$IMAGE_TAG . 
--push \ No newline at end of file diff --git a/apps/mini-runtime/src/main/java/com/akto/hybrid_runtime/Main.java b/apps/mini-runtime/src/main/java/com/akto/hybrid_runtime/Main.java index dab3f20d37..f9c079dcf1 100644 --- a/apps/mini-runtime/src/main/java/com/akto/hybrid_runtime/Main.java +++ b/apps/mini-runtime/src/main/java/com/akto/hybrid_runtime/Main.java @@ -131,8 +131,8 @@ private static String insertCredsRecordInKafka(String brokerUrl) { loggerMaker.errorAndAddToDb("Error writing instanceId to file: " + e.getMessage()); } } - int batchSize = Integer.parseInt(System.getenv("AKTO_KAFKA_PRODUCER_BATCH_SIZE")); - int kafkaLingerMS = Integer.parseInt(System.getenv("AKTO_KAFKA_PRODUCER_LINGER_MS")); + int batchSize = Integer.parseInt(System.getenv("AKTO_TRAFFIC_BATCH_SIZE")); + int kafkaLingerMS = Integer.parseInt(System.getenv("AKTO_TRAFFIC_BATCH_TIME_SECS")); kafkaProducer = new Kafka(brokerUrl, kafkaLingerMS, batchSize); BasicDBObject creds = new BasicDBObject(); creds.put("id", instanceId); From bb97815b6273220eee901f64f0b093f8d1c735fe Mon Sep 17 00:00:00 2001 From: Avneesh Hota Date: Thu, 19 Sep 2024 17:58:04 +0530 Subject: [PATCH 09/14] fixed prod.yml --- .github/workflows/prod.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/prod.yml b/.github/workflows/prod.yml index e8be1a3358..f32f30049d 100644 --- a/.github/workflows/prod.yml +++ b/.github/workflows/prod.yml @@ -63,7 +63,7 @@ jobs: docker buildx create --use # Build a docker container and push it to DockerHub cd apps/mini-runtime - docker buildx build --platform linux/arm64/v8,linux/amd64 -t $ECR_REGISTRY/$REGISTRY_ALIAS/akto-api-security-mini-runtime:$IMAGE_TAG -t . --push + docker buildx build --platform linux/arm64/v8,linux/amd64 -t $ECR_REGISTRY/$REGISTRY_ALIAS/akto-api-security-mini-runtime:$IMAGE_TAG . --push echo "::set-output name=image::$ECR_REGISTRY/akto-api-security-mini-runtime:$IMAGE_TAG" - name: DockerHub login From a5dd0d4734c8298fb540899e7f160941dc1588dd Mon Sep 17 00:00:00 2001 From: TangoBeeAkto Date: Thu, 19 Sep 2024 17:47:49 +0530 Subject: [PATCH 10/14] fix: change in prod yml --- .github/workflows/prod.yml | 19 +++++++++++++++---- 1 file changed, 15 insertions(+), 4 deletions(-) diff --git a/.github/workflows/prod.yml b/.github/workflows/prod.yml index f32f30049d..03de907457 100644 --- a/.github/workflows/prod.yml +++ b/.github/workflows/prod.yml @@ -58,13 +58,19 @@ jobs: ECR_REGISTRY: ${{ steps.login-ecr.outputs.registry }} ECR_REPOSITORY: akto-api-security REGISTRY_ALIAS: p7q3h0z2 - IMAGE_TAG: collector_testing + IMAGE_TAG: latest + IMAGE_TAG1: testruntime + IMAGE_TAG2: local + IMAGE_TAG3: 1.41.20_local run: | docker buildx create --use # Build a docker container and push it to DockerHub cd apps/mini-runtime - docker buildx build --platform linux/arm64/v8,linux/amd64 -t $ECR_REGISTRY/$REGISTRY_ALIAS/akto-api-security-mini-runtime:$IMAGE_TAG . --push + docker buildx build --platform linux/arm64/v8,linux/amd64 -t $ECR_REGISTRY/$REGISTRY_ALIAS/akto-api-security-mini-runtime:$IMAGE_TAG -t $ECR_REGISTRY/$REGISTRY_ALIAS/akto-api-security-mini-runtime:$IMAGE_TAG1 -t $ECR_REGISTRY/$REGISTRY_ALIAS/akto-api-security-mini-runtime:$IMAGE_TAG2 -t $ECR_REGISTRY/$REGISTRY_ALIAS/akto-api-security-mini-runtime:$IMAGE_TAG3 . 
--push echo "::set-output name=image::$ECR_REGISTRY/akto-api-security-mini-runtime:$IMAGE_TAG" + cd ../mini-testing + docker buildx build --platform linux/arm64/v8,linux/amd64 -t $ECR_REGISTRY/$REGISTRY_ALIAS/akto-api-security-mini-testing:$IMAGE_TAG -t $ECR_REGISTRY/$REGISTRY_ALIAS/akto-api-security-mini-testing:$IMAGE_TAG1 -t $ECR_REGISTRY/$REGISTRY_ALIAS/akto-api-security-mini-testing:$IMAGE_TAG2 -t $ECR_REGISTRY/$REGISTRY_ALIAS/akto-api-security-mini-testing:$IMAGE_TAG3 . --push + echo "::set-output name=image::$ECR_REGISTRY/akto-api-security-mini-testing:$IMAGE_TAG3" - name: DockerHub login env: @@ -77,10 +83,15 @@ jobs: env: ECR_REGISTRY: aktosecurity ECR_REPOSITORY: akto-api-security - IMAGE_TAG: collector_testing + IMAGE_TAG: latest + IMAGE_TAG1: testruntime + IMAGE_TAG2: local + IMAGE_TAG3: 1.41.20_local run: | echo $IMAGE_TAG >> $GITHUB_STEP_SUMMARY docker buildx create --use # Build a docker container and push it to DockerHub cd apps/mini-runtime - docker buildx build --platform linux/arm64/v8,linux/amd64 -t $ECR_REGISTRY/mini-runtime:$IMAGE_TAG . --push \ No newline at end of file + docker buildx build --platform linux/arm64/v8,linux/amd64 -t $ECR_REGISTRY/mini-runtime:$IMAGE_TAG -t $ECR_REGISTRY/mini-runtime:$IMAGE_TAG1 -t $ECR_REGISTRY/mini-runtime:$IMAGE_TAG2 -t $ECR_REGISTRY/mini-runtime:$IMAGE_TAG3 . --push + cd ../mini-testing + docker buildx build --platform linux/arm64/v8,linux/amd64 -t $ECR_REGISTRY/mini-testing:$IMAGE_TAG -t $ECR_REGISTRY/mini-testing:$IMAGE_TAG1 -t $ECR_REGISTRY/mini-testing:$IMAGE_TAG2 -t $ECR_REGISTRY/mini-testing:$IMAGE_TAG3 . --push \ No newline at end of file From a0ebd3c451384b2a70e30ff763e697236978f1c9 Mon Sep 17 00:00:00 2001 From: Avneesh Hota Date: Fri, 20 Sep 2024 11:39:34 +0530 Subject: [PATCH 11/14] added logs and modified prod.yml for testing --- .github/workflows/prod.yml | 19 ++++--------------- .../java/com/akto/metrics/AllMetrics.java | 10 ++++++++++ 2 files changed, 14 insertions(+), 15 deletions(-) diff --git a/.github/workflows/prod.yml b/.github/workflows/prod.yml index 03de907457..4a7ba0ed2a 100644 --- a/.github/workflows/prod.yml +++ b/.github/workflows/prod.yml @@ -58,19 +58,13 @@ jobs: ECR_REGISTRY: ${{ steps.login-ecr.outputs.registry }} ECR_REPOSITORY: akto-api-security REGISTRY_ALIAS: p7q3h0z2 - IMAGE_TAG: latest - IMAGE_TAG1: testruntime - IMAGE_TAG2: local - IMAGE_TAG3: 1.41.20_local + IMAGE_TAG: collector-testing run: | docker buildx create --use # Build a docker container and push it to DockerHub cd apps/mini-runtime - docker buildx build --platform linux/arm64/v8,linux/amd64 -t $ECR_REGISTRY/$REGISTRY_ALIAS/akto-api-security-mini-runtime:$IMAGE_TAG -t $ECR_REGISTRY/$REGISTRY_ALIAS/akto-api-security-mini-runtime:$IMAGE_TAG1 -t $ECR_REGISTRY/$REGISTRY_ALIAS/akto-api-security-mini-runtime:$IMAGE_TAG2 -t $ECR_REGISTRY/$REGISTRY_ALIAS/akto-api-security-mini-runtime:$IMAGE_TAG3 . --push + docker buildx build --platform linux/arm64/v8,linux/amd64 -t $ECR_REGISTRY/$REGISTRY_ALIAS/akto-api-security-mini-runtime:$IMAGE_TAG . --push echo "::set-output name=image::$ECR_REGISTRY/akto-api-security-mini-runtime:$IMAGE_TAG" - cd ../mini-testing - docker buildx build --platform linux/arm64/v8,linux/amd64 -t $ECR_REGISTRY/$REGISTRY_ALIAS/akto-api-security-mini-testing:$IMAGE_TAG -t $ECR_REGISTRY/$REGISTRY_ALIAS/akto-api-security-mini-testing:$IMAGE_TAG1 -t $ECR_REGISTRY/$REGISTRY_ALIAS/akto-api-security-mini-testing:$IMAGE_TAG2 -t $ECR_REGISTRY/$REGISTRY_ALIAS/akto-api-security-mini-testing:$IMAGE_TAG3 . 
--push - echo "::set-output name=image::$ECR_REGISTRY/akto-api-security-mini-testing:$IMAGE_TAG3" - name: DockerHub login env: @@ -83,15 +77,10 @@ jobs: env: ECR_REGISTRY: aktosecurity ECR_REPOSITORY: akto-api-security - IMAGE_TAG: latest - IMAGE_TAG1: testruntime - IMAGE_TAG2: local - IMAGE_TAG3: 1.41.20_local + IMAGE_TAG: collector-testing run: | echo $IMAGE_TAG >> $GITHUB_STEP_SUMMARY docker buildx create --use # Build a docker container and push it to DockerHub cd apps/mini-runtime - docker buildx build --platform linux/arm64/v8,linux/amd64 -t $ECR_REGISTRY/mini-runtime:$IMAGE_TAG -t $ECR_REGISTRY/mini-runtime:$IMAGE_TAG1 -t $ECR_REGISTRY/mini-runtime:$IMAGE_TAG2 -t $ECR_REGISTRY/mini-runtime:$IMAGE_TAG3 . --push - cd ../mini-testing - docker buildx build --platform linux/arm64/v8,linux/amd64 -t $ECR_REGISTRY/mini-testing:$IMAGE_TAG -t $ECR_REGISTRY/mini-testing:$IMAGE_TAG1 -t $ECR_REGISTRY/mini-testing:$IMAGE_TAG2 -t $ECR_REGISTRY/mini-testing:$IMAGE_TAG3 . --push \ No newline at end of file + docker buildx build --platform linux/arm64/v8,linux/amd64 -t $ECR_REGISTRY/mini-runtime:$IMAGE_TAG . --push \ No newline at end of file diff --git a/libs/utils/src/main/java/com/akto/metrics/AllMetrics.java b/libs/utils/src/main/java/com/akto/metrics/AllMetrics.java index d950bbc977..d6005bfc15 100644 --- a/libs/utils/src/main/java/com/akto/metrics/AllMetrics.java +++ b/libs/utils/src/main/java/com/akto/metrics/AllMetrics.java @@ -25,6 +25,7 @@ public class AllMetrics { private String version; public void init(String instanceId, String version){ + loggerMaker.infoAndAddToDb("start init"); int accountId = Context.accountId.get(); this.setInstanceId(instanceId); this.setVersion(version); @@ -32,6 +33,9 @@ public void init(String instanceId, String version){ Organization organization = DataActorFactory.fetchInstance().fetchOrganization(accountId); String orgId = organization.getId(); + loggerMaker.infoAndAddToDb("Org id: " + orgId); + loggerMaker.infoAndAddToDb("Version: " + version); + runtimeKafkaRecordCount = new SumMetric("RT_KAFKA_RECORD_COUNT", 60, accountId, orgId); runtimeKafkaRecordSize = new SumMetric("RT_KAFKA_RECORD_SIZE", 60, accountId, orgId); runtimeProcessLatency = new LatencyMetric("RT_KAFKA_LATENCY", 60, accountId, orgId); @@ -95,9 +99,15 @@ public void init(String instanceId, String version){ list.add(metricsData); } + + loggerMaker.infoAndAddToDb("List is empty: " + list.isEmpty(), LoggerMaker.LogDb.RUNTIME); + if(!list.isEmpty()) { + loggerMaker.infoAndAddToDb("starting sendDataToAkto", LoggerMaker.LogDb.RUNTIME); sendDataToAkto(list); + loggerMaker.infoAndAddToDb("finished sendDataToAkto", LoggerMaker.LogDb.RUNTIME); dataActor.insertRuntimeMetricsData(list); + loggerMaker.infoAndAddToDb("finished insertRuntimeMetricsData", LoggerMaker.LogDb.RUNTIME); } } catch (Exception e){ loggerMaker.errorAndAddToDb("Error while sending metrics to akto: " + e.getMessage(), LoggerMaker.LogDb.RUNTIME); From 8cb72e07bc41a0c53f293095a70d1485de745c76 Mon Sep 17 00:00:00 2001 From: TangoBeeAkto Date: Fri, 20 Sep 2024 12:09:09 +0530 Subject: [PATCH 12/14] fix: adding akto image tag and build time building mvn packaging --- .github/workflows/prod.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/prod.yml b/.github/workflows/prod.yml index 4a7ba0ed2a..090a45e6b8 100644 --- a/.github/workflows/prod.yml +++ b/.github/workflows/prod.yml @@ -31,7 +31,7 @@ jobs: node-version: '17' - name: mvn package command - run: mvn package + run: mvn package 
-Dakto-image-tag=$IMAGE_TAG -Dakto-build-time=$(eval "date +%s") - name: Configure AWS Credentials uses: aws-actions/configure-aws-credentials@v1 From 8a5e4d291ac8a9e5ae62e4a142a4dffe79123672 Mon Sep 17 00:00:00 2001 From: TangoBeeAkto Date: Fri, 20 Sep 2024 12:30:06 +0530 Subject: [PATCH 13/14] fix: adding image tag version in env for mvn package --- .github/workflows/prod.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/prod.yml b/.github/workflows/prod.yml index 090a45e6b8..05db1d3685 100644 --- a/.github/workflows/prod.yml +++ b/.github/workflows/prod.yml @@ -31,6 +31,8 @@ jobs: node-version: '17' - name: mvn package command + env: + IMAGE_TAG: collector_testing run: mvn package -Dakto-image-tag=$IMAGE_TAG -Dakto-build-time=$(eval "date +%s") - name: Configure AWS Credentials From a12ac77bf4e32024b2f80a029a84d45844fbaac3 Mon Sep 17 00:00:00 2001 From: TangoBeeAkto Date: Fri, 20 Sep 2024 12:44:13 +0530 Subject: [PATCH 14/14] fix: fixed image tag typo --- .github/workflows/prod.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/prod.yml b/.github/workflows/prod.yml index 05db1d3685..e8edbd05ab 100644 --- a/.github/workflows/prod.yml +++ b/.github/workflows/prod.yml @@ -32,7 +32,7 @@ jobs: - name: mvn package command env: - IMAGE_TAG: collector_testing + IMAGE_TAG: collector-testing run: mvn package -Dakto-image-tag=$IMAGE_TAG -Dakto-build-time=$(eval "date +%s") - name: Configure AWS Credentials
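
Taken together, patches 12-14 close the loop on versioning: `mvn package` now receives `-Dakto-image-tag` and `-Dakto-build-time`, and patch 07's `RuntimeVersion.updateVersion` reads them back from `/version.txt`, reporting the version as `imageTag + " - " + buildTime`. A minimal sketch of that read path, assuming — this is not shown in the patches — that Maven resource filtering writes the two properties into `version.txt` on separate lines:

import java.io.BufferedReader;
import java.io.InputStream;
import java.io.InputStreamReader;

public class VersionTxtSketch {
    public static void main(String[] args) throws Exception {
        // Assumed layout: image tag on line 1, build time (epoch seconds from
        // `date +%s`) on line 2; RuntimeVersion performs the same null check.
        try (InputStream in = VersionTxtSketch.class.getResourceAsStream("/version.txt")) {
            if (in == null) throw new IllegalStateException("version.txt missing from classpath");
            BufferedReader reader = new BufferedReader(new InputStreamReader(in));
            String imageTag = reader.readLine();   // e.g. "collector-testing"
            String buildTime = reader.readLine();  // e.g. "1726814053"
            System.out.println(imageTag + " - " + buildTime); // format RuntimeVersion reports
        }
    }
}

If the resource-filtering assumption is wrong, only the file-producing side changes; the reader above mirrors what `RuntimeVersion` already does.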