Merge pull request #1523 from akto-api-security/fix/count_in_groups
Fixing count in groups: endpoint counts now take a filter so deactivated collections can be excluded, API groups use a fallback count instead of a live SingleTypeInfo query, and a new endpoint reports per-collection counts for deactivated host-name collections.
avneesh-akto authored Sep 20, 2024
2 parents 9637134 + 1ec7c98 commit 33e43d9
Showing 11 changed files with 172 additions and 52 deletions.
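
At a high level, the fix threads a Bson filter through the endpoint-count aggregation in ApiCollectionsDao so that callers can exclude deactivated collections or restrict the count to a chosen set of collection ids, and it adds a new action that reports per-collection counts for deactivated host-name collections. The sketch below illustrates the three call shapes that appear in the changed files; the wrapper class, the main method, and the hard-coded ids are hypothetical, everything else mirrors the diff.

import java.util.Arrays;
import java.util.List;
import java.util.Map;

import com.akto.dao.ApiCollectionsDao;
import com.akto.dto.type.SingleTypeInfo;
import com.mongodb.client.model.Filters;

// Hypothetical wrapper class, for illustration only.
public class FilteredEndpointCountSketch {

    public static void main(String[] args) {
        // Hypothetical ids of deactivated collections.
        List<Integer> deactivatedIds = Arrays.asList(101, 102);

        // 1. Unfiltered counts (the old behaviour): pass an empty filter.
        Map<Integer, Integer> allCounts = ApiCollectionsDao.instance
                .buildEndpointsCountToApiCollectionMap(Filters.empty());

        // 2. Counts that skip deactivated collections, as fetchAllCollectionsBasic() now does.
        Map<Integer, Integer> activeCounts = ApiCollectionsDao.instance
                .buildEndpointsCountToApiCollectionMap(
                        Filters.nin(SingleTypeInfo._API_COLLECTION_ID, deactivatedIds));

        // 3. Counts restricted to the deactivated collections themselves,
        //    as getCountForHostnameDeactivatedCollections() does.
        Map<Integer, Integer> deactivatedCounts = ApiCollectionsDao.instance
                .buildEndpointsCountToApiCollectionMap(
                        Filters.in(SingleTypeInfo._COLLECTION_IDS, deactivatedIds));

        System.out.println(allCounts.size() + " / " + activeCounts.size() + " / " + deactivatedCounts.size());
    }
}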
@@ -26,9 +26,6 @@
import com.akto.dto.CollectionConditions.ConditionUtils;
import com.akto.dto.billing.Organization;
import com.akto.dto.type.SingleTypeInfo;
import com.akto.dto.usage.MetricTypes;
import com.akto.dto.usage.UsageMetric;
import com.akto.listener.InitializerListener;
import com.akto.listener.RuntimeListener;
import com.akto.log.LoggerMaker;
import com.akto.log.LoggerMaker.LogDb;
@@ -78,10 +75,10 @@ public void setApiList(List<ApiInfoKey> apiList) {

boolean redacted;

public List<ApiCollection> fillApiCollectionsUrlCount(List<ApiCollection> apiCollections) {
public List<ApiCollection> fillApiCollectionsUrlCount(List<ApiCollection> apiCollections, Bson filter) {
int tsRandom = Context.now();
loggerMaker.infoAndAddToDb("fillApiCollectionsUrlCount started: " + tsRandom, LoggerMaker.LogDb.DASHBOARD);
Map<Integer, Integer> countMap = ApiCollectionsDao.instance.buildEndpointsCountToApiCollectionMap();
Map<Integer, Integer> countMap = ApiCollectionsDao.instance.buildEndpointsCountToApiCollectionMap(filter);
loggerMaker.infoAndAddToDb("fillApiCollectionsUrlCount buildEndpointsCountToApiCollectionMap done: " + tsRandom, LoggerMaker.LogDb.DASHBOARD);

for (ApiCollection apiCollection: apiCollections) {
@@ -93,11 +90,7 @@ public List<ApiCollection> fillApiCollectionsUrlCount(List<ApiCollection> apiCol
apiCollection.setUrlsCount(count);
} else if(ApiCollection.Type.API_GROUP.equals(apiCollection.getType())){
if (count == null) {
List<Bson> filters = SingleTypeInfoDao.filterForHostHostHeaderRaw();
filters.add(Filters.in(SingleTypeInfo._COLLECTION_IDS, apiCollectionId));
count = (int) SingleTypeInfoDao.instance.count(
Filters.and(filters)
);
count = fallbackCount;
}
apiCollection.setUrlsCount(count);
} else {
@@ -116,9 +109,35 @@ public List<ApiCollection> fillApiCollectionsUrlCount(List<ApiCol
return apiCollections;
}

private Map<Integer, Integer> deactivatedHostnameCountMap;

public String getCountForHostnameDeactivatedCollections(){
this.deactivatedHostnameCountMap = new HashMap<>();
if(deactivatedCollections == null || deactivatedCollections.isEmpty()){
return SUCCESS.toUpperCase();
}
Bson filter = Filters.and(Filters.exists(ApiCollection.HOST_NAME), Filters.in(Constants.ID, deactivatedCollections));
List<ApiCollection> hCollections = ApiCollectionsDao.instance.findAll(filter, Projections.include(Constants.ID));
List<Integer> deactivatedIds = new ArrayList<>();
for(ApiCollection collection : hCollections){
if(deactivatedCollections.contains(collection.getId())){
deactivatedIds.add(collection.getId());
}
}

if(deactivatedIds.isEmpty()){
return SUCCESS.toUpperCase();
}

this.deactivatedHostnameCountMap = ApiCollectionsDao.instance.buildEndpointsCountToApiCollectionMap(
Filters.in(SingleTypeInfo._COLLECTION_IDS, deactivatedIds)
);
return SUCCESS.toUpperCase();
}

public String fetchAllCollections() {
this.apiCollections = ApiCollectionsDao.instance.findAll(new BasicDBObject());
this.apiCollections = fillApiCollectionsUrlCount(this.apiCollections);
this.apiCollections = fillApiCollectionsUrlCount(this.apiCollections, Filters.empty());
return Action.SUCCESS.toUpperCase();
}

@@ -178,7 +197,7 @@ public String fetchAllCollectionsBasic() {
}
}

this.apiCollections = fillApiCollectionsUrlCount(this.apiCollections);
this.apiCollections = fillApiCollectionsUrlCount(this.apiCollections, Filters.nin(SingleTypeInfo._API_COLLECTION_ID, deactivatedCollections));

return Action.SUCCESS.toUpperCase();
}
@@ -599,7 +618,7 @@ private List<ApiCollection> filterCollections(List<ApiCollection> apiCollections

public String deactivateCollections() {
this.apiCollections = filterCollections(this.apiCollections, false);
this.apiCollections = fillApiCollectionsUrlCount(this.apiCollections);
this.apiCollections = fillApiCollectionsUrlCount(this.apiCollections,Filters.empty());
int deltaUsage = (-1) * this.apiCollections.stream().mapToInt(apiCollection -> apiCollection.getUrlsCount()).sum();
List<Integer> apiCollectionIds = reduceApiCollectionToId(this.apiCollections);
ApiCollectionsDao.instance.updateMany(Filters.in(Constants.ID, apiCollectionIds),
@@ -613,7 +632,7 @@ public String activateCollections() {
if (this.apiCollections.isEmpty()) {
return Action.SUCCESS.toUpperCase();
}
this.apiCollections = fillApiCollectionsUrlCount(this.apiCollections);
this.apiCollections = fillApiCollectionsUrlCount(this.apiCollections,Filters.empty());

int accountId = Context.accountId.get();
FeatureAccess featureAccess = UsageMetricUtils.getFeatureAccess(accountId, MetricTypes.ACTIVE_ENDPOINTS);
@@ -788,4 +807,8 @@ public void setStartTimestamp(int startTimestamp) {
public void setEndTimestamp(int endTimestamp) {
this.endTimestamp = endTimestamp;
}

public Map<Integer, Integer> getDeactivatedHostnameCountMap() {
return deactivatedHostnameCountMap;
}
}
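
The new getCountForHostnameDeactivatedCollections action above exposes its result through the getDeactivatedHostnameCountMap() getter (wired up as the JSON root in struts.xml further down), so the response is just a map from collection id to endpoint count. The dashboard folds that map back into the deactivated collection rows on the client side (see the ApiCollections.jsx hunk below); a server-side equivalent would look roughly like this sketch, which assumes nothing beyond the ApiCollection getters and setters already used in this file.

import java.util.List;
import java.util.Map;

import com.akto.dto.ApiCollection;

// Illustrative helper, not part of the diff.
public class DeactivatedCountMergeSketch {

    public static void applyDeactivatedCounts(List<ApiCollection> deactivatedCollections,
                                               Map<Integer, Integer> deactivatedHostnameCountMap) {
        for (ApiCollection collection : deactivatedCollections) {
            Integer count = deactivatedHostnameCountMap.get(collection.getId());
            if (count != null) {
                // Same setter that fillApiCollectionsUrlCount() uses above.
                collection.setUrlsCount(count);
            }
        }
    }
}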
@@ -421,10 +421,25 @@ public String fetchSensitiveParamsForEndpoints() {
return Action.SUCCESS.toUpperCase();
}

public String loadRecentApiInfos(){
Bson filter = Filters.and(
Filters.nin(ApiInfo.ID_API_COLLECTION_ID,deactivatedCollections),
Filters.gte(ApiInfo.DISCOVERED_TIMESTAMP, startTimestamp),
Filters.lte(ApiInfo.DISCOVERED_TIMESTAMP, endTimestamp)
);
List<ApiInfo> apiInfos = ApiInfoDao.instance.findAll(filter);
for(ApiInfo info: apiInfos){
info.calculateActualAuth();
}
response = new BasicDBObject();
response.put("apiInfoList", apiInfos);
return Action.SUCCESS.toUpperCase();
}

public String loadRecentEndpoints() {
List<BasicDBObject> list = fetchRecentEndpoints(startTimestamp, endTimestamp);
attachTagsInAPIList(list);
attachAPIInfoListInResponse(list, -1);
response = new BasicDBObject();
response.put("endpoints", list);
return Action.SUCCESS.toUpperCase();
}

@@ -64,7 +64,7 @@ public void sendAlerts(String webhookUrl, String metricsUrl, int thresholdSecond

public List<TrafficMetricsAlert> filterTrafficMetricsAlertsList(List<TrafficMetricsAlert> trafficMetricsAlertList) {
List<ApiCollection> apiCollections = ApiCollectionsDao.instance.getMetaAll();
Map<Integer, Integer> countMap = ApiCollectionsDao.instance.buildEndpointsCountToApiCollectionMap();
Map<Integer, Integer> countMap = ApiCollectionsDao.instance.buildEndpointsCountToApiCollectionMap(Filters.empty());
Set<String> allowedHosts = new HashSet<>();
for (ApiCollection apiCollection: apiCollections) {
int apiCollectionId = apiCollection.getId();
41 changes: 41 additions & 0 deletions apps/dashboard/src/main/resources/struts.xml
@@ -865,6 +865,27 @@
</result>
</action>

<action name="api/loadRecentApiInfos" class="com.akto.action.observe.InventoryAction" method="loadRecentApiInfos">
<interceptor-ref name="json"/>
<interceptor-ref name="defaultStack" />
<interceptor-ref name="roleAccessInterceptor">
<param name="featureLabel">API_COLLECTIONS</param>
<param name="accessType">READ</param>
</interceptor-ref>

<result name="FORBIDDEN" type="json">
<param name="statusCode">403</param>
<param name="ignoreHierarchy">false</param>
<param name="includeProperties">^actionErrors.*</param>
</result>
<result name="SUCCESS" type="json">
<param name="root">response</param>
</result>
<result name="ERROR" type="httpheader">
<param name="status">401</param>
</result>
</action>

<action name="api/fetchNewParametersTrend" class="com.akto.action.observe.InventoryAction" method="fetchNewParametersTrend">
<interceptor-ref name="json"/>
<interceptor-ref name="defaultStack" />
@@ -1472,6 +1493,26 @@
<result name="ERROR" type="httpheader">
<param name="status">401</param>
</result>
</action>

<action name="api/getCountForHostnameDeactivatedCollections" class="com.akto.action.ApiCollectionsAction" method="getCountForHostnameDeactivatedCollections">
<interceptor-ref name="json"/>
<interceptor-ref name="defaultStack" />
<interceptor-ref name="roleAccessInterceptor">
<param name="featureLabel">API_COLLECTIONS</param>
<param name="accessType">READ</param>
</interceptor-ref>
<result name="FORBIDDEN" type="json">
<param name="statusCode">403</param>
<param name="ignoreHierarchy">false</param>
<param name="includeProperties">^actionErrors.*</param>
</result>
<result name="SUCCESS" type="json">
<param name="root">deactivatedHostnameCountMap</param>
</result>
<result name="ERROR" type="httpheader">
<param name="status">401</param>
</result>
</action>

<action name="api/getSensitiveInfoForCollections" class="com.akto.action.ApiCollectionsAction" method="fetchSensitiveInfoInCollections">
@@ -329,6 +329,15 @@ export default {
})
return resp
},

async loadRecentApiInfos (startTimestamp, endTimestamp) {
const resp = await request({
url: '/api/loadRecentApiInfos',
method: 'post',
data: { startTimestamp, endTimestamp }
})
return resp
},
async fetchSensitiveParamsForEndpoints (urls) {
const resp = await request({
url: '/api/fetchSensitiveParamsForEndpoints',
@@ -61,29 +61,40 @@ function ApiChanges() {
})
}

async function fetchData() {
let apiCollection, apiCollectionUrls, apiInfoList;
let apiPromises = [
api.loadRecentEndpoints(startTimestamp, endTimestamp),
api.loadRecentApiInfos(startTimestamp, endTimestamp),
api.fetchNewParametersTrend(startTimestamp, endTimestamp)
];
let results = await Promise.allSettled(apiPromises);
let endpointsFromStiResp = results[0].status === 'fulfilled' ? results[0].value : {"endpoints": []}
let endpointsFromApiInfos = results[1].status === 'fulfilled' ? results[1].value : {"apiInfoList" : []}
let parametersResp = results[2].status === 'fulfilled' ? results[2].value : {}

apiCollection = endpointsFromStiResp.endpoints.map(x => { return { ...x._id, startTs: x.startTs } })
apiCollectionUrls = endpointsFromStiResp.endpoints.map(x => x._id.url)
apiInfoList = endpointsFromApiInfos.apiInfoList

await api.fetchSensitiveParamsForEndpoints(apiCollectionUrls).then(allSensitiveFields => {
let sensitiveParams = allSensitiveFields.data.endpoints
setSensitiveParams([...sensitiveParams]);
apiCollection = transform.fillSensitiveParams(sensitiveParams, apiCollection);
})

let data = func.mergeApiInfoAndApiCollection(apiCollection, apiInfoList, collectionsMap);
const prettifiedData = transform.prettifyEndpointsData(data)
setNewEndpoints({prettify: prettifiedData, normal: data});

const trendObj = transform.findNewParametersCountTrend(parametersResp, startTimestamp, endTimestamp)
setNewParametersCount(trendObj.count)
setParametersTrend(trendObj.trend)

setLoading(false);
}

useEffect(() => {
async function fetchData() {
let apiCollection, apiCollectionUrls, apiInfoList;
await api.loadRecentEndpoints(startTimestamp, endTimestamp).then((res) => {
apiCollection = res.data.endpoints.map(x => { return { ...x._id, startTs: x.startTs } })
apiCollectionUrls = res.data.endpoints.map(x => x._id.url)
apiInfoList = res.data.apiInfoList
})
await api.fetchSensitiveParamsForEndpoints(apiCollectionUrls).then(allSensitiveFields => {
let sensitiveParams = allSensitiveFields.data.endpoints
setSensitiveParams([...sensitiveParams]);
apiCollection = transform.fillSensitiveParams(sensitiveParams, apiCollection);
})
let data = func.mergeApiInfoAndApiCollection(apiCollection, apiInfoList, collectionsMap);
const prettifiedData = transform.prettifyEndpointsData(data)
setNewEndpoints({prettify: prettifiedData, normal: data});
await api.fetchNewParametersTrend(startTimestamp, endTimestamp).then((resp) => {
const trendObj = transform.findNewParametersCountTrend(resp, startTimestamp, endTimestamp)
setNewParametersCount(trendObj.count)
setParametersTrend(trendObj.trend)
})
setLoading(false);
}
if (allCollections.length > 0) {
fetchData();
}
@@ -175,12 +175,12 @@ const convertToNewData = (collectionsArr, sensitiveInfoMap, severityInfoMap, cov
return{
...c,
displayNameComp: (<Box maxWidth="20vw"><TooltipText tooltip={c.displayName} text={c.displayName} textProps={{fontWeight: 'medium'}}/></Box>),
testedEndpoints: coverageMap[c.id] ? coverageMap[c.id] : 0,
testedEndpoints: c.urlsCount === 0 ? 0 : (coverageMap[c.id] ? coverageMap[c.id] : 0),
sensitiveInRespTypes: sensitiveInfoMap[c.id] ? sensitiveInfoMap[c.id] : [],
severityInfo: severityInfoMap[c.id] ? severityInfoMap[c.id] : {},
detected: func.prettifyEpoch(trafficInfoMap[c.id] || 0),
detectedTimestamp: trafficInfoMap[c.id] || 0,
riskScore: riskScoreMap[c.id] ? riskScoreMap[c.id] : 0,
detectedTimestamp: c.urlsCount === 0 ? 0 : (trafficInfoMap[c.id] || 0),
riskScore: c.urlsCount === 0 ? 0 : (riskScoreMap[c.id] ? riskScoreMap[c.id] : 0),
discovered: func.prettifyEpoch(c.startTs || 0),
}
})
@@ -295,6 +295,7 @@ function ApiCollections() {
let apiPromises = [
api.getCoverageInfoForCollections(),
api.getLastTrafficSeen(),
collectionApi.fetchCountForHostnameDeactivatedCollections()
];
if(shouldCallHeavyApis){
apiPromises = [
@@ -307,30 +308,31 @@ function ApiCollections() {
let coverageInfo = results[0].status === 'fulfilled' ? results[0].value : {};
// let coverageInfo = dummyData.coverageMap
let trafficInfo = results[1].status === 'fulfilled' ? results[1].value : {};
let deactivatedCountInfo = results[2].status === 'fulfilled' ? results[2].value : {};

let riskScoreObj = lastFetchedResp
let sensitiveInfo = lastFetchedSensitiveResp
let severityObj = lastFetchedSeverityResp

if(shouldCallHeavyApis){
if(results[2]?.status === "fulfilled"){
const res = results[2].value
if(results[3]?.status === "fulfilled"){
const res = results[3].value
riskScoreObj = {
criticalUrls: res.criticalEndpointsCount,
riskScoreMap: res.riskScoreOfCollectionsMap
}
}

if(results[3]?.status === "fulfilled"){
const res = results[3].value
if(results[4]?.status === "fulfilled"){
const res = results[4].value
sensitiveInfo ={
sensitiveUrls: res.sensitiveUrlsInResponse,
sensitiveInfoMap: res.sensitiveSubtypesInCollection
}
}

if(results[4]?.status === "fulfilled"){
const res = results[4].value
if(results[5]?.status === "fulfilled"){
const res = results[5].value
severityObj = res
}

@@ -349,7 +351,12 @@ function ApiCollections() {
setNormalData(dataObj.normal)

// Separate active and deactivated collections
const deactivatedCollections = dataObj.prettify.filter(c => c.deactivated);
const deactivatedCollections = dataObj.prettify.filter(c => c.deactivated).map((c)=>{
if(deactivatedCountInfo.hasOwnProperty(c.id)){
c.urlsCount = deactivatedCountInfo[c.id]
}
return c
});

// Calculate summary data only for active collections
const summary = transform.getSummaryData(dataObj.normal)
@@ -14,5 +14,12 @@ export default {
method: 'post',
data: { apiCollections: items }
})
},
fetchCountForHostnameDeactivatedCollections(){
return request({
url: '/api/getCountForHostnameDeactivatedCollections',
method: 'post',
data: {}
})
}
}
8 changes: 6 additions & 2 deletions libs/dao/src/main/java/com/akto/dao/ApiCollectionsDao.java
@@ -137,11 +137,15 @@ public List<Integer> fetchNonTrafficApiCollectionsIds() {
return apiCollectionIds;
}

public Map<Integer, Integer> buildEndpointsCountToApiCollectionMap() {
public Map<Integer, Integer> buildEndpointsCountToApiCollectionMap(Bson filter) {
Map<Integer, Integer> countMap = new HashMap<>();
List<Bson> pipeline = new ArrayList<>();

pipeline.add(Aggregates.match(SingleTypeInfoDao.filterForHostHeader(0, false)));
pipeline.add(Aggregates.match(Filters.and(
SingleTypeInfoDao.filterForHostHeader(0, false),
filter
)
));
BasicDBObject groupedId = new BasicDBObject(SingleTypeInfo._COLLECTION_IDS, "$" + SingleTypeInfo._COLLECTION_IDS);
pipeline.add(Aggregates.unwind("$" + SingleTypeInfo._COLLECTION_IDS));
pipeline.add(Aggregates.group(groupedId, Accumulators.sum("count",1)));
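
For reference, the filtered aggregation that buildEndpointsCountToApiCollectionMap now runs: match host-header SingleTypeInfos ANDed with the caller's filter, unwind collectionIds, then group and sum. The sketch below is a simplified, self-contained version — it groups directly on the unwound field, assumes the field is stored as collectionIds, uses org.bson.Document instead of BasicDBObject, and guesses at the result-reading loop, which sits outside the visible hunk.

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.bson.Document;
import org.bson.conversions.Bson;

import com.mongodb.client.MongoCollection;
import com.mongodb.client.model.Accumulators;
import com.mongodb.client.model.Aggregates;
import com.mongodb.client.model.Filters;

// Simplified sketch; the real method lives in ApiCollectionsDao.
public class EndpointCountAggregationSketch {

    public static Map<Integer, Integer> buildCountMap(MongoCollection<Document> singleTypeInfos,
                                                      Bson hostHeaderFilter,
                                                      Bson callerFilter) {
        Map<Integer, Integer> countMap = new HashMap<>();
        List<Bson> pipeline = new ArrayList<>();

        // Count only host-header STIs, further narrowed by the caller's filter
        // (Filters.empty() reproduces the old, unfiltered behaviour).
        pipeline.add(Aggregates.match(Filters.and(hostHeaderFilter, callerFilter)));

        // One document per (STI, collectionId) pair, then a sum per collection id.
        pipeline.add(Aggregates.unwind("$collectionIds"));
        pipeline.add(Aggregates.group("$collectionIds", Accumulators.sum("count", 1)));

        for (Document doc : singleTypeInfos.aggregate(pipeline)) {
            countMap.put(doc.getInteger("_id"), doc.getInteger("count"));
        }
        return countMap;
    }
}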
3 changes: 3 additions & 0 deletions libs/dao/src/main/java/com/akto/dao/ApiInfoDao.java
@@ -80,6 +80,9 @@ public void createIndicesIfAbsent() {

MCollection.createIndexIfAbsent(getDBName(), getCollName(),
new String[] { ApiInfo.RISK_SCORE, ApiInfo.ID_API_COLLECTION_ID }, false);

MCollection.createIndexIfAbsent(getDBName(), getCollName(),
new String[] {ApiInfo.DISCOVERED_TIMESTAMP }, false);
}

