
Commit

Merge pull request akto-api-security#350 from akto-api-security/hotfix/match_sensitive_data

dry run to invalidate sensitive data
ankush-jain-akto committed Jun 15, 2023
2 parents 8665fb7 + 97e17f1 commit 131dfb5
Showing 4 changed files with 178 additions and 3 deletions.
@@ -76,6 +76,9 @@ public String fetchSensitiveSampleData() {
);

for (SensitiveSampleData sensitiveSampleData: sensitiveSampleDataList) {
    if (sensitiveSampleData.getInvalid()) {
        continue;
    }
    for (String data: sensitiveSampleData.getSampleData()) {
        List<SingleTypeInfo.ParamId> s = this.sensitiveSampleData.getOrDefault(data, new ArrayList<>());
        s.add(sensitiveSampleData.getId());
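The skip above filters invalidated entries in application code while building the response. Equivalently, the exclusion could live in the query itself; a minimal sketch, assuming the MongoDB Java driver's Filters API and the invalid field this commit introduces:

import com.mongodb.client.model.Filters;
import org.bson.conversions.Bson;

public class InvalidFilterSketch {
    // Hypothetical query-side variant of the loop's skip: Filters.ne
    // matches documents where "invalid" is false *or* missing, which
    // also covers entries written before this field existed.
    public static Bson excludeInvalid() {
        return Filters.ne("invalid", true);
    }
}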
@@ -34,24 +34,28 @@
import com.akto.dto.testing.rate_limit.GlobalApiRateLimit;
import com.akto.dto.testing.rate_limit.RateLimitHandler;
import com.akto.dto.testing.sources.TestSourceConfig;
import com.akto.dto.traffic.SampleData;
import com.akto.dto.type.SingleTypeInfo;
import com.akto.log.LoggerMaker;
import com.akto.log.LoggerMaker.LogDb;
import com.akto.notifications.slack.DailyUpdate;
import com.akto.notifications.slack.TestSummaryGenerator;
import com.akto.testing.ApiExecutor;
import com.akto.testing.ApiWorkflowExecutor;
import com.akto.util.JSONUtils;
import com.akto.util.Pair;
import com.akto.util.enums.GlobalEnums.Severity;
import com.akto.util.enums.GlobalEnums.TestCategory;
import com.akto.utils.DashboardMode;
import com.akto.utils.HttpUtils;
import com.akto.utils.RedactSampleData;
import com.google.gson.Gson;
import com.mongodb.BasicDBList;
import com.mongodb.BasicDBObject;
import com.mongodb.ConnectionString;
import com.mongodb.client.model.Filters;
import com.mongodb.client.model.Updates;
import com.mongodb.bulk.BulkWriteResult;
import com.mongodb.client.MongoCursor;
import com.mongodb.client.model.*;
import com.slack.api.Slack;
import com.slack.api.webhook.WebhookResponse;

@@ -107,6 +111,22 @@ public static String getDomain() {
return domain;
}

public void setUpPiiCleanerScheduler() {
    scheduler.scheduleAtFixedRate(new Runnable() {
        public void run() {
            String mongoURI = System.getenv("AKTO_MONGO_CONN");
            DaoInit.init(new ConnectionString(mongoURI));
            Context.accountId.set(1_000_000);
            try {
                executePiiCleaner(true); // true = dry run: log candidates, write nothing
            } catch (Exception e) {
                // swallowed deliberately: an escaped exception would cancel
                // every future run of this scheduled task
            }
        }
    }, 0, 4, TimeUnit.HOURS);
}
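That empty-looking catch is load-bearing: with ScheduledExecutorService.scheduleAtFixedRate, any exception that escapes run() silently suppresses all subsequent executions of the task. A standalone sketch of the same guard (the scheduler field itself is declared outside this hunk; a plain newScheduledThreadPool stands in for it here):

import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;

public class SchedulerGuardSketch {
    public static void main(String[] args) {
        ScheduledExecutorService scheduler = Executors.newScheduledThreadPool(1);
        scheduler.scheduleAtFixedRate(() -> {
            try {
                System.out.println("cleaner tick"); // stand-in for executePiiCleaner(true)
            } catch (Exception e) {
                // without this catch, one failure would stop all later ticks
            }
        }, 0, 4, TimeUnit.HOURS);
    }
}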


public void setUpPiiAndTestSourcesScheduler(){
scheduler.scheduleAtFixedRate(new Runnable() {
public void run() {
@@ -252,6 +272,146 @@ static void executeTestSourcesFetch() {

}

static void executePiiCleaner(boolean isDryRun) {
    final int BATCH_SIZE = 100;
    int currMarker = 0;
    // only params seen in responses (responseCode != -1), and never headers
    Bson filterSsdQ =
        Filters.and(
            Filters.ne("_id.responseCode", -1),
            Filters.eq("_id.isHeader", false)
        );

    MongoCursor<SensitiveSampleData> cursor = null;
    int dataPoints = 0;
    List<SingleTypeInfo.ParamId> idsToDelete = new ArrayList<>();
    do {
        idsToDelete = new ArrayList<>();
        // page through SensitiveSampleData without pulling the heavy sample payloads
        cursor = SensitiveSampleDataDao.instance.getMCollection()
                .find(filterSsdQ)
                .projection(Projections.exclude(SensitiveSampleData.SAMPLE_DATA))
                .skip(currMarker)
                .limit(BATCH_SIZE)
                .cursor();
        currMarker += BATCH_SIZE;
        dataPoints = 0;
        loggerMaker.infoAndAddToDb("processing batch: " + currMarker, LogDb.DASHBOARD);
        while (cursor.hasNext()) {
            SensitiveSampleData ssd = cursor.next();
            SingleTypeInfo.ParamId ssdId = ssd.getId();
            Bson filterCommonSampleData =
                Filters.and(
                    Filters.eq("_id.method", ssdId.getMethod()),
                    Filters.eq("_id.url", ssdId.getUrl()),
                    Filters.eq("_id.apiCollectionId", ssdId.getApiCollectionId())
                );

            SampleData commonSampleData = SampleDataDao.instance.findOne(filterCommonSampleData);
            List<String> commonPayloads = commonSampleData.getSamples();

            // if the param no longer appears in live traffic for this endpoint,
            // the sensitive entry is stale
            if (!isSimilar(ssdId.getParam(), commonPayloads)) {
                idsToDelete.add(ssdId);
            }

            dataPoints++;
        }

        bulkSensitiveInvalidate(idsToDelete, isDryRun);
        bulkSingleTypeInfoDelete(idsToDelete, isDryRun);

    } while (dataPoints == BATCH_SIZE); // a short batch means the end was reached
}
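One fragile spot in the loop: findOne returns null when an endpoint has a sensitive param but no stored SampleData, and the subsequent commonSampleData.getSamples() call would then throw, aborting the whole run. A hypothetical null-safe drop-in for those two lines inside the while loop (not part of the commit):

SampleData commonSampleData = SampleDataDao.instance.findOne(filterCommonSampleData);
if (commonSampleData == null || commonSampleData.getSamples() == null) {
    // nothing to compare against; keep the entry and move on
    dataPoints++;
    continue;
}
List<String> commonPayloads = commonSampleData.getSamples();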

private static void bulkSensitiveInvalidate(List<SingleTypeInfo.ParamId> idsToDelete, boolean isDryRun) {
    ArrayList<WriteModel<SensitiveSampleData>> bulkSensitiveInvalidateUpdates = new ArrayList<>();
    for (SingleTypeInfo.ParamId paramId: idsToDelete) {
        String paramStr = "PII cleaner - invalidating: " + paramId.getApiCollectionId() + ": " + paramId.getMethod() + " " + paramId.getUrl() + " > " + paramId.getParam();
        String url = "dashboard/observe/inventory/" + paramId.getApiCollectionId() + "/" + Base64.getEncoder().encodeToString((paramId.getUrl() + " " + paramId.getMethod()).getBytes());
        loggerMaker.infoAndAddToDb(paramStr + " " + url, LogDb.DASHBOARD);
        List<Bson> filters = new ArrayList<>();
        filters.add(Filters.eq("url", paramId.getUrl()));
        filters.add(Filters.eq("method", paramId.getMethod()));
        filters.add(Filters.eq("responseCode", paramId.getResponseCode()));
        filters.add(Filters.eq("isHeader", paramId.getIsHeader()));
        filters.add(Filters.eq("param", paramId.getParam()));
        filters.add(Filters.eq("apiCollectionId", paramId.getApiCollectionId()));

        // mark invalid rather than delete, so the dashboard can simply filter
        bulkSensitiveInvalidateUpdates.add(new UpdateOneModel<>(Filters.and(filters), Updates.set("invalid", true)));
    }

    if (!bulkSensitiveInvalidateUpdates.isEmpty() && !isDryRun) {
        BulkWriteResult bwr = SensitiveSampleDataDao.instance.getMCollection()
                .bulkWrite(bulkSensitiveInvalidateUpdates, new BulkWriteOptions().ordered(false));

        loggerMaker.infoAndAddToDb("PII cleaner - invalidated " + bwr.getModifiedCount() + " in SensitiveSampleData", LogDb.DASHBOARD);
    }
}
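Worth double-checking against the query above: executePiiCleaner addresses SensitiveSampleData fields as _id.responseCode / _id.isHeader, while this update filter uses top-level paths. If ParamId really is embedded as the document _id, the update would need _id-prefixed paths to match anything; a hypothetical corrected filter:

// Hypothetical _id-prefixed variant, mirroring filterSsdQ's field paths;
// only needed if ParamId is stored as the document's _id.
Bson byParamId = Filters.and(
        Filters.eq("_id.url", paramId.getUrl()),
        Filters.eq("_id.method", paramId.getMethod()),
        Filters.eq("_id.responseCode", paramId.getResponseCode()),
        Filters.eq("_id.isHeader", paramId.getIsHeader()),
        Filters.eq("_id.param", paramId.getParam()),
        Filters.eq("_id.apiCollectionId", paramId.getApiCollectionId())
);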

private static void bulkSingleTypeInfoDelete(List<SingleTypeInfo.ParamId> idsToDelete, boolean isDryRun) {
    ArrayList<WriteModel<SingleTypeInfo>> bulkUpdatesForSingleTypeInfo = new ArrayList<>();
    for (SingleTypeInfo.ParamId paramId: idsToDelete) {
        String paramStr = "PII cleaner - deleting: " + paramId.getApiCollectionId() + ": " + paramId.getMethod() + " " + paramId.getUrl() + " > " + paramId.getParam();
        loggerMaker.infoAndAddToDb(paramStr, LogDb.DASHBOARD);
        List<Bson> filters = new ArrayList<>();
        filters.add(Filters.eq("url", paramId.getUrl()));
        filters.add(Filters.eq("method", paramId.getMethod()));
        filters.add(Filters.eq("responseCode", paramId.getResponseCode()));
        filters.add(Filters.eq("isHeader", paramId.getIsHeader()));
        filters.add(Filters.eq("param", paramId.getParam()));
        filters.add(Filters.eq("apiCollectionId", paramId.getApiCollectionId()));

        bulkUpdatesForSingleTypeInfo.add(new DeleteOneModel<>(Filters.and(filters)));
    }

    if (!bulkUpdatesForSingleTypeInfo.isEmpty() && !isDryRun) {
        BulkWriteResult bwr = SingleTypeInfoDao.instance.getMCollection()
                .bulkWrite(bulkUpdatesForSingleTypeInfo, new BulkWriteOptions().ordered(false));

        loggerMaker.infoAndAddToDb("PII cleaner - deleted " + bwr.getDeletedCount() + " from STI", LogDb.DASHBOARD);
    }
}
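In dry-run mode both bulk methods skip the write and therefore the count log; only the per-param lines appear. A hypothetical addition (not in the commit) to surface the would-be total in dry runs:

// Hypothetical dry-run summary: log the count a real run would delete.
if (isDryRun) {
    loggerMaker.infoAndAddToDb("PII cleaner - dry run: would delete "
            + bulkUpdatesForSingleTypeInfo.size() + " from STI", LogDb.DASHBOARD);
}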

private static final Gson gson = new Gson();

private static BasicDBObject extractJsonResponse(String message) {
    Map<String, Object> json = gson.fromJson(message, Map.class);

    String respPayload = (String) json.get("responsePayload");

    if (respPayload == null || respPayload.isEmpty()) {
        respPayload = "{}";
    }

    // BasicDBObject.parse needs a top-level object, so wrap bare arrays
    if (respPayload.startsWith("[")) {
        respPayload = "{\"json\": " + respPayload + "}";
    }

    BasicDBObject payload;
    try {
        payload = BasicDBObject.parse(respPayload);
    } catch (Exception e) {
        payload = BasicDBObject.parse("{}"); // treat unparseable payloads as empty
    }

    return payload;
}
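A quick illustration of that wrapping, using a made-up message (only the responsePayload key matters to the method; the rest is sample data):

// Hypothetical input: responsePayload holds a top-level JSON array.
String message = "{\"responsePayload\": \"[{\\\"email\\\": \\\"a@b.com\\\"}]\"}";
BasicDBObject obj = extractJsonResponse(message);
// obj is now {"json": [{"email": "a@b.com"}]} - parseable as an object,
// and flatten() can still reach the nested "email" key.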

private static boolean isSimilar(String param, List<String> commonPayloads) {
    for (String commonPayload: commonPayloads) {
        BasicDBObject commonPayloadObj = extractJsonResponse(commonPayload);
        if (JSONUtils.flatten(commonPayloadObj).containsKey(param)) {
            return true;
        }
    }

    return false;
}
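isSimilar asks one question: does the flattened common payload still contain the sensitive param's key? That hinges on JSONUtils.flatten emitting the same key format that ParamId.getParam() stores for nested fields; the separator convention below is an assumption, not confirmed by this diff:

// Illustrative only; the "#" separator for nested keys is assumed.
BasicDBObject payload = BasicDBObject.parse("{\"user\": {\"email\": \"a@b.com\"}}");
boolean stillPresent = JSONUtils.flatten(payload).containsKey("user#email");
// true  -> the param still occurs in live traffic, keep the entry
// false -> stale entry, queued for invalidation and STI deletion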

static void executePIISourceFetch() {
List<PIISource> piiSources = PIISourceDao.instance.findAll("active", true);
for (PIISource piiSource : piiSources) {
@@ -924,6 +1084,7 @@ public void runInitializerFunctions() {
PIISourceDao.instance.insertOne(piiSource);
}

setUpPiiCleanerScheduler();
setUpDailyScheduler();
setUpWebhookScheduler();
setUpPiiAndTestSourcesScheduler();
@@ -56,7 +56,7 @@
    <spinner v-else/>
</template>
<template slot="Sensitive Values">
    <sample-data-list :messages="sensitiveSampleData.filter(x => !x.invalid)" v-if="sensitiveSampleData"/>
    <spinner v-else/>
</template>
</layout-with-tabs>
11 changes: 11 additions & 0 deletions libs/dao/src/main/java/com/akto/dto/SensitiveSampleData.java
@@ -8,6 +8,8 @@ public class SensitiveSampleData {
private SingleTypeInfo.ParamId id;
public static final String SAMPLE_DATA = "sampleData";
private List<String> sampleData;

private boolean invalid;
public static final int cap = 10;

public SensitiveSampleData() {}
@@ -32,4 +34,13 @@ public List<String> getSampleData() {
public void setSampleData(List<String> sampleData) {
    this.sampleData = sampleData;
}

public boolean getInvalid() {
    return invalid;
}

public void setInvalid(boolean invalid) {
    this.invalid = invalid;
}
}
