Merge branch 'develop' into feature/change_test_host

ankush-jain-akto committed Jun 14, 2023
2 parents efe8631 + 532ed97 commit e8af7eb
Showing 16 changed files with 191 additions and 54 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/discord-release-msg.yml
@@ -4,7 +4,7 @@ name: Release messages to discord announcement channel
on:
release:
types:
- created
- published

jobs:
run_main:
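Note: switching the trigger from `created` to `published` means the Discord announcement fires only when a release actually goes live; `created` also fires when a draft release is saved, which is the usual reason for this change.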
1 change: 1 addition & 0 deletions README.md
@@ -188,6 +188,7 @@ OpenJDK 8, node(v18.7.0+ [link](https://nodejs.org/download/release/v18.7.0/)),
d. A list of running Java processes will show up. Select the web server process to attach the debugger

</details>
<a href="https://hits.sh/github.com/akto-api-security/hits.svg?label=Hits%20since%2020%2F5&color=FFFFFF&labelColor=FFFFFF"><img alt="Hits" src="https://hits.sh/github.com/akto-api-security/hits.svg?label=Hits%20since%2020%2F5&color=FFFFFF&labelColor=FFFFFF"/></a>

## Contributing

@@ -62,7 +62,7 @@ public void buildFromDb(boolean fetchAllSTI) {
apiInfoList = ApiInfoDao.instance.findAll(Filters.in("_id.apiCollectionId", apiCollectionIds));
}

List<FilterSampleData> filterSampleDataList = FilterSampleDataDao.instance.findAll(new BasicDBObject());
List<FilterSampleData> filterSampleDataList = new ArrayList<>(); // FilterSampleDataDao.instance.findAll(new BasicDBObject());

Map<ApiInfo.ApiInfoKey, Map<Integer, FilterSampleData>> filterSampleDataMapToApiInfo = new HashMap<>();
for (FilterSampleData filterSampleData: filterSampleDataList) {
@@ -329,19 +329,19 @@ public static List<WriteModel<ApiInfo>> getUpdatesForApiInfo(List<ApiInfo> apiIn

public static List<WriteModel<FilterSampleData>> getUpdatesForSampleData(List<FilterSampleData> filterSampleDataList) {
ArrayList<WriteModel<FilterSampleData>> bulkUpdates = new ArrayList<>();
if (filterSampleDataList == null) filterSampleDataList = new ArrayList<>();

for (FilterSampleData filterSampleData: filterSampleDataList) {
List<String> sampleData = filterSampleData.getSamples().get();
Bson bson = Updates.pushEach(FilterSampleData.SAMPLES+".elements", sampleData, new PushOptions().slice(-1 * FilterSampleData.cap));
bulkUpdates.add(
new UpdateOneModel<>(
FilterSampleDataDao.getFilter(filterSampleData.getId().getApiInfoKey(), filterSampleData.getId().getFilterId()),
bson,
new UpdateOptions().upsert(true)
)
);
}
// if (filterSampleDataList == null) filterSampleDataList = new ArrayList<>();
//
// for (FilterSampleData filterSampleData: filterSampleDataList) {
// List<String> sampleData = filterSampleData.getSamples().get();
// Bson bson = Updates.pushEach(FilterSampleData.SAMPLES+".elements", sampleData, new PushOptions().slice(-1 * FilterSampleData.cap));
// bulkUpdates.add(
// new UpdateOneModel<>(
// FilterSampleDataDao.getFilter(filterSampleData.getId().getApiInfoKey(), filterSampleData.getId().getFilterId()),
// bson,
// new UpdateOptions().upsert(true)
// )
// );
// }

return bulkUpdates;
}
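Note: the block commented out above relied on MongoDB's capped-array idiom — push new samples, then slice the array down to its newest `cap` entries in a single upsert. A minimal, self-contained sketch of that idiom (field name and cap value are hypothetical, not Akto's actual schema):

```java
import com.mongodb.client.model.PushOptions;
import com.mongodb.client.model.Updates;
import org.bson.conversions.Bson;

import java.util.Arrays;
import java.util.List;

public class CappedPushSketch {
    private static final int CAP = 50; // hypothetical cap on stored samples

    // pushEach appends every sample, then the negative slice trims the
    // array to its last CAP elements, so the document stays bounded.
    public static Bson cappedPush(List<String> samples) {
        return Updates.pushEach("samples.elements", samples,
                new PushOptions().slice(-1 * CAP));
    }

    public static void main(String[] args) {
        System.out.println(cappedPush(Arrays.asList("sample1", "sample2")));
    }
}
```

Combined with `UpdateOptions().upsert(true)`, this kept at most `cap` recent samples per (apiInfoKey, filterId) without a separate trim pass; the commit disables collecting that sample data rather than changing the idiom.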
25 changes: 20 additions & 5 deletions apps/api-runtime/src/main/java/com/akto/utils/CustomAuthUtil.java
@@ -7,6 +7,7 @@
import java.util.Set;
import java.util.ArrayList;

import com.mongodb.BasicDBList;
import com.mongodb.client.model.*;
import org.bson.conversions.Bson;

@@ -17,9 +18,11 @@
import com.akto.dto.type.SingleTypeInfo;
import com.akto.runtime.policies.AuthPolicy;

import static com.akto.dto.ApiInfo.ALL_AUTH_TYPES_FOUND;

public class CustomAuthUtil {
public static Bson getFilters(ApiInfo apiInfo,Boolean isHeader,List<String> params){

public static Bson getFilters(ApiInfo apiInfo, Boolean isHeader, List<String> params){
return Filters.and(
Filters.eq(SingleTypeInfo._RESPONSE_CODE, -1),
Filters.eq(SingleTypeInfo._URL,apiInfo.getId().getUrl()),
@@ -32,7 +35,7 @@ public static Bson getFilters(ApiInfo apiInfo,Boolean isHeader,List<String> para
public static void customAuthTypeUtil(List<CustomAuthType> customAuthTypes){

Set<ApiInfo.AuthType> unauthenticatedTypes = new HashSet<>(Collections.singletonList(ApiInfo.AuthType.UNAUTHENTICATED));
List<ApiInfo> apiInfos = ApiInfoDao.instance.findAll(Filters.eq("allAuthTypesFound",unauthenticatedTypes));
List<ApiInfo> apiInfos = ApiInfoDao.instance.findAll(Filters.eq(ALL_AUTH_TYPES_FOUND,unauthenticatedTypes));

Set<ApiInfo.AuthType> customTypes = new HashSet<>(Collections.singletonList(ApiInfo.AuthType.CUSTOM));
Set<Set<ApiInfo.AuthType>> authTypes = new HashSet<>(Collections.singletonList(customTypes));
@@ -61,20 +64,24 @@ public static void customAuthTypeUtil(List<CustomAuthType> customAuthTypes){
if (!headerAndCookieKeys.isEmpty() && !customAuthType.getHeaderKeys().isEmpty() && headerAndCookieKeys.containsAll(customAuthType.getHeaderKeys())) {
UpdateOneModel<ApiInfo> update = new UpdateOneModel<>(
ApiInfoDao.getFilter(apiInfo.getId()),
Updates.set(ApiInfo.ALL_AUTH_TYPES_FOUND, authTypes),
Updates.set(ALL_AUTH_TYPES_FOUND, authTypes),
new UpdateOptions().upsert(false)
);
apiInfosUpdates.add(update);
break;
}

if (customAuthType.getPayloadKeys().isEmpty()) {
continue;
}

// checking if all payload keys occur in any unauthenticated API
List<SingleTypeInfo> payloadSTIs = SingleTypeInfoDao.instance.findAll(getFilters(apiInfo, false, customAuthType.getPayloadKeys()));
if (payloadSTIs!=null && payloadSTIs.size()==customAuthType.getPayloadKeys().size()) {

UpdateOneModel<ApiInfo> update = new UpdateOneModel<>(
ApiInfoDao.getFilter(apiInfo.getId()),
Updates.set(ApiInfo.ALL_AUTH_TYPES_FOUND, authTypes),
Updates.set(ALL_AUTH_TYPES_FOUND, authTypes),
new UpdateOptions().upsert(false)
);
apiInfosUpdates.add(update);
@@ -86,4 +93,12 @@ public static void customAuthTypeUtil(List<CustomAuthType> customAuthTypes){
}
}
}

public static void resetAllCustomAuthTypes() {
Set<ApiInfo.AuthType> customTypes = new HashSet<>(Collections.singletonList(ApiInfo.AuthType.CUSTOM));
Set<ApiInfo.AuthType> unauthenticatedType = new HashSet<>(Collections.singletonList(ApiInfo.AuthType.UNAUTHENTICATED));
Set<Set<ApiInfo.AuthType>> listUnauthenticatedType = new HashSet<>(Collections.singletonList(unauthenticatedType));

ApiInfoDao.instance.updateMany(Filters.eq(ALL_AUTH_TYPES_FOUND, customTypes), Updates.set(ALL_AUTH_TYPES_FOUND, listUnauthenticatedType));
}
}
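Note: `resetAllCustomAuthTypes()` flips every `ApiInfo` currently tagged with the `{CUSTOM}` auth-type set back to `{UNAUTHENTICATED}` in one `updateMany`, so that `customAuthTypeUtil` can re-classify those APIs against the current custom auth type definitions; the dashboard action below wires up exactly this reset-then-recompute sequence.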
@@ -23,13 +23,13 @@
public class CustomAuthUtilTest extends MongoBasedTest{

public static SingleTypeInfo generateSingleTypeInfo(String param, Boolean isHeader) {
SingleTypeInfo.ParamId p = new SingleTypeInfo.ParamId("/api","POST",200,isHeader,param,SingleTypeInfo.GENERIC,ACCOUNT_ID, false);
SingleTypeInfo.ParamId p = new SingleTypeInfo.ParamId("/api","POST",-1,isHeader,param,SingleTypeInfo.GENERIC,ACCOUNT_ID, false);
return new SingleTypeInfo(p,new HashSet<>(),new HashSet<>(),0,0,0, new CappedSet<>(), SingleTypeInfo.Domain.ENUM, SingleTypeInfo.ACCEPTED_MAX_VALUE, SingleTypeInfo.ACCEPTED_MIN_VALUE);
}

@Test
public void test1(){
ApiInfo apiInfo = new ApiInfo(ACCOUNT_ID, "/api", Method.POST);
ApiInfo apiInfo = new ApiInfo(ACCOUNT_ID, "/api", Method.POST);
Set<Set<ApiInfo.AuthType>> authTypes = new HashSet<>();
Set<ApiInfo.AuthType> types = new HashSet<>();
types.add(ApiInfo.AuthType.UNAUTHENTICATED);
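Note: the fixture's response code changes from 200 to -1 so the generated `SingleTypeInfo` entries actually match `getFilters`, which queries on `SingleTypeInfo._RESPONSE_CODE == -1` — the value this codebase appears to use to mark request-side (rather than response) parameters.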
@@ -12,6 +12,8 @@
import com.akto.dao.UsersDao;
import com.akto.dao.context.Context;
import com.akto.dto.CustomAuthType;
import com.akto.log.LoggerMaker;
import com.akto.testing.ApiExecutor;
import com.mongodb.BasicDBObject;
import com.mongodb.client.model.Filters;
import com.mongodb.client.model.Updates;
@@ -30,6 +32,7 @@ public class CustomAuthTypeAction extends UserAction{
private CustomAuthType customAuthType;

private static final ScheduledExecutorService executorService = Executors.newSingleThreadScheduledExecutor();
private static final LoggerMaker loggerMaker = new LoggerMaker(CustomAuthTypeAction.class);

public String fetchCustomAuthTypes(){
customAuthTypes = CustomAuthTypeDao.instance.findAll(new BasicDBObject());
@@ -116,6 +119,25 @@ public String updateCustomAuthTypeStatus(){
return Action.SUCCESS.toUpperCase();
}

public String resetAllCustomAuthTypes() {
try {
CustomAuthUtil.resetAllCustomAuthTypes();
SingleTypeInfo.fetchCustomAuthTypes();
int accountId = Context.accountId.get();
executorService.schedule( new Runnable() {
public void run() {
Context.accountId.set(accountId);
CustomAuthUtil.customAuthTypeUtil(SingleTypeInfo.activeCustomAuthTypes);
}
}, 5 , TimeUnit.SECONDS);

return SUCCESS.toUpperCase();
} catch (Exception e) {
loggerMaker.errorAndAddToDb(e.getMessage(), LoggerMaker.LogDb.DASHBOARD);
return ERROR.toUpperCase();
}
}

public void setName(String name) {
this.name = name;
}
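Note: `resetAllCustomAuthTypes` copies `Context.accountId` into a local variable before scheduling the recompute, presumably because the account context is thread-local and would otherwise be absent on the executor thread five seconds later. A minimal sketch of that hand-off pattern (names hypothetical; assumes a `ThreadLocal`-backed context):

```java
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;

public class ThreadLocalHandoff {
    static final ThreadLocal<Integer> accountId = new ThreadLocal<>();
    static final ScheduledExecutorService pool =
            Executors.newSingleThreadScheduledExecutor();

    public static void main(String[] args) throws InterruptedException {
        accountId.set(1_000_000);
        int captured = accountId.get(); // capture on the request thread
        pool.schedule(() -> {
            accountId.set(captured);    // re-establish on the worker thread
            System.out.println("recomputing for account " + accountId.get());
        }, 5, TimeUnit.SECONDS);
        pool.shutdown();
        pool.awaitTermination(10, TimeUnit.SECONDS);
    }
}
```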
17 changes: 8 additions & 9 deletions apps/dashboard/src/main/java/com/akto/action/HarAction.java
@@ -3,19 +3,16 @@
import com.akto.dao.ApiCollectionsDao;
import com.akto.dao.BurpPluginInfoDao;
import com.akto.dto.ApiCollection;
import com.akto.dto.ApiToken.Utility;
import com.akto.har.HAR;
import com.akto.log.LoggerMaker;
import com.akto.dto.ApiToken.Utility;
import com.akto.utils.DashboardMode;
import com.akto.utils.Utils;
import com.mongodb.BasicDBObject;
import com.mongodb.client.model.Filters;
import com.opensymphony.xwork2.Action;
import com.sun.jna.Library;
import com.sun.jna.Native;
import com.sun.jna.Structure;
import org.apache.commons.io.FileUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.sun.jna.*;

import java.io.File;
import java.io.IOException;
@@ -33,11 +30,12 @@ public class HarAction extends UserAction {

private boolean skipKafka = DashboardMode.isLocalDeployment();
private byte[] tcpContent;
private static final Logger logger = LoggerFactory.getLogger(HarAction.class);
private static final LoggerMaker loggerMaker = new LoggerMaker(HarAction.class);

@Override
public String execute() throws IOException {
ApiCollection apiCollection = null;
loggerMaker.infoAndAddToDb("HarAction.execute() started", LoggerMaker.LogDb.DASHBOARD);
if (apiCollectionName != null) {
apiCollection = ApiCollectionsDao.instance.findByName(apiCollectionName);
if (apiCollection == null) {
@@ -95,12 +93,13 @@ public String execute() throws IOException {

try {
HAR har = new HAR();
logger.info("Har file upload processing for collectionId : {}", apiCollectionId);
loggerMaker.infoAndAddToDb("Har file upload processing for collectionId:" + apiCollectionId, LoggerMaker.LogDb.DASHBOARD);
List<String> messages = har.getMessages(harString, apiCollectionId);
harErrors = har.getErrors();
Utils.pushDataToKafka(apiCollectionId, topic, messages, harErrors, skipKafka);
loggerMaker.infoAndAddToDb("Har file upload processing for collectionId:" + apiCollectionId + " finished", LoggerMaker.LogDb.DASHBOARD);
} catch (Exception e) {
logger.error("Exception while parsing harString");
loggerMaker.errorAndAddToDb("Exception while parsing harString", LoggerMaker.LogDb.DASHBOARD);
e.printStackTrace();
return SUCCESS.toUpperCase();
}
@@ -50,11 +50,11 @@ execute:
requests:
- req:
- modify_query_param:
payload_location_key: evil.com/?p=${payload_location_value}
payload_location_key: http://evil.com/?p=${payload_location_value}
- modify_body_param:
payload_location_key: evil.com/?p=${payload_location_value}
payload_location_key: http://evil.com/?p=${payload_location_value}
- modify_header:
payload_location_key: evil.com/?p=${payload_location_value}
payload_location_key: http://evil.com/?p=${payload_location_value}


validate:
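Note: the template's payloads now carry an explicit `http://` scheme, making the injected value an unambiguous absolute URL; without a scheme, `evil.com/?p=...` can be interpreted as a relative path and the redirect check may never fire.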
13 changes: 13 additions & 0 deletions apps/dashboard/src/main/resources/struts.xml
@@ -9,6 +9,7 @@
<constant name="struts.mapper.alwaysSelectFullNamespace" value="false"/>
<constant name="struts.patternMatcher" value="regex" />
<constant name="struts.matcher.appendNamedParameters" value="true"/>
<constant name="struts.multipart.maxSize" value = "209715200" />

<package name="user" namespace="" extends="struts-default, json-default">
<default-action-ref name="home" />
@@ -752,6 +753,18 @@
</result>
</action>


<action name="api/resetAllCustomAuthTypes" class="com.akto.action.CustomAuthTypeAction" method="resetAllCustomAuthTypes">
<interceptor-ref name="json"/>
<interceptor-ref name="defaultStack" />
<result name="SUCCESS" type="json"/>
<result name="ERROR" type="json">
<param name="statusCode">422</param>
<param name="ignoreHierarchy">false</param>
<param name="includeProperties">^actionErrors.*</param>
</result>
</action>

<action name="api/updateCustomAuthTypeStatus" class="com.akto.action.CustomAuthTypeAction" method="updateCustomAuthTypeStatus">
<interceptor-ref name="json"/>
<interceptor-ref name="defaultStack" />
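Note: the new `struts.multipart.maxSize` constant, 209715200 bytes = 200 × 1024 × 1024, raises the multipart upload ceiling to 200 MB — comfortably above the 50 MB cap the dashboard enforces client-side below. The `api/resetAllCustomAuthTypes` action mirrors its `updateCustomAuthTypeStatus` sibling: same interceptor stack, a JSON result on success, and a 422 with serialized `actionErrors` on failure.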
@@ -28,13 +28,11 @@ export default {
}
})
},
uploadHarFile(content, apiCollectionId, skipKafka) {
uploadHarFile(formData) {
return request({
url: '/api/uploadHar',
method: 'post',
data: {
content, apiCollectionId, skipKafka
}
data: formData,
})
},
uploadTcpFile(content, apiCollectionId, skipKafka) {
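Note: `uploadHarFile` now posts a caller-built `FormData` (multipart/form-data) instead of JSON-encoding the HAR content into the request body — the multipart path is what makes the larger uploads permitted by the new `struts.multipart.maxSize` limit practical.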
@@ -384,6 +384,14 @@ export default {
// of the file in the v-model prop
let isHar = file.name.endsWith(".har")
if(isHar && file.size >= 52428800){
window._AKTO.$emit('SHOW_SNACKBAR', {
show: true,
text: "Please limit the file size to less than 50 MB",
color: 'red'
})
return
}
let isJson = file.name.endsWith(".json")
let isPcap = file.name.endsWith(".pcap")
if (isHar || isJson) {
@@ -394,7 +402,46 @@
reader.onload = async () => {
let skipKafka = false;//window.location.href.indexOf("http://localhost") != -1
if (isHar) {
await this.$store.dispatch('inventory/uploadHarFile', { content: JSON.parse(reader.result), filename: file.name, skipKafka})
var formData = new FormData();
formData.append("harString", reader.result)
formData.append("hsFile", reader.result)
formData.append("skipKafka", skipKafka)
window._AKTO.$emit('SHOW_SNACKBAR', {
show: true,
text: "We are uploading your har file, please dont refresh the page!",
color: 'green'
})
this.$store.dispatch('inventory/uploadHarFile', { formData }).then(resp => {
if(file.size > 2097152){
window._AKTO.$emit('SHOW_SNACKBAR', {
show: true,
text: "We have successfully read your file, please refresh the page in a few mins to check your APIs",
color: 'green'
})
}
else {
window._AKTO.$emit('SHOW_SNACKBAR', {
show: true,
text: "Your Har file has been successfully processed, please refresh the page to see your APIs",
color: 'green'
})
}
}).catch(err => {
if(err.message.includes(404)){
window._AKTO.$emit('SHOW_SNACKBAR', {
show: true,
text: "Please limit the file size to less than 50 MB",
color: 'red'
})
} else {
window._AKTO.$emit('SHOW_SNACKBAR', {
show: true,
text: "Something went wrong while processing the file",
color: 'red'
})
}
})
} else if (isPcap) {
var arrayBuffer = reader.result
var bytes = new Uint8Array(arrayBuffer);
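Note on the magic numbers in this component: 52428800 = 50 × 1024 × 1024, the hard 50 MB cap that shows the red snackbar and aborts the upload; 2097152 = 2 × 1024 × 1024, a 2 MB threshold that only changes the success message, warning users with bigger files to wait a few minutes before refreshing.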
@@ -185,8 +185,9 @@ const inventory = {
return resp
})
},
uploadHarFile({commit,state},{content,filename, skipKafka}) {
return api.uploadHarFile(content,state.apiCollectionId,skipKafka).then(resp => {
uploadHarFile({commit,state},{formData}) {
formData.append("apiCollectionId",state.apiCollectionId);
return api.uploadHarFile(formData).then(resp => {
return resp
})
},