Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

test cases for activity feed backend #1976

Merged
merged 17 commits into from
Mar 21, 2024
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Next Next commit
issue-700-backend: fetchAllLogs route added with pagination
  • Loading branch information
palakgupta2712 committed Mar 14, 2024
commit 1e0b157d5a2193c6e972623a334812a98dff8b3d
10 changes: 8 additions & 2 deletions constants/logs.ts
Original file line number Diff line number Diff line change
@@ -1,4 +1,6 @@
const logType = {
import { REQUEST_LOG_TYPE } from "./requests";

export const logType = {
PROFILE_DIFF_APPROVED: "PROFILE_DIFF_APPROVED",
PROFILE_DIFF_REJECTED: "PROFILE_DIFF_REJECTED",
CLOUDFLARE_CACHE_PURGED: "CLOUDFLARE_CACHE_PURGED",
Expand All @@ -8,6 +10,10 @@ const logType = {
TASKS_MISSED_UPDATES_ERRORS: "TASKS_MISSED_UPDATES_ERRORS",
DISCORD_INVITES: "DISCORD_INVITES",
EXTERNAL_SERVICE: "EXTERNAL_SERVICE",
EXTENSION_REQUESTS: "extensionRequests",
...REQUEST_LOG_TYPE,
};

module.exports = { logType };
export const ALL_LOGS_FETCHED_SUCCESSFULLY = "All Logs fetched successfully";
export const LOGS_FETCHED_SUCCESSFULLY = "Logs fetched successfully";
export const ERROR_WHILE_FETCHING_LOGS = "Error while fetching logs";
64 changes: 62 additions & 2 deletions controllers/logs.js
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
import { getPaginatedLink } from "../utils/helper";
import { ALL_LOGS_FETCHED_SUCCESSFULLY, ERROR_WHILE_FETCHING_LOGS, LOGS_FETCHED_SUCCESSFULLY } from "../constants/logs";
const logsQuery = require("../models/logs");
const { SOMETHING_WENT_WRONG } = require("../constants/errorMessages");

Expand All @@ -11,15 +13,73 @@ const fetchLogs = async (req, res) => {
try {
const logs = await logsQuery.fetchLogs(req.query, req.params.type);
return res.json({
message: "Logs returned successfully!",
message: LOGS_FETCHED_SUCCESSFULLY,
logs,
});
} catch (error) {
logger.error(`Error while fetching logs: ${error}`);
logger.error(`${ERROR_WHILE_FETCHING_LOGS}: ${error}`);
return res.boom.serverUnavailable(SOMETHING_WENT_WRONG);
}
};

/**
 * Fetches all logs, paginated either by page number or by next/prev cursors.
 * Guarded behind the `dev=true` feature flag.
 *
 * @param {Object} req - Express request; pagination params live in req.query.
 * @param {Object} res - Express response (with boom helpers).
 * @returns {Promise<Object>} JSON payload of logs plus pagination links,
 *   204 when there are no logs, or a boom error response.
 */
const fetchAllLogs = async (req, res) => {
  const { query } = req;
  try {
    // Feature-flagged endpoint: reject callers that did not opt in.
    if (query.dev !== "true") {
      return res.boom.badRequest("Please use feature flag to make this request!");
    }

    const logs = await logsQuery.fetchAllLogs(query);
    // Nothing matched this page — respond with No Content.
    if (!logs || logs.allLogs.length === 0) {
      return res.status(204).send();
    }

    const { allLogs, next, prev, page } = logs;

    // Page-number pagination: return a link to the next page.
    if (page) {
      return res.status(200).json({
        message: ALL_LOGS_FETCHED_SUCCESSFULLY,
        data: allLogs,
        page: `/logs?page=${page}&dev=${query.dev}`,
      });
    }

    // Cursor pagination: build next/prev links only when cursors exist.
    const nextUrl = next
      ? getPaginatedLink({ endpoint: "/logs", query, cursorKey: "next", docId: next })
      : null;
    const prevUrl = prev
      ? getPaginatedLink({ endpoint: "/logs", query, cursorKey: "prev", docId: prev })
      : null;

    return res.status(200).json({
      message: ALL_LOGS_FETCHED_SUCCESSFULLY,
      data: allLogs,
      next: nextUrl,
      prev: prevUrl,
    });
  } catch (err) {
    logger.error(ERROR_WHILE_FETCHING_LOGS, err);
    return res.boom.badImplementation(ERROR_WHILE_FETCHING_LOGS);
  }
};

// Public API of the logs controller.
module.exports = {
  fetchLogs,
  fetchAllLogs,
};
120 changes: 119 additions & 1 deletion models/logs.js
Original file line number Diff line number Diff line change
Expand Up @@ -2,9 +2,10 @@ const firestore = require("../utils/firestore");
const { getBeforeHourTime } = require("../utils/time");
const logsModel = firestore.collection("logs");
const admin = require("firebase-admin");
const { logType } = require("../constants/logs");
const { logType, ERROR_WHILE_FETCHING_LOGS } = require("../constants/logs");
const { INTERNAL_SERVER_ERROR } = require("../constants/errorMessages");
const { getFullName } = require("../utils/users");
const SIZE = 5;

/**
* Adds log
Expand Down Expand Up @@ -155,9 +156,126 @@ const fetchLastAddedCacheLog = async (id) => {
}
};

/**
 * Fetches logs with pagination support.
 *
 * Three pagination modes, checked in priority order:
 *  1. `page` — offset-based pagination (1-indexed page numbers);
 *  2. `next` — cursor pagination, starting at the given document id;
 *  3. `prev` — cursor pagination, ending at the given document id
 *     (combined with `limitToLast` so the page preceding the cursor is read).
 *
 * @param {Object} query - Request query params.
 * @param {string} [query.type] - Comma-separated log types to filter on.
 * @param {string} [query.prev] - Document id to page backwards from.
 * @param {string} [query.next] - Document id to page forwards from.
 * @param {string} [query.page] - 1-indexed page number for offset pagination.
 * @param {string|number} [query.size] - Page size; defaults to SIZE.
 * @returns {Promise<Object|null>} `{ allLogs, prev, next, page }` where
 *   prev/next are document-id cursors (or null), or null when nothing matched.
 * @throws Rethrows any Firestore error after logging it once.
 */
const fetchAllLogs = async (query) => {
  let { type, prev, next, page, size = SIZE } = query;
  // Always pass a radix. A missing/invalid page parses to NaN, which is
  // falsy and therefore skips the offset branch below.
  size = parseInt(size, 10);
  page = parseInt(page, 10);

  try {
    let requestQuery = logsModel;

    if (type) {
      // Renamed from `logType` to avoid shadowing the imported constant.
      const requestedTypes = type.split(",");
      if (requestedTypes.length >= 1) {
        requestQuery = requestQuery.where("type", "in", requestedTypes);
      }
    }

    requestQuery = requestQuery.orderBy("timestamp", "desc");
    let requestQueryDoc = requestQuery;

    // Backwards pagination needs the LAST `size` docs of the bounded range.
    if (prev) {
      requestQueryDoc = requestQueryDoc.limitToLast(size);
    } else {
      requestQueryDoc = requestQueryDoc.limit(size);
    }

    if (page) {
      // NOTE(review): Firestore `offset` still reads (and bills for) all
      // skipped documents — confirm this cost is acceptable for deep pages.
      const startAfter = (page - 1) * size;
      requestQueryDoc = requestQueryDoc.offset(startAfter);
    } else if (next) {
      const doc = await logsModel.doc(next).get();
      requestQueryDoc = requestQueryDoc.startAt(doc);
    } else if (prev) {
      const doc = await logsModel.doc(prev).get();
      requestQueryDoc = requestQueryDoc.endAt(doc);
    }

    const snapshot = await requestQueryDoc.get();
    if (snapshot.empty) {
      return null;
    }

    // Probe one document on either side of this page to build cursors.
    // The two probes are independent, so run them in parallel.
    const first = snapshot.docs[0];
    const last = snapshot.docs[snapshot.docs.length - 1];
    const [prevDoc, nextDoc] = await Promise.all([
      requestQuery.endBefore(first).limitToLast(1).get(),
      requestQuery.startAfter(last).limit(1).get(),
    ]);

    const allLogs = snapshot.docs.map((doc) => ({ ...doc.data() }));

    return {
      allLogs,
      prev: prevDoc.empty ? null : prevDoc.docs[0].id,
      next: nextDoc.empty ? null : nextDoc.docs[0].id,
      page: page ? page + 1 : null,
    };
  } catch (error) {
    // Log once here (the previous version logged the same failure twice via
    // nested try/catch blocks); callers decide how to surface it.
    logger.error(ERROR_WHILE_FETCHING_LOGS, error);
    throw error;
  }
};

// Public API of the logs model.
module.exports = {
  addLog,
  fetchLogs,
  fetchCacheLogs,
  fetchLastAddedCacheLog,
  fetchAllLogs,
};
1 change: 1 addition & 0 deletions routes/logs.js
Original file line number Diff line number Diff line change
Expand Up @@ -6,5 +6,6 @@
const { SUPERUSER } = require("../constants/roles");

// Superuser-only log endpoints. The specific-type route must be registered
// before (or alongside) the catch-all "/" route.
// NOTE(review): CodeQL flags these handlers as performing authorization
// without rate limiting — consider adding rate-limiting middleware; verify
// against the project's existing middleware stack.
router.get("/:type", authenticate, authorizeRoles([SUPERUSER]), logs.fetchLogs);
router.get("/", authenticate, authorizeRoles([SUPERUSER]), logs.fetchAllLogs);

Check failure

Code scanning / CodeQL

Missing rate limiting High

This route handler performs authorization, but is not rate-limited.

module.exports = router;
Loading