Close #1625

Squashed commit of the following:

commit a01f12c4e5831c43dbe3ae8a80f4db12077dbb2a
Author: ArtemBaskal <a.baskal@adguard.com>
Date:   Mon Jul 13 15:50:15 2020 +0300

    minor

commit b8ceb17a3b12e47de81af85fa30c2961a4a42fab
Merge: 702c55ed fecf5494
Author: Andrey Meshkov <am@adguard.com>
Date:   Mon Jul 13 15:32:44 2020 +0300

    Merge branch 'feature/1625' of ssh://bit.adguard.com:7999/dns/adguard-home into feature/1625

commit 702c55edc1ba2ab330eda8189498dfff33c92f5f
Author: Andrey Meshkov <am@adguard.com>
Date:   Mon Jul 13 15:32:41 2020 +0300

    fix makefile when there's no gopath

commit fecf5494b8c1719cb70044f336fe99c341802d25
Merge: d4c811f9 8a417604
Author: ArtemBaskal <a.baskal@adguard.com>
Date:   Mon Jul 13 15:30:21 2020 +0300

    Merge branch 'master' into feature/1625

commit d4c811f9630dee448012434e2f50f34ab8b8b899
Merge: b0a037da a33164bf
Author: ArtemBaskal <a.baskal@adguard.com>
Date:   Mon Jul 13 12:35:16 2020 +0300

    Merge branch 'master' into feature/1625

commit b0a037daf48913fd8a4cda16d520835630072520
Author: ArtemBaskal <a.baskal@adguard.com>
Date:   Mon Jul 13 12:34:42 2020 +0300

    Simplify sync logs action creators

commit eeeb620ae100a554f59783fc2a14fad525ce1a82
Author: ArtemBaskal <a.baskal@adguard.com>
Date:   Mon Jul 13 11:17:08 2020 +0300

    Review changes

commit 4cbc59eec5c794df18d6cb9b33f39091ce7cfde9
Author: ArtemBaskal <a.baskal@adguard.com>
Date:   Fri Jul 10 15:23:37 2020 +0300

    Update tracker tooltip class

commit 0a705301d4726af1c8f7f7a5776b11d338ab1d54
Author: ArtemBaskal <a.baskal@adguard.com>
Date:   Fri Jul 10 13:46:10 2020 +0300

    Replace depricated addListener

commit 2ac0843239853da1725d2e038b5e4cbaef253732
Author: ArtemBaskal <a.baskal@adguard.com>
Date:   Fri Jul 10 13:39:45 2020 +0300

    Validate response_status url param

commit 2178039ebbd0cbe2c0048cb5ab7ad7c7e7571bd1
Author: ArtemBaskal <a.baskal@adguard.com>
Date:   Fri Jul 10 12:58:18 2020 +0300

    Fix setting empty search value, use strict search on drill down, extract refreshFilteredLogs action

commit 4b11c6a34049bd133077bad035d267f87cdec141
Author: ArtemBaskal <a.baskal@adguard.com>
Date:   Thu Jul 9 19:41:48 2020 +0300

    Normalize input search

commit 3fded3575b21bdd017723f5e487c268074599e4f
Author: ArtemBaskal <a.baskal@adguard.com>
Date:   Thu Jul 9 18:20:05 2020 +0300

    Optimize search

commit 9073e032e4aadcdef9d826f16a10c300ee46b30e
Author: ArtemBaskal <a.baskal@adguard.com>
Date:   Thu Jul 9 14:28:41 2020 +0300

    Update url string params

commit a18cffc8bfac83103fb78ffae2f786f89aea8ba1
Author: ArtemBaskal <a.baskal@adguard.com>
Date:   Thu Jul 9 12:55:50 2020 +0300

    Fix reset search

commit 33f769aed56369aacedd29ffd52b527b527d4a59
Author: ArtemBaskal <a.baskal@adguard.com>
Date:   Wed Jul 8 19:13:21 2020 +0300

    WIP: Add permlinks

commit 4422641cf5cff06c8485ea23d58e5d42f7cca5cd
Author: ArtemBaskal <a.baskal@adguard.com>
Date:   Wed Jul 8 14:42:28 2020 +0300

    Refactor Counters, add response_status links to query log

commit e8bb0b70ca55f31ef3fcdda13dcaad6f5d8479b5
Author: ArtemBaskal <a.baskal@adguard.com>
Date:   Tue Jul 7 19:33:04 2020 +0300

    Delete unnecessary file

commit b20816e9dad79866e3ec04d3093c972967b3b226
Merge: 6281084e d2c3af5c
Author: ArtemBaskal <a.baskal@adguard.com>
Date:   Tue Jul 7 19:30:44 2020 +0300

    Resolve conflict

commit d2c3af5cf227d76f876d6d94ca016d4b242b2515
Author: ArtemBaskal <a.baskal@adguard.com>
Date:   Tue Jul 7 17:14:51 2020 +0300

    + client: Add git hooks

... and 5 more commits

import { createAction } from 'redux-actions';

import apiClient from '../api/Api';
import { normalizeLogs, getParamsForClientsSearch, addClientInfo } from '../helpers/helpers';
import {
    DEFAULT_LOGS_FILTER,
    TABLE_DEFAULT_PAGE_SIZE,
    TABLE_FIRST_PAGE,
} from '../helpers/constants';
import { addErrorToast, addSuccessToast } from './toasts';
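
/**
 * Requests one page of the query log, normalizes the entries and,
 * when possible, enriches them with client information.
 *
 * @param {object} config
 * @param {string} config.older_than timestamp to page from ('' requests the newest entries)
 * @param {object} config.filter current log filter params passed to the API
 * @returns {Promise<object>} normalized logs together with the request params
 */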
const getLogsWithParams = async (config) => {
    const { older_than, filter, ...values } = config;
    const rawLogs = await apiClient.getQueryLog({
        ...filter,
        older_than,
    });
    const { data, oldest } = rawLogs;
    let logs = normalizeLogs(data);
    const clientsParams = getParamsForClientsSearch(logs, 'client');

    if (Object.keys(clientsParams).length > 0) {
        const clients = await apiClient.findClients(clientsParams);
        logs = addClientInfo(logs, clients, 'client');
    }

    return {
        logs,
        oldest,
        older_than,
        filter,
        ...values,
    };
};

export const getAdditionalLogsRequest = createAction('GET_ADDITIONAL_LOGS_REQUEST');
export const getAdditionalLogsFailure = createAction('GET_ADDITIONAL_LOGS_FAILURE');
export const getAdditionalLogsSuccess = createAction('GET_ADDITIONAL_LOGS_SUCCESS');
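
/**
 * Keeps requesting older log pages while the current page or the accumulated
 * result holds fewer than TABLE_DEFAULT_PAGE_SIZE entries and older entries
 * are still available, accumulating the results recursively.
 *
 * @param {object} data result of getLogsWithParams
 * @param {object} filter current log filter
 * @param {Function} dispatch store dispatch
 * @param {object} [total] accumulator with the logs collected so far
 * @returns {Promise<object>} collected logs (and the oldest reached timestamp)
 */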
const checkFilteredLogs = async (data, filter, dispatch, total) => {
    const { logs, oldest } = data;
    const totalData = total || { logs };

    const needToGetAdditionalLogs = (logs.length < TABLE_DEFAULT_PAGE_SIZE
        || totalData.logs.length < TABLE_DEFAULT_PAGE_SIZE)
        && oldest !== '';

    if (needToGetAdditionalLogs) {
        dispatch(getAdditionalLogsRequest());

        try {
            const additionalLogs = await getLogsWithParams({
                older_than: oldest,
                filter,
            });
            if (additionalLogs.oldest.length > 0) {
                return await checkFilteredLogs(additionalLogs, filter, dispatch, {
                    logs: [...totalData.logs, ...additionalLogs.logs],
                    oldest: additionalLogs.oldest,
                });
            }
            dispatch(getAdditionalLogsSuccess());
            return totalData;
        } catch (error) {
            dispatch(addErrorToast({ error }));
            dispatch(getAdditionalLogsFailure(error));
        }
    }

    dispatch(getAdditionalLogsSuccess());
    return totalData;
};

export const setLogsPagination = createAction('LOGS_PAGINATION');
export const setLogsPage = createAction('SET_LOG_PAGE');
export const toggleDetailedLogs = createAction('TOGGLE_DETAILED_LOGS');

export const getLogsRequest = createAction('GET_LOGS_REQUEST');
export const getLogsFailure = createAction('GET_LOGS_FAILURE');
export const getLogsSuccess = createAction('GET_LOGS_SUCCESS');
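
/**
 * Thunk that loads a page of the query log. When the log is filtered,
 * it pulls in additional older pages via checkFilteredLogs and updates
 * the table pagination for the current page.
 *
 * @param {object} config params forwarded to getLogsWithParams (e.g. older_than)
 * @returns {Function} thunk
 */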
export const getLogs = (config) => async (dispatch, getState) => {
    dispatch(getLogsRequest());
    try {
        const { isFiltered, filter, page } = getState().queryLogs;
        const data = await getLogsWithParams({
            ...config,
            filter,
        });

        if (isFiltered) {
            const additionalData = await checkFilteredLogs(data, filter, dispatch);
            const updatedData = additionalData.logs ? { ...data, ...additionalData } : data;
            dispatch(getLogsSuccess(updatedData));
            dispatch(setLogsPagination({
                page,
                pageSize: TABLE_DEFAULT_PAGE_SIZE,
            }));
        } else {
            dispatch(getLogsSuccess(data));
        }
    } catch (error) {
        dispatch(addErrorToast({ error }));
        dispatch(getLogsFailure(error));
    }
};

export const setLogsFilterRequest = createAction('SET_LOGS_FILTER_REQUEST');

/**
 * Creates the SET_LOGS_FILTER_REQUEST action for the given filter.
 *
 * @param {object} filter
 * @param {string} filter.search
 * @param {string} filter.response_status query field of the RESPONSE_FILTER object
 * @returns {object} action
 */
export const setLogsFilter = (filter) => setLogsFilterRequest(filter);

export const setFilteredLogsRequest = createAction('SET_FILTERED_LOGS_REQUEST');
export const setFilteredLogsFailure = createAction('SET_FILTERED_LOGS_FAILURE');
export const setFilteredLogsSuccess = createAction('SET_FILTERED_LOGS_SUCCESS');
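
/**
 * Thunk that applies a filter: fetches the newest entries with it, requests
 * older pages via checkFilteredLogs when the first page is short, and resets
 * the table to the first page.
 *
 * @param {object} filter see setLogsFilter for the expected shape
 * @returns {Function} thunk
 */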
export const setFilteredLogs = (filter) => async (dispatch) => {
    dispatch(setFilteredLogsRequest());
    try {
        const data = await getLogsWithParams({
            older_than: '',
            filter,
        });
        const additionalData = await checkFilteredLogs(data, filter, dispatch);
        const updatedData = additionalData.logs ? { ...data, ...additionalData } : data;

        dispatch(setFilteredLogsSuccess({
            ...updatedData,
            filter,
        }));
        dispatch(setLogsPage(TABLE_FIRST_PAGE));
    } catch (error) {
        dispatch(addErrorToast({ error }));
        dispatch(setFilteredLogsFailure(error));
    }
};
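
/** Resets the current filter by re-fetching the log with DEFAULT_LOGS_FILTER. */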
export const resetFilteredLogs = () => setFilteredLogs(DEFAULT_LOGS_FILTER);
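
/** Re-fetches the log using the filter currently stored in state. */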
export const refreshFilteredLogs = () => async (dispatch, getState) => {
    const { filter } = getState().queryLogs;
    await dispatch(setFilteredLogs(filter));
};

export const clearLogsRequest = createAction('CLEAR_LOGS_REQUEST');
export const clearLogsFailure = createAction('CLEAR_LOGS_FAILURE');
export const clearLogsSuccess = createAction('CLEAR_LOGS_SUCCESS');
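
/**
 * Thunk that clears the query log on the server and shows a success toast.
 *
 * @returns {Function} thunk
 */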
export const clearLogs = () => async (dispatch) => {
    dispatch(clearLogsRequest());
    try {
        await apiClient.clearQueryLog();
        dispatch(clearLogsSuccess());
        dispatch(addSuccessToast('query_log_cleared'));
    } catch (error) {
        dispatch(addErrorToast({ error }));
        dispatch(clearLogsFailure(error));
    }
};

export const getLogsConfigRequest = createAction('GET_LOGS_CONFIG_REQUEST');
export const getLogsConfigFailure = createAction('GET_LOGS_CONFIG_FAILURE');
export const getLogsConfigSuccess = createAction('GET_LOGS_CONFIG_SUCCESS');
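
/**
 * Thunk that fetches the current query log configuration from the server.
 *
 * @returns {Function} thunk
 */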
export const getLogsConfig = () => async (dispatch) => {
    dispatch(getLogsConfigRequest());
    try {
        const data = await apiClient.getQueryLogInfo();
        dispatch(getLogsConfigSuccess(data));
    } catch (error) {
        dispatch(addErrorToast({ error }));
        dispatch(getLogsConfigFailure());
    }
};

export const setLogsConfigRequest = createAction('SET_LOGS_CONFIG_REQUEST');
export const setLogsConfigFailure = createAction('SET_LOGS_CONFIG_FAILURE');
export const setLogsConfigSuccess = createAction('SET_LOGS_CONFIG_SUCCESS');
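
/**
 * Thunk that saves the query log configuration and shows a success toast.
 *
 * @param {object} config query log settings to save
 * @returns {Function} thunk
 */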
export const setLogsConfig = (config) => async (dispatch) => {
    dispatch(setLogsConfigRequest());
    try {
        await apiClient.setQueryLogConfig(config);
        dispatch(addSuccessToast('config_successfully_saved'));
        dispatch(setLogsConfigSuccess(config));
    } catch (error) {
        dispatch(addErrorToast({ error }));
        dispatch(setLogsConfigFailure());
    }
};