Merge: new Statistics module
Close #827

* commit '17a26155cdf0150ea9d8cf915311f16fafa6354a':
  - client: convert average processing time to milliseconds
  + client: handle clear statistics
  + client: update time interval notice
  + client: add notice to the time interval input
  + client: functional components for dashboard
  * client: fix missed translations
  * client: change the result message after adding a filter
  + client: handle the new statistics format
  + client: handle time interval for statistics
  + openapi: add /stats_config, /stats_info methods
  * openapi: update /stats method
  + statistics: store in separate file
  * stats: remove old code
  + config: "statistics_interval" setting
  + /control/stats_config, /control/stats_info
This commit is contained in commit e346eddf94.

AGHTechDoc.md (113 changed lines)
@@ -37,6 +37,11 @@ Contents:
 * Services Filter
 * API: Get blocked services list
 * API: Set blocked services list
+* Statistics
+* API: Get statistics data
+* API: Clear statistics data
+* API: Set statistics parameters
+* API: Get statistics parameters


 ## First startup
@@ -863,3 +868,111 @@ Request:
 Response:

 	200 OK
+
+
+## Statistics
+
+Load (main thread):
+. Load data from the last bucket from DB for the current hour
+
+Runtime (DNS worker threads):
+. Update current unit
+
+Runtime (goroutine):
+. Periodically check that current unit should be flushed to file (when the current hour changes)
+. If so, flush it, allocate a new empty unit
+
+Runtime (HTTP worker threads):
+. To respond to "Get statistics" API request we:
+ . load all units from file
+ . load current unit
+ . process data from all loaded units:
+  . sum up data for "total counters" output values
+  . add value into "per time unit counters" output arrays
+  . aggregate data for "top_" output arrays; sort in descending order
+
+Unload (main thread):
+. Flush current unit to file
+
+
+### API: Get statistics data
+
+Request:
+
+	GET /control/stats
+
+Response:
+
+	200 OK
+
+	{
+		time_units: hours | days
+
+		// total counters:
+		num_dns_queries: 123
+		num_blocked_filtering: 123
+		num_replaced_safebrowsing: 123
+		num_replaced_safesearch: 123
+		num_replaced_parental: 123
+		avg_processing_time: 123.123
+
+		// per time unit counters
+		dns_queries: [123, ...]
+		blocked_filtering: [123, ...]
+		replaced_parental: [123, ...]
+		replaced_safebrowsing: [123, ...]
+
+		top_queried_domains: [
+			{host: 123},
+			...
+		]
+		top_blocked_domains: [
+			{host: 123},
+			...
+		]
+		top_clients: [
+			{IP: 123},
+			...
+		]
+	}
+
+
+### API: Clear statistics data
+
+Request:
+
+	POST /control/stats_reset
+
+Response:
+
+	200 OK
+
+
+### API: Set statistics parameters
+
+Request:
+
+	POST /control/stats_config
+
+	{
+		"interval": 1 | 7 | 30 | 90
+	}
+
+Response:
+
+	200 OK
+
+
+### API: Get statistics parameters
+
+Request:
+
+	GET /control/stats_info
+
+Response:
+
+	200 OK
+
+	{
+		"interval": 1 | 7 | 30 | 90
+	}
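For reference, a minimal sketch (not part of the commit) of how a browser client could exercise the endpoints documented above with fetch(); paths and payloads follow the documentation, error handling and authentication are omitted:

	// Exercise the new statistics endpoints; values follow the docs above.
	async function statsExample() {
	    // Set retention to 7 days.
	    await fetch('/control/stats_config', {
	        method: 'POST',
	        headers: { 'Content-Type': 'application/json' },
	        body: JSON.stringify({ interval: 7 }),
	    });

	    // Read the current configuration back.
	    const info = await (await fetch('/control/stats_info')).json();
	    console.log(info.interval); // 1 | 7 | 30 | 90

	    // Fetch aggregated statistics.
	    const stats = await (await fetch('/control/stats')).json();
	    console.log(stats.num_dns_queries, stats.avg_processing_time);

	    // Clear all collected data.
	    await fetch('/control/stats_reset', { method: 'POST' });
	}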
package.json
@@ -6,7 +6,7 @@
     "build-dev": "NODE_ENV=development ./node_modules/.bin/webpack --config webpack.dev.js",
     "watch": "NODE_ENV=development ./node_modules/.bin/webpack --config webpack.dev.js --watch",
     "build-prod": "NODE_ENV=production ./node_modules/.bin/webpack --config webpack.prod.js",
-    "lint": "eslint frontend/"
+    "lint": "eslint client/"
   },
   "dependencies": {
     "@nivo/line": "^0.49.1",
client locales: en.json
@@ -4,7 +4,6 @@
     "upstream_parallel": "Use parallel queries to speed up resolving by simultaneously querying all upstream servers",
     "bootstrap_dns": "Bootstrap DNS servers",
     "bootstrap_dns_desc": "Bootstrap DNS servers are used to resolve IP addresses of the DoH/DoT resolvers you specify as upstreams.",
-    "url_added_successfully": "URL added successfully",
     "check_dhcp_servers": "Check for DHCP servers",
     "save_config": "Save config",
     "enabled_dhcp": "DHCP server enabled",
@@ -67,18 +66,22 @@
     "disabled_protection": "Disabled protection",
     "refresh_statics": "Refresh statistics",
     "dns_query": "DNS Queries",
-    "blocked_by": "Blocked by Filters",
+    "blocked_by": "<0>Blocked by Filters</0>",
     "stats_malware_phishing": "Blocked malware/phishing",
     "stats_adult": "Blocked adult websites",
     "stats_query_domain": "Top queried domains",
     "for_last_24_hours": "for the last 24 hours",
+    "for_last_days": "for the last {{count}} day",
+    "for_last_days_plural": "for the last {{count}} days",
     "no_domains_found": "No domains found",
     "requests_count": "Requests count",
     "top_blocked_domains": "Top blocked domains",
     "top_clients": "Top clients",
     "no_clients_found": "No clients found",
     "general_statistics": "General statistics",
-    "number_of_dns_query_24_hours": "A number of DNS quieries processed for the last 24 hours",
+    "number_of_dns_query_days": "A number of DNS queries processed for the last {{count}} day",
+    "number_of_dns_query_days_plural": "A number of DNS queries processed for the last {{count}} days",
+    "number_of_dns_query_24_hours": "A number of DNS queries processed for the last 24 hours",
     "number_of_dns_query_blocked_24_hours": "A number of DNS requests blocked by adblock filters and hosts blocklists",
     "number_of_dns_query_blocked_24_hours_by_sec": "A number of DNS requests blocked by the AdGuard browsing security module",
     "number_of_dns_query_blocked_24_hours_adult": "A number of adult websites blocked",
@@ -300,7 +303,6 @@
     "client_deleted": "Client \"{{key}}\" successfully deleted",
     "client_added": "Client \"{{key}}\" successfully added",
     "client_updated": "Client \"{{key}}\" successfully updated",
-    "table_statistics": "Requests count (last 24 hours)",
     "clients_not_found": "No clients found",
     "client_confirm_delete": "Are you sure you want to delete client \"{{key}}\"?",
     "filter_confirm_delete": "Are you sure you want to delete filter?",
@@ -361,5 +363,19 @@
     "encryption_certificates_source_path": "Set a certificates file path",
     "encryption_certificates_source_content":"Paste the certificates contents",
     "encryption_key_source_path": "Set a private key file",
-    "encryption_key_source_content": "Paste the private key contents"
+    "encryption_key_source_content": "Paste the private key contents",
+    "stats_params": "Statistics configuration",
+    "config_successfully_saved": "Configuration successfully saved",
+    "interval_24_hour": "24 hours",
+    "interval_days": "{{count}} day",
+    "interval_days_plural": "{{count}} days",
+    "domain": "Domain",
+    "answer": "Answer",
+    "filter_added_successfully": "The filter has been successfully added",
+    "statistics_logs": "Statistics and logs",
+    "statistics_retention": "Statistics retention",
+    "statistics_retention_desc": "If you decrease the interval value, some data will be lost",
+    "statistics_clear": " Clear statistics",
+    "statistics_clear_confirm": "Are you sure you want to clear statistics?",
+    "statistics_cleared": "Statistics successfully cleared"
 }
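The new *_plural keys rely on i18next's count-based pluralization. A minimal sketch (assuming the project's existing i18next setup) of how the client can pick between the singular and plural variants, mirroring what Dashboard.js does further down:

	import { t } from 'i18next';

	// interval is the retention period in days (1 | 7 | 30 | 90).
	const getSubtitle = interval => (
	    interval === 1
	        ? t('for_last_24_hours')
	        : t('for_last_days', { count: interval }) // i18next picks for_last_days_plural when count > 1
	);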
client/src/actions/index.js
@@ -1,11 +1,10 @@
 import { createAction } from 'redux-actions';
-import round from 'lodash/round';
 import { t } from 'i18next';
 import { showLoading, hideLoading } from 'react-redux-loading-bar';
 import axios from 'axios';

 import versionCompare from '../helpers/versionCompare';
-import { normalizeHistory, normalizeFilteringStatus, normalizeLogs, normalizeTextarea, sortClients } from '../helpers/helpers';
+import { normalizeFilteringStatus, normalizeLogs, normalizeTextarea, sortClients } from '../helpers/helpers';
 import { SETTINGS_NAMES, CHECK_TIMEOUT } from '../helpers/constants';
 import { getTlsStatus } from './encryption';
 import Api from '../api/Api';
@@ -246,27 +245,6 @@ export const getClients = () => async (dispatch) => {
     }
 };

-export const getTopStatsRequest = createAction('GET_TOP_STATS_REQUEST');
-export const getTopStatsFailure = createAction('GET_TOP_STATS_FAILURE');
-export const getTopStatsSuccess = createAction('GET_TOP_STATS_SUCCESS');
-
-export const getTopStats = () => async (dispatch, getState) => {
-    dispatch(getTopStatsRequest());
-    const timer = setInterval(async () => {
-        const state = getState();
-        if (state.dashboard.isCoreRunning) {
-            clearInterval(timer);
-            try {
-                const stats = await apiClient.getGlobalStatsTop();
-                dispatch(getTopStatsSuccess(stats));
-            } catch (error) {
-                dispatch(addErrorToast({ error }));
-                dispatch(getTopStatsFailure(error));
-            }
-        }
-    }, 100);
-};
-
 export const dnsStatusRequest = createAction('DNS_STATUS_REQUEST');
 export const dnsStatusFailure = createAction('DNS_STATUS_FAILURE');
 export const dnsStatusSuccess = createAction('DNS_STATUS_SUCCESS');
@@ -314,27 +292,6 @@ export const disableDns = () => async (dispatch) => {
     }
 };

-export const getStatsRequest = createAction('GET_STATS_REQUEST');
-export const getStatsFailure = createAction('GET_STATS_FAILURE');
-export const getStatsSuccess = createAction('GET_STATS_SUCCESS');
-
-export const getStats = () => async (dispatch) => {
-    dispatch(getStatsRequest());
-    try {
-        const stats = await apiClient.getGlobalStats();
-
-        const processedStats = {
-            ...stats,
-            avg_processing_time: round(stats.avg_processing_time, 2),
-        };
-
-        dispatch(getStatsSuccess(processedStats));
-    } catch (error) {
-        dispatch(addErrorToast({ error }));
-        dispatch(getStatsFailure());
-    }
-};
-
 export const getLogsRequest = createAction('GET_LOGS_REQUEST');
 export const getLogsFailure = createAction('GET_LOGS_FAILURE');
 export const getLogsSuccess = createAction('GET_LOGS_SUCCESS');
@@ -473,22 +430,6 @@ export const refreshFilters = () => async (dispatch) => {

 export const handleRulesChange = createAction('HANDLE_RULES_CHANGE');

-export const getStatsHistoryRequest = createAction('GET_STATS_HISTORY_REQUEST');
-export const getStatsHistoryFailure = createAction('GET_STATS_HISTORY_FAILURE');
-export const getStatsHistorySuccess = createAction('GET_STATS_HISTORY_SUCCESS');
-
-export const getStatsHistory = () => async (dispatch) => {
-    dispatch(getStatsHistoryRequest());
-    try {
-        const statsHistory = await apiClient.getGlobalStatsHistory();
-        const normalizedHistory = normalizeHistory(statsHistory);
-        dispatch(getStatsHistorySuccess(normalizedHistory));
-    } catch (error) {
-        dispatch(addErrorToast({ error }));
-        dispatch(getStatsHistoryFailure());
-    }
-};
-
 export const addFilterRequest = createAction('ADD_FILTER_REQUEST');
 export const addFilterFailure = createAction('ADD_FILTER_FAILURE');
 export const addFilterSuccess = createAction('ADD_FILTER_SUCCESS');
client/src/actions/stats.js (new file)
@@ -0,0 +1,78 @@
+import { createAction } from 'redux-actions';
+
+import Api from '../api/Api';
+import { addErrorToast, addSuccessToast } from './index';
+import { normalizeTopStats, secondsToMilliseconds } from '../helpers/helpers';
+
+const apiClient = new Api();
+
+export const getStatsConfigRequest = createAction('GET_LOGS_CONFIG_REQUEST');
+export const getStatsConfigFailure = createAction('GET_LOGS_CONFIG_FAILURE');
+export const getStatsConfigSuccess = createAction('GET_LOGS_CONFIG_SUCCESS');
+
+export const getStatsConfig = () => async (dispatch) => {
+    dispatch(getStatsConfigRequest());
+    try {
+        const data = await apiClient.getStatsInfo();
+        dispatch(getStatsConfigSuccess(data));
+    } catch (error) {
+        dispatch(addErrorToast({ error }));
+        dispatch(getStatsConfigFailure());
+    }
+};
+
+export const setStatsConfigRequest = createAction('SET_STATS_CONFIG_REQUEST');
+export const setStatsConfigFailure = createAction('SET_STATS_CONFIG_FAILURE');
+export const setStatsConfigSuccess = createAction('SET_STATS_CONFIG_SUCCESS');
+
+export const setStatsConfig = config => async (dispatch) => {
+    dispatch(setStatsConfigRequest());
+    try {
+        await apiClient.setStatsConfig(config);
+        dispatch(addSuccessToast('config_successfully_saved'));
+        dispatch(setStatsConfigSuccess(config));
+    } catch (error) {
+        dispatch(addErrorToast({ error }));
+        dispatch(setStatsConfigFailure());
+    }
+};
+
+export const getStatsRequest = createAction('GET_STATS_REQUEST');
+export const getStatsFailure = createAction('GET_STATS_FAILURE');
+export const getStatsSuccess = createAction('GET_STATS_SUCCESS');
+
+export const getStats = () => async (dispatch) => {
+    dispatch(getStatsRequest());
+    try {
+        const stats = await apiClient.getStats();
+
+        const normalizedStats = {
+            ...stats,
+            top_blocked_domains: normalizeTopStats(stats.top_blocked_domains),
+            top_clients: normalizeTopStats(stats.top_clients),
+            top_queried_domains: normalizeTopStats(stats.top_queried_domains),
+            avg_processing_time: secondsToMilliseconds(stats.avg_processing_time),
+        };
+
+        dispatch(getStatsSuccess(normalizedStats));
+    } catch (error) {
+        dispatch(addErrorToast({ error }));
+        dispatch(getStatsFailure());
+    }
+};
+
+export const resetStatsRequest = createAction('RESET_STATS_REQUEST');
+export const resetStatsFailure = createAction('RESET_STATS_FAILURE');
+export const resetStatsSuccess = createAction('RESET_STATS_SUCCESS');
+
+export const resetStats = () => async (dispatch) => {
+    dispatch(getStatsRequest());
+    try {
+        await apiClient.resetStats();
+        dispatch(addSuccessToast('statistics_cleared'));
+        dispatch(resetStatsSuccess());
+    } catch (error) {
+        dispatch(addErrorToast({ error }));
+        dispatch(resetStatsFailure());
+    }
+};
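For orientation, a sketch of the kind of reducer these thunks feed (not the reducer shipped with this commit; it assumes redux-actions' handleActions, and the field names mirror what Dashboard.js below reads from the stats slice — processingStats, processingGetConfig, interval, numDnsQueries, avgProcessingTime):

	import { handleActions } from 'redux-actions';
	import * as actions from './stats';

	const defaultState = {
	    processingStats: true,
	    processingGetConfig: true,
	    interval: 1,
	};

	// Action creators made with createAction() stringify to their type,
	// so they can be used directly as handleActions keys.
	const stats = handleActions({
	    [actions.getStatsConfigRequest]: state => ({ ...state, processingGetConfig: true }),
	    [actions.getStatsConfigSuccess]: (state, { payload }) => ({
	        ...state, interval: payload.interval, processingGetConfig: false,
	    }),
	    [actions.getStatsConfigFailure]: state => ({ ...state, processingGetConfig: false }),
	    [actions.getStatsRequest]: state => ({ ...state, processingStats: true }),
	    [actions.getStatsSuccess]: (state, { payload }) => ({
	        ...state,
	        numDnsQueries: payload.num_dns_queries,
	        avgProcessingTime: payload.avg_processing_time,
	        processingStats: false,
	    }),
	    [actions.getStatsFailure]: state => ({ ...state, processingStats: false }),
	}, defaultState);

	export default stats;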
client/src/api/Api.js
@@ -1,6 +1,4 @@
 import axios from 'axios';
-import subHours from 'date-fns/sub_hours';
-import dateFormat from 'date-fns/format';

 export default class Api {
     baseUrl = 'control';
@@ -24,13 +22,9 @@ export default class Api {
     }

     // Global methods
-    GLOBAL_RESTART = { path: 'restart', method: 'POST' };
     GLOBAL_START = { path: 'start', method: 'POST' };
-    GLOBAL_STATS = { path: 'stats', method: 'GET' };
-    GLOBAL_STATS_HISTORY = { path: 'stats_history', method: 'GET' };
     GLOBAL_STATUS = { path: 'status', method: 'GET' };
     GLOBAL_STOP = { path: 'stop', method: 'POST' };
-    GLOBAL_STATS_TOP = { path: 'stats_top', method: 'GET' };
     GLOBAL_QUERY_LOG = { path: 'querylog', method: 'GET' };
     GLOBAL_QUERY_LOG_ENABLE = { path: 'querylog_enable', method: 'POST' };
     GLOBAL_QUERY_LOG_DISABLE = { path: 'querylog_disable', method: 'POST' };
@@ -41,11 +35,6 @@ export default class Api {
     GLOBAL_DISABLE_PROTECTION = { path: 'disable_protection', method: 'POST' };
     GLOBAL_UPDATE = { path: 'update', method: 'POST' };

-    restartGlobalFiltering() {
-        const { path, method } = this.GLOBAL_RESTART;
-        return this.makeRequest(path, method);
-    }
-
     startGlobalFiltering() {
         const { path, method } = this.GLOBAL_START;
         return this.makeRequest(path, method);
@@ -56,36 +45,11 @@ export default class Api {
         return this.makeRequest(path, method);
     }

-    getGlobalStats() {
-        const { path, method } = this.GLOBAL_STATS;
-        return this.makeRequest(path, method);
-    }
-
-    getGlobalStatsHistory() {
-        const { path, method } = this.GLOBAL_STATS_HISTORY;
-        const format = 'YYYY-MM-DDTHH:mm:ssZ';
-        const dateNow = Date.now();
-
-        const config = {
-            params: {
-                start_time: dateFormat(subHours(dateNow, 24), format),
-                end_time: dateFormat(dateNow, format),
-                time_unit: 'hours',
-            },
-        };
-        return this.makeRequest(path, method, config);
-    }
-
     getGlobalStatus() {
         const { path, method } = this.GLOBAL_STATUS;
         return this.makeRequest(path, method);
     }

-    getGlobalStatsTop() {
-        const { path, method } = this.GLOBAL_STATS_TOP;
-        return this.makeRequest(path, method);
-    }
-
     getQueryLog() {
         const { path, method } = this.GLOBAL_QUERY_LOG;
         return this.makeRequest(path, method);
@@ -527,4 +491,34 @@ export default class Api {
         };
         return this.makeRequest(path, method, parameters);
     }
+
+    // Settings for statistics
+    GET_STATS = { path: 'stats', method: 'GET' };
+    STATS_INFO = { path: 'stats_info', method: 'GET' };
+    STATS_CONFIG = { path: 'stats_config', method: 'POST' };
+    STATS_RESET = { path: 'stats_reset', method: 'POST' };
+
+    getStats() {
+        const { path, method } = this.GET_STATS;
+        return this.makeRequest(path, method);
+    }
+
+    getStatsInfo() {
+        const { path, method } = this.STATS_INFO;
+        return this.makeRequest(path, method);
+    }
+
+    setStatsConfig(data) {
+        const { path, method } = this.STATS_CONFIG;
+        const config = {
+            data,
+            headers: { 'Content-Type': 'application/json' },
+        };
+        return this.makeRequest(path, method, config);
+    }
+
+    resetStats() {
+        const { path, method } = this.STATS_RESET;
+        return this.makeRequest(path, method);
+    }
 }
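A short usage sketch of the new Api methods (updateRetention is a hypothetical helper, not part of the commit):

	import Api from '../api/Api';

	const api = new Api();

	// Bump the statistics retention interval, then read the config back.
	async function updateRetention(days) {
	    await api.setStatsConfig({ interval: days }); // POST /control/stats_config
	    return api.getStatsInfo();                    // GET /control/stats_info
	}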
client/src/components/Dashboard/BlockedDomains.js
@@ -1,79 +1,76 @@
-import React, { Component } from 'react';
+import React from 'react';
 import ReactTable from 'react-table';
 import PropTypes from 'prop-types';
-import map from 'lodash/map';
 import { withNamespaces, Trans } from 'react-i18next';

 import Card from '../ui/Card';
 import Cell from '../ui/Cell';
-import Popover from '../ui/Popover';
+import DomainCell from './DomainCell';

-import { getTrackerData } from '../../helpers/trackers/trackers';
 import { getPercent } from '../../helpers/helpers';
 import { STATUS_COLORS } from '../../helpers/constants';

-class BlockedDomains extends Component {
-    columns = [{
-        Header: 'IP',
-        accessor: 'ip',
-        Cell: (row) => {
-            const { value } = row;
-            const trackerData = getTrackerData(value);
-
-            return (
-                <div className="logs__row">
-                    <div className="logs__text" title={value}>
-                        {value}
-                    </div>
-                    {trackerData && <Popover data={trackerData} />}
-                </div>
-            );
-        },
-    }, {
-        Header: <Trans>requests_count</Trans>,
-        accessor: 'domain',
-        maxWidth: 190,
-        Cell: ({ value }) => {
-            const {
-                blockedFiltering,
-                replacedSafebrowsing,
-                replacedParental,
-            } = this.props;
-            const blocked = blockedFiltering + replacedSafebrowsing + replacedParental;
-            const percent = getPercent(blocked, value);
-
-            return (
-                <Cell value={value} percent={percent} color={STATUS_COLORS.red} />
-            );
-        },
-    }];
-
-    render() {
-        const { t } = this.props;
-        return (
-            <Card title={ t('top_blocked_domains') } subtitle={ t('for_last_24_hours') } bodyType="card-table" refresh={this.props.refreshButton}>
-                <ReactTable
-                    data={map(this.props.topBlockedDomains, (value, prop) => (
-                        { ip: prop, domain: value }
-                    ))}
-                    columns={this.columns}
-                    showPagination={false}
-                    noDataText={ t('no_domains_found') }
-                    minRows={6}
-                    className="-striped -highlight card-table-overflow stats__table"
-                />
-            </Card>
-        );
-    }
-}
+const CountCell = totalBlocked =>
+    function cell(row) {
+        const { value } = row;
+        const percent = getPercent(totalBlocked, value);
+
+        return <Cell value={value} percent={percent} color={STATUS_COLORS.red} />;
+    };
+
+const BlockedDomains = ({
+    t,
+    refreshButton,
+    topBlockedDomains,
+    subtitle,
+    blockedFiltering,
+    replacedSafebrowsing,
+    replacedParental,
+}) => {
+    const totalBlocked = blockedFiltering + replacedSafebrowsing + replacedParental;
+
+    return (
+        <Card
+            title={t('top_blocked_domains')}
+            subtitle={subtitle}
+            bodyType="card-table"
+            refresh={refreshButton}
+        >
+            <ReactTable
+                data={topBlockedDomains.map(({ name: domain, count }) => ({
+                    domain,
+                    count,
+                }))}
+                columns={[
+                    {
+                        Header: <Trans>domain</Trans>,
+                        accessor: 'domain',
+                        Cell: DomainCell,
+                    },
+                    {
+                        Header: <Trans>requests_count</Trans>,
+                        accessor: 'count',
+                        maxWidth: 190,
+                        Cell: CountCell(totalBlocked),
+                    },
+                ]}
+                showPagination={false}
+                noDataText={t('no_domains_found')}
+                minRows={6}
+                className="-striped -highlight card-table-overflow stats__table"
+            />
+        </Card>
+    );
+};

 BlockedDomains.propTypes = {
-    topBlockedDomains: PropTypes.object.isRequired,
+    topBlockedDomains: PropTypes.array.isRequired,
     blockedFiltering: PropTypes.number.isRequired,
     replacedSafebrowsing: PropTypes.number.isRequired,
     replacedParental: PropTypes.number.isRequired,
     refreshButton: PropTypes.node.isRequired,
-    t: PropTypes.func,
+    subtitle: PropTypes.string.isRequired,
+    t: PropTypes.func.isRequired,
 };

 export default withNamespaces()(BlockedDomains);
client/src/components/Dashboard/Clients.js
@@ -1,7 +1,6 @@
-import React, { Component } from 'react';
+import React from 'react';
 import ReactTable from 'react-table';
 import PropTypes from 'prop-types';
-import map from 'lodash/map';
 import { Trans, withNamespaces } from 'react-i18next';

 import Card from '../ui/Card';
@@ -10,26 +9,27 @@ import Cell from '../ui/Cell';
 import { getPercent, getClientName } from '../../helpers/helpers';
 import { STATUS_COLORS } from '../../helpers/constants';

-class Clients extends Component {
-    getPercentColor = (percent) => {
-        if (percent > 50) {
-            return STATUS_COLORS.green;
-        } else if (percent > 10) {
-            return STATUS_COLORS.yellow;
-        }
-        return STATUS_COLORS.red;
-    }
-
-    columns = [{
-        Header: 'IP',
-        accessor: 'ip',
-        Cell: ({ value }) => {
-            const clientName = getClientName(this.props.clients, value)
-                || getClientName(this.props.autoClients, value);
-            let client;
-
-            if (clientName) {
-                client = <span>{clientName} <small>({value})</small></span>;
-            } else {
-                client = value;
-            }
+const getClientsPercentColor = (percent) => {
+    if (percent > 50) {
+        return STATUS_COLORS.green;
+    } else if (percent > 10) {
+        return STATUS_COLORS.yellow;
+    }
+    return STATUS_COLORS.red;
+};
+
+const ipCell = (clients, autoClients) =>
+    function cell(row) {
+        let client;
+        const { value } = row;
+        const clientName = getClientName(clients, value) || getClientName(autoClients, value);
+
+        if (clientName) {
+            client = (
+                <span>
+                    {clientName} <small>({value})</small>
+                </span>
+            );
+        } else {
+            client = value;
+        }
@@ -41,47 +41,61 @@
                 </span>
             </div>
         );
-    },
-        sortMethod: (a, b) => parseInt(a.replace(/\./g, ''), 10) - parseInt(b.replace(/\./g, ''), 10),
-    }, {
-        Header: <Trans>requests_count</Trans>,
-        accessor: 'count',
-        Cell: ({ value }) => {
-            const percent = getPercent(this.props.dnsQueries, value);
-            const percentColor = this.getPercentColor(percent);
-
-            return (
-                <Cell value={value} percent={percent} color={percentColor} />
-            );
-        },
-    }];
-
-    render() {
-        const { t } = this.props;
-        return (
-            <Card title={ t('top_clients') } subtitle={ t('for_last_24_hours') } bodyType="card-table" refresh={this.props.refreshButton}>
-                <ReactTable
-                    data={map(this.props.topClients, (value, prop) => (
-                        { ip: prop, count: value }
-                    ))}
-                    columns={this.columns}
-                    showPagination={false}
-                    noDataText={ t('no_clients_found') }
-                    minRows={6}
-                    className="-striped -highlight card-table-overflow"
-                />
-            </Card>
-        );
-    }
-}
+    };
+
+const countCell = dnsQueries =>
+    function cell(row) {
+        const { value } = row;
+        const percent = getPercent(dnsQueries, value);
+        const percentColor = getClientsPercentColor(percent);
+
+        return <Cell value={value} percent={percent} color={percentColor} />;
+    };
+
+const Clients = ({
+    t, refreshButton, topClients, subtitle, clients, autoClients, dnsQueries,
+}) => (
+    <Card
+        title={t('top_clients')}
+        subtitle={subtitle}
+        bodyType="card-table"
+        refresh={refreshButton}
+    >
+        <ReactTable
+            data={topClients.map(({ name: ip, count }) => ({
+                ip,
+                count,
+            }))}
+            columns={[
+                {
+                    Header: 'IP',
+                    accessor: 'ip',
+                    sortMethod: (a, b) =>
+                        parseInt(a.replace(/\./g, ''), 10) - parseInt(b.replace(/\./g, ''), 10),
+                    Cell: ipCell(clients, autoClients),
+                },
+                {
+                    Header: <Trans>requests_count</Trans>,
+                    accessor: 'count',
+                    Cell: countCell(dnsQueries),
+                },
+            ]}
+            showPagination={false}
+            noDataText={t('no_clients_found')}
+            minRows={6}
+            className="-striped -highlight card-table-overflow"
+        />
+    </Card>
+);

 Clients.propTypes = {
-    topClients: PropTypes.object.isRequired,
+    topClients: PropTypes.array.isRequired,
     dnsQueries: PropTypes.number.isRequired,
     refreshButton: PropTypes.node.isRequired,
     clients: PropTypes.array.isRequired,
     autoClients: PropTypes.array.isRequired,
-    t: PropTypes.func,
+    subtitle: PropTypes.string.isRequired,
+    t: PropTypes.func.isRequired,
 };

 export default withNamespaces()(Clients);
client/src/components/Dashboard/Counters.js
@@ -1,88 +1,116 @@
 import React from 'react';
 import PropTypes from 'prop-types';
 import { Trans, withNamespaces } from 'react-i18next';
+import round from 'lodash/round';

 import Card from '../ui/Card';
 import Tooltip from '../ui/Tooltip';

 const tooltipType = 'tooltip-custom--narrow';

-const Counters = props => (
-    <Card title={ props.t('general_statistics') } subtitle={ props.t('for_last_24_hours') } bodyType="card-table" refresh={props.refreshButton}>
+const Counters = (props) => {
+    const {
+        t,
+        interval,
+        refreshButton,
+        subtitle,
+        dnsQueries,
+        blockedFiltering,
+        replacedSafebrowsing,
+        replacedParental,
+        replacedSafesearch,
+        avgProcessingTime,
+    } = props;
+
+    const tooltipTitle =
+        interval === 1
+            ? t('number_of_dns_query_24_hours')
+            : t('number_of_dns_query_days', { count: interval });
+
+    return (
+        <Card
+            title={t('general_statistics')}
+            subtitle={subtitle}
+            bodyType="card-table"
+            refresh={refreshButton}
+        >
         <table className="table card-table">
             <tbody>
                 <tr>
                     <td>
                         <Trans>dns_query</Trans>
-                        <Tooltip text={ props.t('number_of_dns_query_24_hours') } type={tooltipType} />
+                        <Tooltip text={tooltipTitle} type={tooltipType} />
                     </td>
                     <td className="text-right">
-                        <span className="text-muted">
-                            {props.dnsQueries}
-                        </span>
+                        <span className="text-muted">{dnsQueries}</span>
                     </td>
                 </tr>
                 <tr>
                     <td>
-                        <a href="#filters">
-                            <Trans>blocked_by</Trans>
-                        </a>
-                        <Tooltip text={ props.t('number_of_dns_query_blocked_24_hours') } type={tooltipType} />
+                        <Trans components={[<a href="#filters" key="0">link</a>]}>
+                            blocked_by
+                        </Trans>
+                        <Tooltip
+                            text={t('number_of_dns_query_blocked_24_hours')}
+                            type={tooltipType}
+                        />
                     </td>
                     <td className="text-right">
-                        <span className="text-muted">
-                            {props.blockedFiltering}
-                        </span>
+                        <span className="text-muted">{blockedFiltering}</span>
                     </td>
                 </tr>
                 <tr>
                     <td>
                         <Trans>stats_malware_phishing</Trans>
-                        <Tooltip text={ props.t('number_of_dns_query_blocked_24_hours_by_sec') } type={tooltipType} />
+                        <Tooltip
+                            text={t('number_of_dns_query_blocked_24_hours_by_sec')}
+                            type={tooltipType}
+                        />
                     </td>
                     <td className="text-right">
-                        <span className="text-muted">
-                            {props.replacedSafebrowsing}
-                        </span>
+                        <span className="text-muted">{replacedSafebrowsing}</span>
                     </td>
                 </tr>
                 <tr>
                     <td>
                         <Trans>stats_adult</Trans>
-                        <Tooltip text={ props.t('number_of_dns_query_blocked_24_hours_adult') } type={tooltipType} />
+                        <Tooltip
+                            text={t('number_of_dns_query_blocked_24_hours_adult')}
+                            type={tooltipType}
+                        />
                     </td>
                     <td className="text-right">
-                        <span className="text-muted">
-                            {props.replacedParental}
-                        </span>
+                        <span className="text-muted">{replacedParental}</span>
                     </td>
                 </tr>
                 <tr>
                     <td>
                         <Trans>enforced_save_search</Trans>
-                        <Tooltip text={ props.t('number_of_dns_query_to_safe_search') } type={tooltipType} />
+                        <Tooltip
+                            text={t('number_of_dns_query_to_safe_search')}
+                            type={tooltipType}
+                        />
                     </td>
                     <td className="text-right">
-                        <span className="text-muted">
-                            {props.replacedSafesearch}
-                        </span>
+                        <span className="text-muted">{replacedSafesearch}</span>
                     </td>
                 </tr>
                 <tr>
                     <td>
                         <Trans>average_processing_time</Trans>
-                        <Tooltip text={ props.t('average_processing_time_hint') } type={tooltipType} />
+                        <Tooltip text={t('average_processing_time_hint')} type={tooltipType} />
                     </td>
                     <td className="text-right">
                         <span className="text-muted">
-                            {props.avgProcessingTime}
+                            {avgProcessingTime ? `${round(avgProcessingTime)} ms` : 0}
                         </span>
                     </td>
                 </tr>
             </tbody>
         </table>
     </Card>
 );
+};

 Counters.propTypes = {
     dnsQueries: PropTypes.number.isRequired,
@@ -92,6 +120,8 @@ Counters.propTypes = {
     replacedSafesearch: PropTypes.number.isRequired,
     avgProcessingTime: PropTypes.number.isRequired,
     refreshButton: PropTypes.node.isRequired,
+    subtitle: PropTypes.string.isRequired,
+    interval: PropTypes.number.isRequired,
     t: PropTypes.func.isRequired,
 };
client stylesheet
@@ -20,3 +20,8 @@
     border-top: 6px solid transparent;
     border-bottom: 6px solid #585965;
 }
+
+.card-chart-bg {
+    left: -20px;
+    width: calc(100% + 20px);
+}
client/src/components/Dashboard/DomainCell.js (new file)
@@ -0,0 +1,24 @@
+import React from 'react';
+import PropTypes from 'prop-types';
+
+import { getTrackerData } from '../../helpers/trackers/trackers';
+import Popover from '../ui/Popover';
+
+const DomainCell = ({ value }) => {
+    const trackerData = getTrackerData(value);
+
+    return (
+        <div className="logs__row">
+            <div className="logs__text" title={value}>
+                {value}
+            </div>
+            {trackerData && <Popover data={trackerData} />}
+        </div>
+    );
+};
+
+DomainCell.propTypes = {
+    value: PropTypes.string.isRequired,
+};
+
+export default DomainCell;
client/src/components/Dashboard/QueriedDomains.js
@@ -1,81 +1,74 @@
-import React, { Component } from 'react';
+import React from 'react';
 import ReactTable from 'react-table';
 import PropTypes from 'prop-types';
-import map from 'lodash/map';
 import { withNamespaces, Trans } from 'react-i18next';

 import Card from '../ui/Card';
 import Cell from '../ui/Cell';
-import Popover from '../ui/Popover';
+import DomainCell from './DomainCell';

-import { getTrackerData } from '../../helpers/trackers/trackers';
-import { getPercent } from '../../helpers/helpers';
 import { STATUS_COLORS } from '../../helpers/constants';
+import { getPercent } from '../../helpers/helpers';

-class QueriedDomains extends Component {
-    getPercentColor = (percent) => {
-        if (percent > 10) {
-            return STATUS_COLORS.red;
-        } else if (percent > 5) {
-            return STATUS_COLORS.yellow;
-        }
-        return STATUS_COLORS.green;
-    }
-
-    columns = [{
-        Header: 'IP',
-        accessor: 'ip',
-        Cell: (row) => {
-            const { value } = row;
-            const trackerData = getTrackerData(value);
-
-            return (
-                <div className="logs__row">
-                    <div className="logs__text" title={value}>
-                        {value}
-                    </div>
-                    {trackerData && <Popover data={trackerData} />}
-                </div>
-            );
-        },
-    }, {
-        Header: <Trans>requests_count</Trans>,
-        accessor: 'count',
-        maxWidth: 190,
-        Cell: ({ value }) => {
-            const percent = getPercent(this.props.dnsQueries, value);
-            const percentColor = this.getPercentColor(percent);
-
-            return (
-                <Cell value={value} percent={percent} color={percentColor} />
-            );
-        },
-    }];
-
-    render() {
-        const { t } = this.props;
-        return (
-            <Card title={ t('stats_query_domain') } subtitle={ t('for_last_24_hours') } bodyType="card-table" refresh={this.props.refreshButton}>
-                <ReactTable
-                    data={map(this.props.topQueriedDomains, (value, prop) => (
-                        { ip: prop, count: value }
-                    ))}
-                    columns={this.columns}
-                    showPagination={false}
-                    noDataText={ t('no_domains_found') }
-                    minRows={6}
-                    className="-striped -highlight card-table-overflow stats__table"
-                />
-            </Card>
-        );
-    }
-}
+const getQueriedPercentColor = (percent) => {
+    if (percent > 10) {
+        return STATUS_COLORS.red;
+    } else if (percent > 5) {
+        return STATUS_COLORS.yellow;
+    }
+    return STATUS_COLORS.green;
+};
+
+const countCell = dnsQueries =>
+    function cell(row) {
+        const { value } = row;
+        const percent = getPercent(dnsQueries, value);
+        const percentColor = getQueriedPercentColor(percent);
+
+        return <Cell value={value} percent={percent} color={percentColor} />;
+    };
+
+const QueriedDomains = ({
+    t, refreshButton, topQueriedDomains, subtitle, dnsQueries,
+}) => (
+    <Card
+        title={t('stats_query_domain')}
+        subtitle={subtitle}
+        bodyType="card-table"
+        refresh={refreshButton}
+    >
+        <ReactTable
+            data={topQueriedDomains.map(({ name: domain, count }) => ({
+                domain,
+                count,
+            }))}
+            columns={[
+                {
+                    Header: <Trans>domain</Trans>,
+                    accessor: 'domain',
+                    Cell: DomainCell,
+                },
+                {
+                    Header: <Trans>requests_count</Trans>,
+                    accessor: 'count',
+                    maxWidth: 190,
+                    Cell: countCell(dnsQueries),
+                },
+            ]}
+            showPagination={false}
+            noDataText={t('no_domains_found')}
+            minRows={6}
+            className="-striped -highlight card-table-overflow stats__table"
+        />
+    </Card>
+);

 QueriedDomains.propTypes = {
-    topQueriedDomains: PropTypes.object.isRequired,
+    topQueriedDomains: PropTypes.array.isRequired,
     dnsQueries: PropTypes.number.isRequired,
     refreshButton: PropTypes.node.isRequired,
-    t: PropTypes.func,
+    subtitle: PropTypes.string.isRequired,
+    t: PropTypes.func.isRequired,
 };

 export default withNamespaces()(QueriedDomains);
client/src/components/Dashboard/Statistics.js
@@ -1,111 +1,78 @@
-import React, { Component } from 'react';
+import React from 'react';
 import PropTypes from 'prop-types';
-import { Trans, withNamespaces } from 'react-i18next';
+import { withNamespaces, Trans } from 'react-i18next';

-import Card from '../ui/Card';
-import Line from '../ui/Line';
-
-import { getPercent } from '../../helpers/helpers';
-import { STATUS_COLORS } from '../../helpers/constants';
-
-class Statistics extends Component {
-    render() {
-        const {
-            dnsQueries,
-            blockedFiltering,
-            replacedSafebrowsing,
-            replacedParental,
-        } = this.props;
-
-        const filteringData = [this.props.history[1]];
-        const queriesData = [this.props.history[2]];
-        const parentalData = [this.props.history[3]];
-        const safebrowsingData = [this.props.history[4]];
-
-        return (
-            <div className="row">
-                <div className="col-sm-6 col-lg-3">
-                    <Card type="card--full" bodyType="card-wrap">
-                        <div className="card-body-stats">
-                            <div className="card-value card-value-stats text-blue">
-                                {dnsQueries}
-                            </div>
-                            <div className="card-title-stats">
-                                <Trans>dns_query</Trans>
-                            </div>
-                        </div>
-                        <div className="card-chart-bg">
-                            <Line data={queriesData} color={STATUS_COLORS.blue}/>
-                        </div>
-                    </Card>
-                </div>
-                <div className="col-sm-6 col-lg-3">
-                    <Card type="card--full" bodyType="card-wrap">
-                        <div className="card-body-stats">
-                            <div className="card-value card-value-stats text-red">
-                                {blockedFiltering}
-                            </div>
-                            <div className="card-value card-value-percent text-red">
-                                {getPercent(dnsQueries, blockedFiltering)}
-                            </div>
-                            <div className="card-title-stats">
-                                <a href="#filters">
-                                    <Trans>blocked_by</Trans>
-                                </a>
-                            </div>
-                        </div>
-                        <div className="card-chart-bg">
-                            <Line data={filteringData} color={STATUS_COLORS.red}/>
-                        </div>
-                    </Card>
-                </div>
-                <div className="col-sm-6 col-lg-3">
-                    <Card type="card--full" bodyType="card-wrap">
-                        <div className="card-body-stats">
-                            <div className="card-value card-value-stats text-green">
-                                {replacedSafebrowsing}
-                            </div>
-                            <div className="card-value card-value-percent text-green">
-                                {getPercent(dnsQueries, replacedSafebrowsing)}
-                            </div>
-                            <div className="card-title-stats">
-                                <Trans>stats_malware_phishing</Trans>
-                            </div>
-                        </div>
-                        <div className="card-chart-bg">
-                            <Line data={safebrowsingData} color={STATUS_COLORS.green}/>
-                        </div>
-                    </Card>
-                </div>
-                <div className="col-sm-6 col-lg-3">
-                    <Card type="card--full" bodyType="card-wrap">
-                        <div className="card-body-stats">
-                            <div className="card-value card-value-stats text-yellow">
-                                {replacedParental}
-                            </div>
-                            <div className="card-value card-value-percent text-yellow">
-                                {getPercent(dnsQueries, replacedParental)}
-                            </div>
-                            <div className="card-title-stats">
-                                <Trans>stats_adult</Trans>
-                            </div>
-                        </div>
-                        <div className="card-chart-bg">
-                            <Line data={parentalData} color={STATUS_COLORS.yellow}/>
-                        </div>
-                    </Card>
-                </div>
-            </div>
-        );
-    }
-}
+import StatsCard from './StatsCard';
+import { getPercent, normalizeHistory } from '../../helpers/helpers';
+
+const getNormalizedHistory = (data, interval, id) => [
+    { data: normalizeHistory(data, interval), id },
+];
+
+const Statistics = ({
+    interval,
+    dnsQueries,
+    blockedFiltering,
+    replacedSafebrowsing,
+    replacedParental,
+    numDnsQueries,
+    numBlockedFiltering,
+    numReplacedSafebrowsing,
+    numReplacedParental,
+}) => (
+    <div className="row">
+        <div className="col-sm-6 col-lg-3">
+            <StatsCard
+                total={numDnsQueries}
+                lineData={getNormalizedHistory(dnsQueries, interval, 'dnsQuery')}
+                title={<Trans>dns_query</Trans>}
+                color="blue"
+            />
+        </div>
+        <div className="col-sm-6 col-lg-3">
+            <StatsCard
+                total={numBlockedFiltering}
+                lineData={getNormalizedHistory(blockedFiltering, interval, 'blockedFiltering')}
+                percent={getPercent(numDnsQueries, numBlockedFiltering)}
+                title={<Trans components={[<a href="#filters" key="0">link</a>]}>blocked_by</Trans>}
+                color="red"
+            />
+        </div>
+        <div className="col-sm-6 col-lg-3">
+            <StatsCard
+                total={numReplacedSafebrowsing}
+                lineData={getNormalizedHistory(
+                    replacedSafebrowsing,
+                    interval,
+                    'replacedSafebrowsing',
+                )}
+                percent={getPercent(numDnsQueries, numReplacedSafebrowsing)}
+                title={<Trans>stats_malware_phishing</Trans>}
+                color="green"
+            />
+        </div>
+        <div className="col-sm-6 col-lg-3">
+            <StatsCard
+                total={numReplacedParental}
+                lineData={getNormalizedHistory(replacedParental, interval, 'replacedParental')}
+                percent={getPercent(numDnsQueries, numReplacedParental)}
+                title={<Trans>stats_adult</Trans>}
+                color="yellow"
+            />
+        </div>
+    </div>
+);

 Statistics.propTypes = {
-    history: PropTypes.array.isRequired,
-    dnsQueries: PropTypes.number.isRequired,
-    blockedFiltering: PropTypes.number.isRequired,
-    replacedSafebrowsing: PropTypes.number.isRequired,
-    replacedParental: PropTypes.number.isRequired,
+    interval: PropTypes.number.isRequired,
+    dnsQueries: PropTypes.array.isRequired,
+    blockedFiltering: PropTypes.array.isRequired,
+    replacedSafebrowsing: PropTypes.array.isRequired,
+    replacedParental: PropTypes.array.isRequired,
+    numDnsQueries: PropTypes.number.isRequired,
+    numBlockedFiltering: PropTypes.number.isRequired,
+    numReplacedSafebrowsing: PropTypes.number.isRequired,
+    numReplacedParental: PropTypes.number.isRequired,
     refreshButton: PropTypes.node.isRequired,
 };
client/src/components/Dashboard/StatsCard.js (new file)
@@ -0,0 +1,31 @@
+import React from 'react';
+import PropTypes from 'prop-types';
+
+import { STATUS_COLORS } from '../../helpers/constants';
+import Card from '../ui/Card';
+import Line from '../ui/Line';
+
+const StatsCard = ({
+    total, lineData, percent, title, color,
+}) => (
+    <Card type="card--full" bodyType="card-wrap">
+        <div className="card-body-stats">
+            <div className={`card-value card-value-stats text-${color}`}>{total}</div>
+            <div className="card-title-stats">{title}</div>
+        </div>
+        {percent >= 0 && (<div className={`card-value card-value-percent text-${color}`}>{percent}</div>)}
+        <div className="card-chart-bg">
+            <Line data={lineData} color={STATUS_COLORS[color]} />
+        </div>
+    </Card>
+);
+
+StatsCard.propTypes = {
+    total: PropTypes.number.isRequired,
+    lineData: PropTypes.array.isRequired,
+    title: PropTypes.object.isRequired,
+    color: PropTypes.string.isRequired,
+    percent: PropTypes.number,
+};
+
+export default StatsCard;
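For context, a minimal usage sketch of the new card (not part of the commit). The lineData shape shown here — an array of { id, data: [{ x, y }, ...] } series — is what @nivo/line's chart consumes; the exact output of normalizeHistory() is assumed to match it:

	import React from 'react';
	import { Trans } from 'react-i18next';
	import StatsCard from './StatsCard';

	// Hypothetical example component illustrating the StatsCard props.
	const ExampleCard = () => (
	    <StatsCard
	        total={1234}
	        percent={42}
	        color="blue"
	        title={<Trans>dns_query</Trans>}
	        lineData={[{ id: 'dnsQuery', data: [{ x: 0, y: 12 }, { x: 1, y: 30 }] }]}
	    />
	);

	export default ExampleCard;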
@@ -19,10 +19,9 @@ class Dashboard extends Component {

     getAllStats = () => {
         this.props.getStats();
-        this.props.getStatsHistory();
-        this.props.getTopStats();
+        this.props.getStatsConfig();
         this.props.getClients();
-    }
+    };

     getToggleFilteringButton = () => {
         const { protectionEnabled, processingProtection } = this.props.dashboard;
@@ -39,16 +38,20 @@ class Dashboard extends Component {
                 <Trans>{buttonText}</Trans>
             </button>
         );
-    }
+    };

     render() {
-        const { dashboard, t } = this.props;
+        const { dashboard, stats, t } = this.props;
         const dashboardProcessing =
             dashboard.processing ||
-            dashboard.processingStats ||
-            dashboard.processingStatsHistory ||
             dashboard.processingClients ||
-            dashboard.processingTopStats;
+            stats.processingStats ||
+            stats.processingGetConfig;
+
+        const subtitle =
+            stats.interval === 1
+                ? t('for_last_24_hours')
+                : t('for_last_days', { count: stats.interval });

         const refreshFullButton = (
             <button
@@ -59,6 +62,7 @@ class Dashboard extends Component {
                 <Trans>refresh_statics</Trans>
             </button>
         );
+
         const refreshButton = (
             <button
                 type="button"
@@ -73,87 +77,85 @@ class Dashboard extends Component {

         return (
             <Fragment>
-                <PageTitle title={ t('dashboard') }>
+                <PageTitle title={t('dashboard')}>
                     <div className="page-title__actions">
                         {this.getToggleFilteringButton()}
                         {refreshFullButton}
                     </div>
                 </PageTitle>
                 {dashboardProcessing && <Loading />}
-                {!dashboardProcessing &&
+                {!dashboardProcessing && (
                     <div className="row row-cards">
-                        {dashboard.statsHistory &&
                         <div className="col-lg-12">
                             <Statistics
-                                history={dashboard.statsHistory}
+                                interval={stats.interval}
+                                dnsQueries={stats.dnsQueries}
+                                blockedFiltering={stats.blockedFiltering}
+                                replacedSafebrowsing={stats.replacedSafebrowsing}
+                                replacedParental={stats.replacedParental}
+                                numDnsQueries={stats.numDnsQueries}
+                                numBlockedFiltering={stats.numBlockedFiltering}
+                                numReplacedSafebrowsing={stats.numReplacedSafebrowsing}
+                                numReplacedParental={stats.numReplacedParental}
                                 refreshButton={refreshButton}
-                                dnsQueries={dashboard.stats.dns_queries}
-                                blockedFiltering={dashboard.stats.blocked_filtering}
-                                replacedSafebrowsing={dashboard.stats.replaced_safebrowsing}
-                                replacedParental={dashboard.stats.replaced_parental}
                             />
                         </div>
-                        }
                         <div className="col-lg-6">
-                            {dashboard.stats &&
                             <Counters
+                                subtitle={subtitle}
+                                interval={stats.interval}
+                                dnsQueries={stats.numDnsQueries}
+                                blockedFiltering={stats.numBlockedFiltering}
+                                replacedSafebrowsing={stats.numReplacedSafebrowsing}
+                                replacedParental={stats.numReplacedParental}
+                                replacedSafesearch={stats.numReplacedSafesearch}
+                                avgProcessingTime={stats.avgProcessingTime}
                                 refreshButton={refreshButton}
-                                dnsQueries={dashboard.stats.dns_queries}
-                                blockedFiltering={dashboard.stats.blocked_filtering}
-                                replacedSafebrowsing={dashboard.stats.replaced_safebrowsing}
-                                replacedParental={dashboard.stats.replaced_parental}
-                                replacedSafesearch={dashboard.stats.replaced_safesearch}
-                                avgProcessingTime={dashboard.stats.avg_processing_time}
                             />
-                            }
                         </div>
-                        {dashboard.topStats &&
-                        <Fragment>
                         <div className="col-lg-6">
                             <Clients
-                                dnsQueries={dashboard.stats.dns_queries}
-                                refreshButton={refreshButton}
-                                topClients={dashboard.topStats.top_clients}
+                                subtitle={subtitle}
+                                dnsQueries={stats.numDnsQueries}
+                                topClients={stats.topClients}
                                 clients={dashboard.clients}
                                 autoClients={dashboard.autoClients}
+                                refreshButton={refreshButton}
                             />
                         </div>
                         <div className="col-lg-6">
                             <QueriedDomains
-                                dnsQueries={dashboard.stats.dns_queries}
+                                subtitle={subtitle}
+                                dnsQueries={stats.numDnsQueries}
+                                topQueriedDomains={stats.topQueriedDomains}
                                 refreshButton={refreshButton}
-                                topQueriedDomains={dashboard.topStats.top_queried_domains}
                             />
                         </div>
                         <div className="col-lg-6">
                             <BlockedDomains
+                                subtitle={subtitle}
+                                topBlockedDomains={stats.topBlockedDomains}
+                                blockedFiltering={stats.numBlockedFiltering}
+                                replacedSafebrowsing={stats.numReplacedSafebrowsing}
+                                replacedParental={stats.numReplacedParental}
                                 refreshButton={refreshButton}
-                                topBlockedDomains={dashboard.topStats.top_blocked_domains}
-                                blockedFiltering={dashboard.stats.blocked_filtering}
-                                replacedSafebrowsing={dashboard.stats.replaced_safebrowsing}
-                                replacedParental={dashboard.stats.replaced_parental}
                             />
                         </div>
-                        </Fragment>
-                        }
                     </div>
-                }
+                )}
             </Fragment>
         );
     }
 }

 Dashboard.propTypes = {
-    getStats: PropTypes.func,
-    getStatsHistory: PropTypes.func,
-    getTopStats: PropTypes.func,
-    dashboard: PropTypes.object,
-    isCoreRunning: PropTypes.bool,
-    getFiltering: PropTypes.func,
-    toggleProtection: PropTypes.func,
-    getClients: PropTypes.func,
-    processingProtection: PropTypes.bool,
-    t: PropTypes.func,
+    dashboard: PropTypes.object.isRequired,
+    stats: PropTypes.object.isRequired,
+    getStats: PropTypes.func.isRequired,
+    getStatsConfig: PropTypes.func.isRequired,
+    toggleProtection: PropTypes.func.isRequired,
+    getClients: PropTypes.func.isRequired,
+    t: PropTypes.func.isRequired,
 };

 export default withNamespaces()(Dashboard);
@@ -80,7 +80,7 @@ class Modal extends Component {
         }
         return (
             <div className="description">
-                <Trans>url_added_successfully</Trans>
+                <Trans>filter_added_successfully</Trans>
             </div>
         );
     };
@@ -3,32 +3,13 @@ import PropTypes from 'prop-types';
 import { withNamespaces } from 'react-i18next';
 import ReactTable from 'react-table';

-import { CLIENT_ID } from '../../../helpers/constants';
 import Card from '../../ui/Card';

 class AutoClients extends Component {
-    getClient = (name, clients) => {
-        const client = clients.find(item => name === item.name);
-
-        if (client) {
-            const identifier = client.mac ? CLIENT_ID.MAC : CLIENT_ID.IP;
-
-            return {
-                identifier,
-                use_global_settings: true,
-                ...client,
-            };
-        }
-
-        return {
-            identifier: 'ip',
-            use_global_settings: true,
-        };
-    };
-
     getStats = (ip, stats) => {
-        if (stats && stats.top_clients) {
-            return stats.top_clients[ip];
+        if (stats) {
+            const statsForCurrentIP = stats.find(item => item.name === ip);
+            return statsForCurrentIP && statsForCurrentIP.count;
         }

         return '';
@@ -59,11 +40,11 @@ class AutoClients extends Component {
             Cell: this.cellWrap,
         },
         {
-            Header: this.props.t('table_statistics'),
+            Header: this.props.t('requests_count'),
             accessor: 'statistics',
             Cell: (row) => {
                 const clientIP = row.original.ip;
-                const clientStats = clientIP && this.getStats(clientIP, this.props.topStats);
+                const clientStats = clientIP && this.getStats(clientIP, this.props.topClients);

                 if (clientStats) {
                     return (
@@ -112,7 +93,7 @@ class AutoClients extends Component {
 AutoClients.propTypes = {
     t: PropTypes.func.isRequired,
     autoClients: PropTypes.array.isRequired,
-    topStats: PropTypes.object.isRequired,
+    topClients: PropTypes.array.isRequired,
 };

 export default withNamespaces()(AutoClients);
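
With top_clients now normalized into an array of { name, count } objects (see normalizeTopStats in the helpers further down), getStats becomes a plain Array.find. A small illustration with made-up values:

    const topClients = [
        { name: '192.168.1.15', count: 532 },
        { name: '192.168.1.20', count: 128 },
    ];

    getStats('192.168.1.15', topClients); // 532
    getStats('10.0.0.1', topClients);     // undefined, so the statistics cell stays empty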
@@ -63,8 +63,9 @@ class ClientsTable extends Component {
     };

     getStats = (ip, stats) => {
-        if (stats && stats.top_clients) {
-            return stats.top_clients[ip];
+        if (stats) {
+            const statsForCurrentIP = stats.find(item => item.name === ip);
+            return statsForCurrentIP && statsForCurrentIP.count;
         }

         return '';
@@ -149,11 +150,11 @@ class ClientsTable extends Component {
             },
         },
         {
-            Header: this.props.t('table_statistics'),
+            Header: this.props.t('requests_count'),
             accessor: 'statistics',
             Cell: (row) => {
                 const clientIP = row.original.ip;
-                const clientStats = clientIP && this.getStats(clientIP, this.props.topStats);
+                const clientStats = clientIP && this.getStats(clientIP, this.props.topClients);

                 if (clientStats) {
                     return (
@@ -276,7 +277,7 @@ class ClientsTable extends Component {
 ClientsTable.propTypes = {
     t: PropTypes.func.isRequired,
     clients: PropTypes.array.isRequired,
-    topStats: PropTypes.object.isRequired,
+    topClients: PropTypes.array.isRequired,
     toggleClientModal: PropTypes.func.isRequired,
     deleteClient: PropTypes.func.isRequired,
     addClient: PropTypes.func.isRequired,
@@ -10,13 +10,14 @@ import Loading from '../../ui/Loading';
 class Clients extends Component {
     componentDidMount() {
         this.props.getClients();
-        this.props.getTopStats();
+        this.props.getStats();
     }

     render() {
         const {
             t,
             dashboard,
+            stats,
             clients,
             addClient,
             updateClient,
@@ -27,12 +28,12 @@ class Clients extends Component {
         return (
             <Fragment>
                 <PageTitle title={t('client_settings')} />
-                {(dashboard.processingTopStats || dashboard.processingClients) && <Loading />}
-                {!dashboard.processingTopStats && !dashboard.processingClients && (
+                {(stats.processingStats || dashboard.processingClients) && <Loading />}
+                {!stats.processingStats && !dashboard.processingClients && (
                     <Fragment>
                         <ClientsTable
                             clients={dashboard.clients}
-                            topStats={dashboard.topStats}
+                            topClients={stats.topClients}
                             isModalOpen={clients.isModalOpen}
                             modalClientName={clients.modalClientName}
                             modalType={clients.modalType}
@@ -46,7 +47,7 @@ class Clients extends Component {
                         />
                         <AutoClients
                             autoClients={dashboard.autoClients}
-                            topStats={dashboard.topStats}
+                            topClients={stats.topClients}
                         />
                     </Fragment>
                 )}
@@ -58,14 +59,14 @@ class Clients extends Component {
 Clients.propTypes = {
     t: PropTypes.func.isRequired,
     dashboard: PropTypes.object.isRequired,
+    stats: PropTypes.object.isRequired,
     clients: PropTypes.object.isRequired,
     toggleClientModal: PropTypes.func.isRequired,
     deleteClient: PropTypes.func.isRequired,
     addClient: PropTypes.func.isRequired,
     updateClient: PropTypes.func.isRequired,
     getClients: PropTypes.func.isRequired,
-    getTopStats: PropTypes.func.isRequired,
-    topStats: PropTypes.object,
+    getStats: PropTypes.func.isRequired,
 };

 export default withNamespaces()(Clients);
@@ -24,7 +24,7 @@ const Modal = (props) => {
         <div className="modal-content">
             <div className="modal-header">
                 <h4 className="modal-title">
-                    <Trans>Add DNS rewrite</Trans>
+                    <Trans>rewrite_add</Trans>
                 </h4>
                 <button type="button" className="close" onClick={() => toggleRewritesModal()}>
                     <span className="sr-only">Close</span>
@@ -14,12 +14,12 @@ class Table extends Component {

     columns = [
         {
-            Header: 'Domain',
+            Header: this.props.t('domain'),
             accessor: 'domain',
             Cell: this.cellWrap,
         },
         {
-            Header: 'Answer',
+            Header: this.props.t('answer'),
             accessor: 'answer',
             Cell: this.cellWrap,
         },
@@ -44,7 +44,7 @@ class Services extends Component {
         return (
             <Card
                 title={t('blocked_services')}
-                subtitle={t('Allows to quickly block popular sites.')}
+                subtitle={t('blocked_services_desc')}
                 bodyType="card-body box-body--settings"
             >
                 <div className="form">
@@ -104,3 +104,8 @@
     min-width: 23px;
     padding: 5px;
 }
+
+.custom-control-label,
+.custom-control-label:before {
+    transition: 0.3s ease-in-out background-color, 0.3s ease-in-out color;
+}
@@ -0,0 +1,74 @@
+import React from 'react';
+import PropTypes from 'prop-types';
+import { Field, reduxForm } from 'redux-form';
+import { Trans, withNamespaces } from 'react-i18next';
+import flow from 'lodash/flow';
+
+import { renderRadioField, toNumber } from '../../../helpers/form';
+import { STATS_INTERVALS_DAYS } from '../../../helpers/constants';
+
+const getIntervalFields = (processing, t, handleChange, toNumber) =>
+    STATS_INTERVALS_DAYS.map((interval) => {
+        const title = interval === 1
+            ? t('interval_24_hour')
+            : t('interval_days', { count: interval });
+
+        return (
+            <Field
+                key={interval}
+                name="interval"
+                type="radio"
+                component={renderRadioField}
+                value={interval}
+                placeholder={title}
+                onChange={handleChange}
+                normalize={toNumber}
+                disabled={processing}
+            />
+        );
+    });
+
+const Form = (props) => {
+    const {
+        handleSubmit, handleChange, processing, t,
+    } = props;
+
+    return (
+        <form onSubmit={handleSubmit}>
+            <div className="row">
+                <div className="col-12">
+                    <label className="form__label form__label--with-desc" htmlFor="server_name">
+                        <Trans>statistics_retention</Trans>
+                    </label>
+                    <div className="form__desc form__desc--top">
+                        <Trans>statistics_retention_desc</Trans>
+                    </div>
+                </div>
+                <div className="col-12">
+                    <div className="form__group mt-2">
+                        <div className="custom-controls-stacked">
+                            {getIntervalFields(processing, t, handleChange, toNumber)}
+                        </div>
+                    </div>
+                </div>
+            </div>
+        </form>
+    );
+};
+
+Form.propTypes = {
+    handleSubmit: PropTypes.func.isRequired,
+    handleChange: PropTypes.func,
+    change: PropTypes.func.isRequired,
+    submitting: PropTypes.bool.isRequired,
+    invalid: PropTypes.bool.isRequired,
+    processing: PropTypes.bool.isRequired,
+    t: PropTypes.func.isRequired,
+};
+
+export default flow([
+    withNamespaces(),
+    reduxForm({
+        form: 'logConfigForm',
+    }),
+])(Form);
@@ -0,0 +1,63 @@
+import React, { Component } from 'react';
+import PropTypes from 'prop-types';
+import { withNamespaces, Trans } from 'react-i18next';
+import debounce from 'lodash/debounce';
+
+import { DEBOUNCE_TIMEOUT } from '../../../helpers/constants';
+import Form from './Form';
+import Card from '../../ui/Card';
+
+class StatsConfig extends Component {
+    handleFormChange = debounce((values) => {
+        this.props.setStatsConfig(values);
+    }, DEBOUNCE_TIMEOUT);
+
+    handleReset = () => {
+        const { t, resetStats } = this.props;
+        // eslint-disable-next-line no-alert
+        if (window.confirm(t('statistics_clear_confirm'))) {
+            resetStats();
+        }
+    };
+
+    render() {
+        const {
+            t, interval, processing, processingReset,
+        } = this.props;
+
+        return (
+            <Card title={t('statistics_logs')} bodyType="card-body box-body--settings">
+                <div className="form">
+                    <Form
+                        initialValues={{
+                            interval,
+                        }}
+                        onSubmit={this.handleFormChange}
+                        onChange={this.handleFormChange}
+                        processing={processing}
+                    />
+
+                    <button
+                        type="button"
+                        className="btn btn-outline-secondary btn-sm mt-3"
+                        onClick={this.handleReset}
+                        disabled={processingReset}
+                    >
+                        <Trans>statistics_clear</Trans>
+                    </button>
+                </div>
+            </Card>
+        );
+    }
+}
+
+StatsConfig.propTypes = {
+    interval: PropTypes.number.isRequired,
+    processing: PropTypes.bool.isRequired,
+    processingReset: PropTypes.bool.isRequired,
+    setStatsConfig: PropTypes.func.isRequired,
+    resetStats: PropTypes.func.isRequired,
+    t: PropTypes.func.isRequired,
+};
+
+export default withNamespaces()(StatsConfig);
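
StatsConfig saves the retention interval as the user clicks through the radio buttons: every form change goes through the debounced handleFormChange, so setStatsConfig fires once per burst of changes rather than on every click. Roughly equivalent behaviour, sketched outside the component (DEBOUNCE_TIMEOUT lives in helpers/constants and is not shown in this diff; 300 ms is an assumed value):

    const save = debounce(values => setStatsConfig(values), 300);
    save({ interval: 7 });
    save({ interval: 30 }); // only this call reaches setStatsConfig once the timeout expires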
@@ -3,6 +3,7 @@ import PropTypes from 'prop-types';
 import { withNamespaces, Trans } from 'react-i18next';

 import Services from './Services';
+import StatsConfig from './StatsConfig';
 import Checkbox from '../ui/Checkbox';
 import Loading from '../ui/Loading';
 import PageTitle from '../ui/PageTitle';
@@ -37,6 +38,7 @@ class Settings extends Component {
     componentDidMount() {
         this.props.initSettings(this.settings);
         this.props.getBlockedServices();
+        this.props.getStatsConfig();
     }

     renderSettings = (settings) => {
@@ -62,7 +64,13 @@ class Settings extends Component {

     render() {
         const {
-            settings, services, setBlockedServices, t,
+            settings,
+            services,
+            setBlockedServices,
+            setStatsConfig,
+            resetStats,
+            stats,
+            t,
         } = this.props;
         return (
             <Fragment>
@@ -78,6 +86,15 @@ class Settings extends Component {
                         </div>
                     </Card>
                 </div>
+                <div className="col-md-12">
+                    <StatsConfig
+                        interval={stats.interval}
+                        processing={stats.processingSetConfig}
+                        processingReset={stats.processingReset}
+                        setStatsConfig={setStatsConfig}
+                        resetStats={resetStats}
+                    />
+                </div>
                 <div className="col-md-12">
                     <Services
                         services={services}
@@ -93,11 +110,13 @@ class Settings extends Component {
 }

 Settings.propTypes = {
-    initSettings: PropTypes.func,
-    settings: PropTypes.object,
-    settingsList: PropTypes.object,
-    toggleSetting: PropTypes.func,
-    t: PropTypes.func,
+    initSettings: PropTypes.func.isRequired,
+    settings: PropTypes.object.isRequired,
+    toggleSetting: PropTypes.func.isRequired,
+    getStatsConfig: PropTypes.func.isRequired,
+    setStatsConfig: PropTypes.func.isRequired,
+    resetStats: PropTypes.func.isRequired,
+    t: PropTypes.func.isRequired,
 };

 export default withNamespaces()(Settings);
@@ -5,9 +5,7 @@ const Cell = props => (
     <div className="stats__row">
         <div className="stats__row-value mb-1">
             <strong>{props.value}</strong>
-            <small className="ml-3 text-muted">
-                {props.percent}%
-            </small>
+            <small className="ml-3 text-muted">{props.percent}%</small>
         </div>
         <div className="progress progress-xs">
             <div
@@ -4,33 +4,27 @@ import { ResponsiveLine } from '@nivo/line';

 import './Line.css';

-const Line = props => (
-    props.data &&
+const Line = ({ data, color }) => (
+    data &&
         <ResponsiveLine
-            data={props.data}
+            data={data}
             margin={{
                 top: 15,
                 right: 0,
                 bottom: 1,
-                left: 0,
+                left: 20,
             }}
             minY="auto"
             stacked={false}
             curve='linear'
-            axisBottom={{
-                tickSize: 0,
-                tickPadding: 10,
-            }}
-            axisLeft={{
-                tickSize: 0,
-                tickPadding: 10,
-            }}
+            axisBottom={null}
+            axisLeft={null}
             enableGridX={false}
             enableGridY={false}
             enableDots={false}
             enableArea={true}
             animate={false}
-            colorBy={() => (props.color)}
+            colorBy={() => (color)}
             tooltip={slice => (
                 <div>
                     {slice.data.map(d => (
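
Line still just forwards its data to nivo's ResponsiveLine; only the axes and the left margin changed. ResponsiveLine expects an array of series objects, so the data handed in is assumed to look roughly like this (series id, points and color are illustrative):

    const lineData = [
        {
            id: 'dnsQueries',
            data: [
                { x: '13 Jul 14:00', y: 120 },
                { x: '13 Jul 15:00', y: 98 },
            ],
        },
    ];
    // <Line data={lineData} color="#467fcf" />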
@@ -1,20 +1,22 @@
 import { connect } from 'react-redux';
-import { getClients, getTopStats } from '../actions';
+import { getClients } from '../actions';
+import { getStats } from '../actions/stats';
 import { addClient, updateClient, deleteClient, toggleClientModal } from '../actions/clients';
 import Clients from '../components/Settings/Clients';

 const mapStateToProps = (state) => {
-    const { dashboard, clients } = state;
+    const { dashboard, clients, stats } = state;
     const props = {
         dashboard,
         clients,
+        stats,
     };
     return props;
 };

 const mapDispatchToProps = {
     getClients,
-    getTopStats,
+    getStats,
     addClient,
     updateClient,
     deleteClient,
@@ -1,14 +1,23 @@
 import { connect } from 'react-redux';
-import * as actionCreators from '../actions';
+import { toggleProtection, getClients } from '../actions';
+import { getStats, getStatsConfig, setStatsConfig } from '../actions/stats';
 import Dashboard from '../components/Dashboard';

 const mapStateToProps = (state) => {
-    const { dashboard } = state;
-    const props = { dashboard };
+    const { dashboard, stats } = state;
+    const props = { dashboard, stats };
     return props;
 };

+const mapDispatchToProps = {
+    toggleProtection,
+    getClients,
+    getStats,
+    getStatsConfig,
+    setStatsConfig,
+};
+
 export default connect(
     mapStateToProps,
-    actionCreators,
+    mapDispatchToProps,
 )(Dashboard);
@@ -1,13 +1,15 @@
 import { connect } from 'react-redux';
 import { initSettings, toggleSetting } from '../actions';
 import { getBlockedServices, setBlockedServices } from '../actions/services';
+import { getStatsConfig, setStatsConfig, resetStats } from '../actions/stats';
 import Settings from '../components/Settings';

 const mapStateToProps = (state) => {
-    const { settings, services } = state;
+    const { settings, services, stats } = state;
     const props = {
         settings,
         services,
+        stats,
     };
     return props;
 };
@@ -17,6 +19,9 @@ const mapDispatchToProps = {
     toggleSetting,
     getBlockedServices,
     setBlockedServices,
+    getStatsConfig,
+    setStatsConfig,
+    resetStats,
 };

 export default connect(
@@ -260,3 +260,5 @@ export const FILTERED_STATUS = {
     FILTERED_BLOCKED_SERVICE: 'FilteredBlockedService',
     REWRITE: 'Rewrite',
 };
+
+export const STATS_INTERVALS_DAYS = [1, 7, 30, 90];
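
STATS_INTERVALS_DAYS drives the radio buttons in the statistics configuration form; the selected value is the number of days sent to the new /control/stats_config endpoint. The action creators are not part of this diff, so the exact request is an assumption, but it amounts to something like:

    // Assumed request made by setStatsConfig for a 7-day retention interval
    axios.post('/control/stats_config', { interval: 7 });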
@@ -2,11 +2,12 @@ import dateParse from 'date-fns/parse';
 import dateFormat from 'date-fns/format';
 import subHours from 'date-fns/sub_hours';
 import addHours from 'date-fns/add_hours';
+import addDays from 'date-fns/add_days';
+import subDays from 'date-fns/sub_days';
 import round from 'lodash/round';
 import axios from 'axios';

 import {
-    STATS_NAMES,
     STANDARD_DNS_PORT,
     STANDARD_WEB_PORT,
     STANDARD_HTTPS_PORT,
@@ -49,29 +50,28 @@ export const normalizeLogs = logs => logs.map((log) => {
     };
 });

-export const normalizeHistory = history => Object.keys(history).map((key) => {
-    let id = STATS_NAMES[key];
-    if (!id) {
-        id = key.replace(/_/g, ' ').replace(/^\w/, c => c.toUpperCase());
+export const normalizeHistory = (history, interval) => {
+    if (interval === 1 || interval === 7) {
+        const hoursAgo = subHours(Date.now(), 24 * interval);
+        return history.map((item, index) => ({
+            x: dateFormat(addHours(hoursAgo, index), 'D MMM HH:00'),
+            y: round(item, 2),
+        }));
     }

-    const dayAgo = subHours(Date.now(), 24);
-
-    const data = history[key].map((item, index) => {
-        const formatHour = dateFormat(addHours(dayAgo, index), 'ddd HH:00');
-        const roundValue = round(item, 2);
-
-        return {
-            x: formatHour,
-            y: roundValue,
-        };
-    });
-
-    return {
-        id,
-        data,
-    };
-});
+    const daysAgo = subDays(Date.now(), interval - 1);
+    return history.map((item, index) => ({
+        x: dateFormat(addDays(daysAgo, index), 'D MMM YYYY'),
+        y: round(item, 2),
+    }));
+};
+
+export const normalizeTopStats = stats => (
+    stats.map(item => ({
+        name: Object.keys(item)[0],
+        count: Object.values(item)[0],
+    }))
+);

 export const normalizeFilteringStatus = (filteringStatus) => {
     const { enabled, filters, user_rules: userRules } = filteringStatus;
@@ -233,3 +233,11 @@ export const sortClients = (clients) => {
 export const toggleAllServices = (services, change, isSelected) => {
     services.forEach(service => change(`blocked_services.${service.id}`, isSelected));
 };
+
+export const secondsToMilliseconds = (seconds) => {
+    if (seconds) {
+        return seconds * 1000;
+    }
+
+    return seconds;
+};
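
The reshaped helpers are easiest to see on a tiny payload: normalizeTopStats flattens the { host: count } objects returned by /control/stats, normalizeHistory attaches time labels to the per-unit arrays, and secondsToMilliseconds converts avg_processing_time for display. Illustrative values only; the x labels depend on the current time:

    normalizeTopStats([{ 'example.org': 12 }, { 'adguard.com': 3 }]);
    // [{ name: 'example.org', count: 12 }, { name: 'adguard.com', count: 3 }]

    normalizeHistory([5, 7, 9], 30);
    // interval 1 or 7 gives hourly 'D MMM HH:00' labels, anything longer gives daily 'D MMM YYYY' labels:
    // [{ x: '12 May 2019', y: 5 }, { x: '13 May 2019', y: 7 }, { x: '14 May 2019', y: 9 }]

    secondsToMilliseconds(0.5); // 500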
@@ -11,6 +11,7 @@ import clients from './clients';
 import access from './access';
 import rewrites from './rewrites';
 import services from './services';
+import stats from './stats';

 const settings = handleActions({
     [actions.initSettingsRequest]: state => ({ ...state, processing: true }),
@@ -93,27 +94,6 @@ const dashboard = handleActions({
         return newState;
     },

-    [actions.getStatsRequest]: state => ({ ...state, processingStats: true }),
-    [actions.getStatsFailure]: state => ({ ...state, processingStats: false }),
-    [actions.getStatsSuccess]: (state, { payload }) => {
-        const newState = { ...state, stats: payload, processingStats: false };
-        return newState;
-    },
-
-    [actions.getTopStatsRequest]: state => ({ ...state, processingTopStats: true }),
-    [actions.getTopStatsFailure]: state => ({ ...state, processingTopStats: false }),
-    [actions.getTopStatsSuccess]: (state, { payload }) => {
-        const newState = { ...state, topStats: payload, processingTopStats: false };
-        return newState;
-    },
-
-    [actions.getStatsHistoryRequest]: state => ({ ...state, processingStatsHistory: true }),
-    [actions.getStatsHistoryFailure]: state => ({ ...state, processingStatsHistory: false }),
-    [actions.getStatsHistorySuccess]: (state, { payload }) => {
-        const newState = { ...state, statsHistory: payload, processingStatsHistory: false };
-        return newState;
-    },
-
     [actions.toggleLogStatusRequest]: state => ({ ...state, logStatusProcessing: true }),
     [actions.toggleLogStatusFailure]: state => ({ ...state, logStatusProcessing: false }),
     [actions.toggleLogStatusSuccess]: (state) => {
@@ -199,8 +179,6 @@ const dashboard = handleActions({
 }, {
     processing: true,
     isCoreRunning: false,
-    processingTopStats: true,
-    processingStats: true,
     logStatusProcessing: false,
     processingVersion: true,
     processingFiltering: true,
@@ -217,7 +195,6 @@ const dashboard = handleActions({
     dnsVersion: '',
     clients: [],
     autoClients: [],
-    topStats: [],
 });

 const queryLogs = handleActions({
@@ -230,7 +207,11 @@ const queryLogs = handleActions({
     [actions.downloadQueryLogRequest]: state => ({ ...state, logsDownloading: true }),
     [actions.downloadQueryLogFailure]: state => ({ ...state, logsDownloading: false }),
     [actions.downloadQueryLogSuccess]: state => ({ ...state, logsDownloading: false }),
-}, { getLogsProcessing: false, logsDownloading: false });
+}, {
+    getLogsProcessing: false,
+    logsDownloading: false,
+    logs: [],
+});

 const filtering = handleActions({
     [actions.setRulesRequest]: state => ({ ...state, processingRules: true }),
@@ -426,6 +407,7 @@ export default combineReducers({
     access,
     rewrites,
     services,
+    stats,
     loadingBar: loadingBarReducer,
     form: formReducer,
 });
@@ -0,0 +1,97 @@
+import { handleActions } from 'redux-actions';
+
+import * as actions from '../actions/stats';
+
+const defaultStats = {
+    dnsQueries: [],
+    blockedFiltering: [],
+    replacedParental: [],
+    replacedSafebrowsing: [],
+    topBlockedDomains: [],
+    topClients: [],
+    topQueriedDomains: [],
+    numBlockedFiltering: 0,
+    numDnsQueries: 0,
+    numReplacedParental: 0,
+    numReplacedSafebrowsing: 0,
+    numReplacedSafesearch: 0,
+    avgProcessingTime: 0,
+};
+
+const stats = handleActions(
+    {
+        [actions.getStatsConfigRequest]: state => ({ ...state, processingGetConfig: true }),
+        [actions.getStatsConfigFailure]: state => ({ ...state, processingGetConfig: false }),
+        [actions.getStatsConfigSuccess]: (state, { payload }) => ({
+            ...state,
+            interval: payload.interval,
+            processingGetConfig: false,
+        }),
+
+        [actions.setStatsConfigRequest]: state => ({ ...state, processingSetConfig: true }),
+        [actions.setStatsConfigFailure]: state => ({ ...state, processingSetConfig: false }),
+        [actions.setStatsConfigSuccess]: (state, { payload }) => ({
+            ...state,
+            interval: payload.interval,
+            processingSetConfig: false,
+        }),
+
+        [actions.getStatsRequest]: state => ({ ...state, processingStats: true }),
+        [actions.getStatsFailure]: state => ({ ...state, processingStats: false }),
+        [actions.getStatsSuccess]: (state, { payload }) => {
+            const {
+                dns_queries: dnsQueries,
+                blocked_filtering: blockedFiltering,
+                replaced_parental: replacedParental,
+                replaced_safebrowsing: replacedSafebrowsing,
+                top_blocked_domains: topBlockedDomains,
+                top_clients: topClients,
+                top_queried_domains: topQueriedDomains,
+                num_blocked_filtering: numBlockedFiltering,
+                num_dns_queries: numDnsQueries,
+                num_replaced_parental: numReplacedParental,
+                num_replaced_safebrowsing: numReplacedSafebrowsing,
+                num_replaced_safesearch: numReplacedSafesearch,
+                avg_processing_time: avgProcessingTime,
+            } = payload;
+
+            const newState = {
+                ...state,
+                processingStats: false,
+                dnsQueries,
+                blockedFiltering,
+                replacedParental,
+                replacedSafebrowsing,
+                topBlockedDomains,
+                topClients,
+                topQueriedDomains,
+                numBlockedFiltering,
+                numDnsQueries,
+                numReplacedParental,
+                numReplacedSafebrowsing,
+                numReplacedSafesearch,
+                avgProcessingTime,
+            };
+
+            return newState;
+        },
+
+        [actions.resetStatsRequest]: state => ({ ...state, processingReset: true }),
+        [actions.resetStatsFailure]: state => ({ ...state, processingReset: false }),
+        [actions.resetStatsSuccess]: state => ({
+            ...state,
+            ...defaultStats,
+            processingReset: false,
+        }),
+    },
+    {
+        processingGetConfig: false,
+        processingSetConfig: false,
+        processingStats: true,
+        processingReset: false,
+        interval: 1,
+        ...defaultStats,
+    },
+);
+
+export default stats;
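
getStatsSuccess is where the snake_case payload of GET /control/stats becomes the camelCase slice the components read. A trimmed example of that mapping (values are made up; per the commit, avg_processing_time arrives in seconds and is converted with secondsToMilliseconds at display time):

    // payload subset from GET /control/stats
    const payload = {
        num_dns_queries: 123,
        avg_processing_time: 0.05,
        dns_queries: [10, 12, 9],
    };

    // after dispatching getStatsSuccess, state.stats contains (subset):
    // { numDnsQueries: 123, avgProcessingTime: 0.05, dnsQueries: [10, 12, 9], processingStats: false }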
@@ -11,6 +11,7 @@ import (
 	"time"

 	"github.com/AdguardTeam/AdGuardHome/dnsfilter"
+	"github.com/AdguardTeam/AdGuardHome/stats"
 	"github.com/AdguardTeam/dnsproxy/proxy"
 	"github.com/AdguardTeam/dnsproxy/upstream"
 	"github.com/AdguardTeam/golibs/log"
@@ -40,7 +41,7 @@ type Server struct {
 	dnsProxy  *proxy.Proxy         // DNS proxy instance
 	dnsFilter *dnsfilter.Dnsfilter // DNS filter instance
 	queryLog  *queryLog            // Query log instance
-	stats     *stats               // General server statistics
+	stats     stats.Stats

 	AllowedClients    map[string]bool // IP addresses of whitelist clients
 	DisallowedClients map[string]bool // IP addresses of clients that should be blocked
@@ -55,22 +56,14 @@ type Server struct {
 // NewServer creates a new instance of the dnsforward.Server
 // baseDir is the base directory for query logs
 // Note: this function must be called only once
-func NewServer(baseDir string) *Server {
+func NewServer(baseDir string, stats stats.Stats) *Server {
 	s := &Server{
 		queryLog: newQueryLog(baseDir),
-		stats:    newStats(),
-	}
-
-	log.Tracef("Loading stats from querylog")
-	err := s.queryLog.fillStatsFromQueryLog(s.stats)
-	if err != nil {
-		log.Error("failed to load stats from querylog: %s", err)
 	}
+	s.stats = stats

 	log.Printf("Start DNS server periodic jobs")
 	go s.queryLog.periodicQueryLogRotate()
-	go s.queryLog.runningTop.periodicHourlyTopRotate()
-	go s.stats.statsRotator()
 	return s
 }
@@ -357,38 +350,6 @@ func (s *Server) GetQueryLog() []map[string]interface{} {
 	return s.queryLog.getQueryLog()
 }

-// GetStatsTop returns the current stop stats
-func (s *Server) GetStatsTop() *StatsTop {
-	s.RLock()
-	defer s.RUnlock()
-	return s.queryLog.runningTop.getStatsTop()
-}
-
-// PurgeStats purges current server stats
-func (s *Server) PurgeStats() {
-	s.Lock()
-	defer s.Unlock()
-	s.stats.purgeStats()
-}
-
-// GetAggregatedStats returns aggregated stats data for the 24 hours
-func (s *Server) GetAggregatedStats() map[string]interface{} {
-	s.RLock()
-	defer s.RUnlock()
-	return s.stats.getAggregatedStats()
-}
-
-// GetStatsHistory gets stats history aggregated by the specified time unit
-// timeUnit is either time.Second, time.Minute, time.Hour, or 24*time.Hour
-// start is start of the time range
-// end is end of the time range
-// returns nil if time unit is not supported
-func (s *Server) GetStatsHistory(timeUnit time.Duration, startTime time.Time, endTime time.Time) (map[string]interface{}, error) {
-	s.RLock()
-	defer s.RUnlock()
-	return s.stats.getStatsHistory(timeUnit, startTime, endTime)
-}
-
 // Return TRUE if this client should be blocked
 func (s *Server) isBlockedIP(ip string) bool {
 	if len(s.AllowedClients) != 0 || len(s.AllowedClientsIPNet) != 0 {
@@ -507,21 +468,61 @@ func (s *Server) handleDNSRequest(p *proxy.Proxy, d *proxy.DNSContext) error {
 		shouldLog = false
 	}

-	if s.conf.QueryLogEnabled && shouldLog {
 	elapsed := time.Since(start)
+	if s.conf.QueryLogEnabled && shouldLog {
 		upstreamAddr := ""
 		if d.Upstream != nil {
 			upstreamAddr = d.Upstream.Address()
 		}
-		entry := s.queryLog.logRequest(msg, d.Res, res, elapsed, d.Addr, upstreamAddr)
-		if entry != nil {
-			s.stats.incrementCounters(entry)
-		}
+		_ = s.queryLog.logRequest(msg, d.Res, res, elapsed, d.Addr, upstreamAddr)
 	}

+	s.updateStats(d, elapsed, *res)
+
 	return nil
 }

+func (s *Server) updateStats(d *proxy.DNSContext, elapsed time.Duration, res dnsfilter.Result) {
+	if s.stats == nil {
+		return
+	}
+
+	e := stats.Entry{}
+	e.Domain = strings.ToLower(d.Req.Question[0].Name)
+	e.Domain = e.Domain[:len(e.Domain)-1] // remove last "."
+	switch addr := d.Addr.(type) {
+	case *net.UDPAddr:
+		e.Client = addr.IP
+	case *net.TCPAddr:
+		e.Client = addr.IP
+	}
+	e.Time = uint(elapsed / 1000)
+	switch res.Reason {
+
+	case dnsfilter.NotFilteredNotFound:
+		fallthrough
+	case dnsfilter.NotFilteredWhiteList:
+		fallthrough
+	case dnsfilter.NotFilteredError:
+		e.Result = stats.RNotFiltered
+
+	case dnsfilter.FilteredSafeBrowsing:
+		e.Result = stats.RSafeBrowsing
+	case dnsfilter.FilteredParental:
+		e.Result = stats.RParental
+	case dnsfilter.FilteredSafeSearch:
+		e.Result = stats.RSafeSearch
+
+	case dnsfilter.FilteredBlackList:
+		fallthrough
+	case dnsfilter.FilteredInvalid:
+		fallthrough
+	case dnsfilter.FilteredBlockedService:
+		e.Result = stats.RFiltered
+	}
+	s.stats.Update(e)
+}
+
 // filterDNSRequest applies the dnsFilter and sets d.Res if the request was filtered
 func (s *Server) filterDNSRequest(d *proxy.DNSContext) (*dnsfilter.Result, error) {
 	var res dnsfilter.Result
@@ -48,10 +48,6 @@ func TestServer(t *testing.T) {
 	// check query log and stats
 	log := s.GetQueryLog()
 	assert.Equal(t, 1, len(log), "Log size")
-	stats := s.GetStatsTop()
-	assert.Equal(t, 1, len(stats.Domains), "Top domains length")
-	assert.Equal(t, 0, len(stats.Blocked), "Top blocked length")
-	assert.Equal(t, 1, len(stats.Clients), "Top clients length")

 	// message over TCP
 	req = createGoogleATestMessage()
@@ -66,11 +62,6 @@ func TestServer(t *testing.T) {
 	// check query log and stats again
 	log = s.GetQueryLog()
 	assert.Equal(t, 2, len(log), "Log size")
-	stats = s.GetStatsTop()
-	// Length did not change as we queried the same domain
-	assert.Equal(t, 1, len(stats.Domains), "Top domains length")
-	assert.Equal(t, 0, len(stats.Blocked), "Top blocked length")
-	assert.Equal(t, 1, len(stats.Clients), "Top clients length")

 	err = s.Stop()
 	if err != nil {
@@ -100,10 +91,6 @@ func TestServerWithProtectionDisabled(t *testing.T) {
 	// check query log and stats
 	log := s.GetQueryLog()
 	assert.Equal(t, 1, len(log), "Log size")
-	stats := s.GetStatsTop()
-	assert.Equal(t, 1, len(stats.Domains), "Top domains length")
-	assert.Equal(t, 0, len(stats.Blocked), "Top blocked length")
-	assert.Equal(t, 1, len(stats.Clients), "Top clients length")

 	err = s.Stop()
 	if err != nil {
@@ -195,11 +182,6 @@ func TestSafeSearch(t *testing.T) {
 		exchangeAndAssertResponse(t, &client, addr, host, "213.180.193.56")
 	}
-
-	// Check aggregated stats
-	assert.Equal(t, s.GetAggregatedStats()["replaced_safesearch"], float64(len(yandexDomains)))
-	assert.Equal(t, s.GetAggregatedStats()["blocked_filtering"], float64(len(yandexDomains)))
-	assert.Equal(t, s.GetAggregatedStats()["dns_queries"], float64(len(yandexDomains)))

 	// Let's lookup for google safesearch ip
 	ips, err := net.LookupIP("forcesafesearch.google.com")
 	if err != nil {
@@ -220,27 +202,6 @@ func TestSafeSearch(t *testing.T) {
 		exchangeAndAssertResponse(t, &client, addr, host, ip.String())
 	}
-
-	// Check aggregated stats
-	assert.Equal(t, s.GetAggregatedStats()["replaced_safesearch"], float64(len(yandexDomains)+len(googleDomains)))
-	assert.Equal(t, s.GetAggregatedStats()["blocked_filtering"], float64(len(yandexDomains)+len(googleDomains)))
-	assert.Equal(t, s.GetAggregatedStats()["dns_queries"], float64(len(yandexDomains)+len(googleDomains)))
-
-	// Do one more exchange
-	exchangeAndAssertResponse(t, &client, addr, "google-public-dns-a.google.com.", "8.8.8.8")
-
-	// Check aggregated stats
-	assert.Equal(t, s.GetAggregatedStats()["replaced_safesearch"], float64(len(yandexDomains)+len(googleDomains)))
-	assert.Equal(t, s.GetAggregatedStats()["blocked_filtering"], float64(len(yandexDomains)+len(googleDomains)))
-	assert.Equal(t, s.GetAggregatedStats()["dns_queries"], float64(len(yandexDomains)+len(googleDomains)+1))
-
-	// Count of blocked domains (there is `yandex.com` duplicate in yandexDomains array)
-	blockedCount := len(yandexDomains) - 1 + len(googleDomains)
-	assert.Equal(t, len(s.GetStatsTop().Blocked), blockedCount)
-
-	// Count of domains (blocked domains + `google-public-dns-a.google.com`)
-	domainsCount := blockedCount + 1
-	assert.Equal(t, len(s.GetStatsTop().Domains), domainsCount)

 	err = s.Stop()
 	if err != nil {
 		t.Fatalf("Can not stopd server cause: %s", err)
@@ -272,10 +233,6 @@ func TestInvalidRequest(t *testing.T) {
 	// invalid requests aren't written to the query log
 	log := s.GetQueryLog()
 	assert.Equal(t, 0, len(log), "Log size")
-	stats := s.GetStatsTop()
-	assert.Equal(t, 0, len(stats.Domains), "Top domains length")
-	assert.Equal(t, 0, len(stats.Blocked), "Top blocked length")
-	assert.Equal(t, 0, len(stats.Clients), "Top clients length")

 	err = s.Stop()
 	if err != nil {
@@ -313,10 +270,6 @@ func TestBlockedRequest(t *testing.T) {
 	// check query log and stats
 	log := s.GetQueryLog()
 	assert.Equal(t, 1, len(log), "Log size")
-	stats := s.GetStatsTop()
-	assert.Equal(t, 1, len(stats.Domains), "Top domains length")
-	assert.Equal(t, 1, len(stats.Blocked), "Top blocked length")
-	assert.Equal(t, 1, len(stats.Clients), "Top clients length")

 	err = s.Stop()
 	if err != nil {
@@ -362,10 +315,6 @@ func TestNullBlockedRequest(t *testing.T) {
 	// check query log and stats
 	log := s.GetQueryLog()
 	assert.Equal(t, 1, len(log), "Log size")
-	stats := s.GetStatsTop()
-	assert.Equal(t, 1, len(stats.Domains), "Top domains length")
-	assert.Equal(t, 1, len(stats.Blocked), "Top blocked length")
-	assert.Equal(t, 1, len(stats.Clients), "Top clients length")

 	err = s.Stop()
 	if err != nil {
@@ -410,10 +359,6 @@ func TestBlockedByHosts(t *testing.T) {
 	// check query log and stats
 	log := s.GetQueryLog()
 	assert.Equal(t, 1, len(log), "Log size")
-	stats := s.GetStatsTop()
-	assert.Equal(t, 1, len(stats.Domains), "Top domains length")
-	assert.Equal(t, 1, len(stats.Blocked), "Top blocked length")
-	assert.Equal(t, 1, len(stats.Clients), "Top clients length")

 	err = s.Stop()
 	if err != nil {
@@ -469,10 +414,6 @@ func TestBlockedBySafeBrowsing(t *testing.T) {
 	// check query log and stats
 	log := s.GetQueryLog()
 	assert.Equal(t, 1, len(log), "Log size")
-	stats := s.GetStatsTop()
-	assert.Equal(t, 1, len(stats.Domains), "Top domains length")
-	assert.Equal(t, 1, len(stats.Blocked), "Top blocked length")
-	assert.Equal(t, 1, len(stats.Clients), "Top clients length")

 	err = s.Stop()
 	if err != nil {
@@ -481,7 +422,7 @@ func TestBlockedBySafeBrowsing(t *testing.T) {
 }

 func createTestServer(t *testing.T) *Server {
-	s := NewServer(createDataDir(t))
+	s := NewServer(createDataDir(t), nil)
 	s.conf.UDPListenAddr = &net.UDPAddr{Port: 0}
 	s.conf.TCPListenAddr = &net.TCPAddr{Port: 0}
@@ -26,7 +26,6 @@
 // queryLog is a structure that writes and reads the DNS query log
 type queryLog struct {
 	logFile    string  // path to the log file
-	runningTop *dayTop // current top charts

 	logBufferLock sync.RWMutex
 	logBuffer     []*logEntry
@@ -41,9 +40,7 @@ type queryLog struct {
 func newQueryLog(baseDir string) *queryLog {
 	l := &queryLog{
 		logFile: filepath.Join(baseDir, queryLogFileName),
-		runningTop: &dayTop{},
 	}
-	l.runningTop.init()
 	return l
 }

@@ -112,13 +109,6 @@ func (l *queryLog) logRequest(question *dns.Msg, answer *dns.Msg, result *dnsfil
 	}
 	l.queryLogLock.Unlock()

-	// add it to running top
-	err = l.runningTop.addEntry(&entry, question, now)
-	if err != nil {
-		log.Printf("Failed to add entry to running top: %s", err)
-		// don't do failure, just log
-	}
-
 	// if buffer needs to be flushed to disk, do it now
 	if needFlush {
 		// write to file
@@ -178,99 +178,3 @@ func (l *queryLog) periodicQueryLogRotate() {
 		}
 	}
 }
-
-func (l *queryLog) genericLoader(onEntry func(entry *logEntry) error, needMore func() bool, timeWindow time.Duration) error {
-	now := time.Now()
-	// read from querylog files, try newest file first
-	var files []string
-
-	if enableGzip {
-		files = []string{
-			l.logFile + ".gz",
-			l.logFile + ".gz.1",
-		}
-	} else {
-		files = []string{
-			l.logFile,
-			l.logFile + ".1",
-		}
-	}
-
-	// read from all files
-	for _, file := range files {
-		if !needMore() {
-			break
-		}
-		if _, err := os.Stat(file); os.IsNotExist(err) {
-			// do nothing, file doesn't exist
-			continue
-		}
-
-		f, err := os.Open(file)
-		if err != nil {
-			log.Error("Failed to open file \"%s\": %s", file, err)
-			// try next file
-			continue
-		}
-		defer f.Close()
-
-		var d *json.Decoder
-
-		if enableGzip {
-			zr, err := gzip.NewReader(f)
-			if err != nil {
-				log.Error("Failed to create gzip reader: %s", err)
-				continue
-			}
-			defer zr.Close()
-			d = json.NewDecoder(zr)
-		} else {
-			d = json.NewDecoder(f)
-		}
-
-		i := 0
-		over := 0
-		max := 10000 * time.Second
-		var sum time.Duration
-		// entries on file are in oldest->newest order
-		// we want maxLen newest
-		for d.More() {
-			if !needMore() {
-				break
-			}
-			var entry logEntry
-			err := d.Decode(&entry)
-			if err != nil {
-				log.Error("Failed to decode: %s", err)
-				// next entry can be fine, try more
-				continue
-			}
-
-			if now.Sub(entry.Time) > timeWindow {
-				// log.Tracef("skipping entry") // debug logging
-				continue
-			}
-
-			if entry.Elapsed > max {
-				over++
-			} else {
-				sum += entry.Elapsed
-			}
-
-			i++
-			err = onEntry(&entry)
-			if err != nil {
-				return err
-			}
-		}
-		elapsed := time.Since(now)
-		var perunit time.Duration
-		var avg time.Duration
-		if i > 0 {
-			perunit = elapsed / time.Duration(i)
-			avg = sum / time.Duration(i)
-		}
-		log.Debug("file \"%s\": read %d entries in %v, %v/entry, %v over %v, %v avg", file, i, elapsed, perunit, over, max, avg)
-	}
-	return nil
-}
@ -1,315 +0,0 @@
-package dnsforward
-
-import (
-	"fmt"
-	"os"
-	"path"
-	"runtime"
-	"strings"
-	"sync"
-	"time"
-
-	"github.com/AdguardTeam/golibs/log"
-	"github.com/bluele/gcache"
-	"github.com/miekg/dns"
-)
-
-type hourTop struct {
-	domains gcache.Cache
-	blocked gcache.Cache
-	clients gcache.Cache
-
-	mutex sync.RWMutex
-}
-
-func (h *hourTop) init() {
-	h.domains = gcache.New(queryLogTopSize).LRU().Build()
-	h.blocked = gcache.New(queryLogTopSize).LRU().Build()
-	h.clients = gcache.New(queryLogTopSize).LRU().Build()
-}
-
-type dayTop struct {
-	hours     []*hourTop
-	hoursLock sync.RWMutex // writelock this lock ONLY WHEN rotating or intializing hours!
-
-	loaded     bool
-	loadedLock sync.Mutex
-}
-
-func (d *dayTop) init() {
-	d.hoursWriteLock()
-	for i := 0; i < 24; i++ {
-		hour := hourTop{}
-		hour.init()
-		d.hours = append(d.hours, &hour)
-	}
-	d.hoursWriteUnlock()
-}
-
-func (d *dayTop) rotateHourlyTop() {
-	log.Printf("Rotating hourly top")
-	hour := &hourTop{}
-	hour.init()
-	d.hoursWriteLock()
-	d.hours = append([]*hourTop{hour}, d.hours...)
-	d.hours = d.hours[:24]
-	d.hoursWriteUnlock()
-}
-
-func (d *dayTop) periodicHourlyTopRotate() {
-	t := time.Hour
-	for range time.Tick(t) {
-		d.rotateHourlyTop()
-	}
-}
-
-func (h *hourTop) incrementValue(key string, cache gcache.Cache) error {
-	h.Lock()
-	defer h.Unlock()
-	ivalue, err := cache.Get(key)
-	if err == gcache.KeyNotFoundError {
-		// we just set it and we're done
-		err = cache.Set(key, 1)
-		if err != nil {
-			log.Printf("Failed to set hourly top value: %s", err)
-			return err
-		}
-		return nil
-	}
-
-	if err != nil {
-		log.Printf("gcache encountered an error during get: %s", err)
-		return err
-	}
-
-	cachedValue, ok := ivalue.(int)
-	if !ok {
-		err = fmt.Errorf("SHOULD NOT HAPPEN: gcache has non-int as value: %v", ivalue)
-		log.Println(err)
-		return err
-	}
-
-	err = cache.Set(key, cachedValue+1)
-	if err != nil {
-		log.Printf("Failed to set hourly top value: %s", err)
-		return err
-	}
-	return nil
-}
-
-func (h *hourTop) incrementDomains(key string) error {
-	return h.incrementValue(key, h.domains)
-}
-
-func (h *hourTop) incrementBlocked(key string) error {
-	return h.incrementValue(key, h.blocked)
-}
-
-func (h *hourTop) incrementClients(key string) error {
-	return h.incrementValue(key, h.clients)
-}
-
-// if does not exist -- return 0
-func (h *hourTop) lockedGetValue(key string, cache gcache.Cache) (int, error) {
-	ivalue, err := cache.Get(key)
-	if err == gcache.KeyNotFoundError {
-		return 0, nil
-	}
-
-	if err != nil {
-		log.Printf("gcache encountered an error during get: %s", err)
-		return 0, err
-	}
-
-	value, ok := ivalue.(int)
-	if !ok {
-		err := fmt.Errorf("SHOULD NOT HAPPEN: gcache has non-int as value: %v", ivalue)
-		log.Println(err)
-		return 0, err
-	}
-
-	return value, nil
-}
-
-func (h *hourTop) lockedGetDomains(key string) (int, error) {
-	return h.lockedGetValue(key, h.domains)
-}
-
-func (h *hourTop) lockedGetBlocked(key string) (int, error) {
-	return h.lockedGetValue(key, h.blocked)
-}
-
-func (h *hourTop) lockedGetClients(key string) (int, error) {
-	return h.lockedGetValue(key, h.clients)
-}
-
-func (d *dayTop) addEntry(entry *logEntry, q *dns.Msg, now time.Time) error {
-	// figure out which hour bucket it belongs to
-	hour := int(now.Sub(entry.Time).Hours())
-	if hour >= 24 {
-		log.Printf("t %v is >24 hours ago, ignoring", entry.Time)
-		return nil
-	}
-
-	// if a DNS query doesn't have questions, do nothing
-	if len(q.Question) == 0 {
-		return nil
-	}
-
-	hostname := strings.ToLower(strings.TrimSuffix(q.Question[0].Name, "."))
-
-	// if question hostname is empty, do nothing
-	if hostname == "" {
-		return nil
-	}
-
-	// get value, if not set, crate one
-	d.hoursReadLock()
-	defer d.hoursReadUnlock()
-	err := d.hours[hour].incrementDomains(hostname)
-	if err != nil {
-		log.Printf("Failed to increment value: %s", err)
-		return err
-	}
-
-	if entry.Result.IsFiltered {
-		err := d.hours[hour].incrementBlocked(hostname)
-		if err != nil {
-			log.Printf("Failed to increment value: %s", err)
-			return err
-		}
-	}
-
-	if len(entry.IP) > 0 {
-		err := d.hours[hour].incrementClients(entry.IP)
-		if err != nil {
-			log.Printf("Failed to increment value: %s", err)
-			return err
-		}
-	}
-
-	return nil
-}
-
-func (l *queryLog) fillStatsFromQueryLog(s *stats) error {
-	now := time.Now()
-	l.runningTop.loadedWriteLock()
-	defer l.runningTop.loadedWriteUnlock()
-	if l.runningTop.loaded {
-		return nil
-	}
-	onEntry := func(entry *logEntry) error {
-		if len(entry.Question) == 0 {
-			log.Printf("entry question is absent, skipping")
-			return nil
-		}
-
-		if entry.Time.After(now) {
-			log.Printf("t %v vs %v is in the future, ignoring", entry.Time, now)
-			return nil
-		}
-
-		q := new(dns.Msg)
-		if err := q.Unpack(entry.Question); err != nil {
-			log.Printf("failed to unpack dns message question: %s", err)
-			return err
-		}
-
-		if len(q.Question) != 1 {
-			log.Printf("malformed dns message, has no questions, skipping")
-			return nil
-		}
-
-		err := l.runningTop.addEntry(entry, q, now)
-		if err != nil {
-			log.Printf("Failed to add entry to running top: %s", err)
-			return err
-		}
-
-		l.queryLogLock.Lock()
-		l.queryLogCache = append(l.queryLogCache, entry)
-		if len(l.queryLogCache) > queryLogSize {
-			toremove := len(l.queryLogCache) - queryLogSize
-			l.queryLogCache = l.queryLogCache[toremove:]
-		}
-		l.queryLogLock.Unlock()
-
-		s.incrementCounters(entry)
-		return nil
-	}
-
-	needMore := func() bool { return true }
-	err := l.genericLoader(onEntry, needMore, queryLogTimeLimit)
-	if err != nil {
-		log.Printf("Failed to load entries from querylog: %s", err)
-		return err
-	}
-
-	l.runningTop.loaded = true
-	return nil
-}
-
-// StatsTop represents top stat charts
-type StatsTop struct {
-	Domains map[string]int // Domains - top requested domains
-	Blocked map[string]int // Blocked - top blocked domains
-	Clients map[string]int // Clients - top DNS clients
-}
-
-// getStatsTop returns the current top stats
-func (d *dayTop) getStatsTop() *StatsTop {
-	s := &StatsTop{
-		Domains: map[string]int{},
-		Blocked: map[string]int{},
-		Clients: map[string]int{},
-	}
-
-	do := func(keys []interface{}, getter func(key string) (int, error), result map[string]int) {
-		for _, ikey := range keys {
-			key, ok := ikey.(string)
-			if !ok {
-				continue
-			}
-			value, err := getter(key)
-			if err != nil {
-				log.Printf("Failed to get top domains value for %v: %s", key, err)
-				return
-			}
-			result[key] += value
-		}
-	}
-
-	d.hoursReadLock()
-	for hour := 0; hour < 24; hour++ {
-		d.hours[hour].RLock()
-		do(d.hours[hour].domains.Keys(false), d.hours[hour].lockedGetDomains, s.Domains)
-		do(d.hours[hour].blocked.Keys(false), d.hours[hour].lockedGetBlocked, s.Blocked)
-		do(d.hours[hour].clients.Keys(false), d.hours[hour].lockedGetClients, s.Clients)
-		d.hours[hour].RUnlock()
-	}
-	d.hoursReadUnlock()
-
-	return s
-}
-
-func (d *dayTop) hoursWriteLock()    { tracelock(); d.hoursLock.Lock() }
-func (d *dayTop) hoursWriteUnlock()  { tracelock(); d.hoursLock.Unlock() }
-func (d *dayTop) hoursReadLock()     { tracelock(); d.hoursLock.RLock() }
-func (d *dayTop) hoursReadUnlock()   { tracelock(); d.hoursLock.RUnlock() }
-func (d *dayTop) loadedWriteLock()   { tracelock(); d.loadedLock.Lock() }
-func (d *dayTop) loadedWriteUnlock() { tracelock(); d.loadedLock.Unlock() }
-
-func (h *hourTop) Lock()    { tracelock(); h.mutex.Lock() }
-func (h *hourTop) RLock()   { tracelock(); h.mutex.RLock() }
-func (h *hourTop) RUnlock() { tracelock(); h.mutex.RUnlock() }
-func (h *hourTop) Unlock()  { tracelock(); h.mutex.Unlock() }
-
-func tracelock() {
-	if false { // not commented out to make code checked during compilation
-		pc := make([]uintptr, 10) // at least 1 entry needed
-		runtime.Callers(2, pc)
-		f := path.Base(runtime.FuncForPC(pc[1]).Name())
-		lockf := path.Base(runtime.FuncForPC(pc[0]).Name())
-		fmt.Fprintf(os.Stderr, "%s(): %s\n", f, lockf)
-	}
-}

@ -1,355 +0,0 @@
-package dnsforward
-
-import (
-	"fmt"
-	"sync"
-	"time"
-
-	"github.com/AdguardTeam/AdGuardHome/dnsfilter"
-)
-
-// how far back to keep the stats
-const statsHistoryElements = 60 + 1 // +1 for calculating delta
-
-// entries for single time period (for example all per-second entries)
-type statsEntries map[string][statsHistoryElements]float64
-
-// each periodic stat is a map of arrays
-type periodicStats struct {
-	entries    statsEntries
-	period     time.Duration // how long one entry lasts
-	lastRotate time.Time     // last time this data was rotated
-
-	sync.RWMutex
-}
-
-// stats is the DNS server historical statistics
-type stats struct {
-	perSecond periodicStats
-	perMinute periodicStats
-	perHour   periodicStats
-	perDay    periodicStats
-
-	requests             *counter   // total number of requests
-	filtered             *counter   // total number of filtered requests
-	filteredLists        *counter   // total number of requests blocked by filter lists
-	filteredSafebrowsing *counter   // total number of requests blocked by safebrowsing
-	filteredParental     *counter   // total number of requests blocked by the parental control
-	whitelisted          *counter   // total number of requests whitelisted by filter lists
-	safesearch           *counter   // total number of requests for which safe search rules were applied
-	errorsTotal          *counter   // total number of errors
-	elapsedTime          *histogram // requests duration histogram
-}
-
-// initializes an empty stats structure
-func newStats() *stats {
-	s := &stats{
-		requests:             newDNSCounter("requests_total"),
-		filtered:             newDNSCounter("filtered_total"),
-		filteredLists:        newDNSCounter("filtered_lists_total"),
-		filteredSafebrowsing: newDNSCounter("filtered_safebrowsing_total"),
-		filteredParental:     newDNSCounter("filtered_parental_total"),
-		whitelisted:          newDNSCounter("whitelisted_total"),
-		safesearch:           newDNSCounter("safesearch_total"),
-		errorsTotal:          newDNSCounter("errors_total"),
-		elapsedTime:          newDNSHistogram("request_duration"),
-	}
-
-	// Initializes empty per-sec/minute/hour/day stats
-	s.purgeStats()
-	return s
-}
-
-func initPeriodicStats(periodic *periodicStats, period time.Duration) {
-	periodic.Lock()
-	periodic.entries = statsEntries{}
-	periodic.lastRotate = time.Now()
-	periodic.period = period
-	periodic.Unlock()
-}
-
-func (s *stats) purgeStats() {
-	initPeriodicStats(&s.perSecond, time.Second)
-	initPeriodicStats(&s.perMinute, time.Minute)
-	initPeriodicStats(&s.perHour, time.Hour)
-	initPeriodicStats(&s.perDay, time.Hour*24)
-}
-
-func (p *periodicStats) Inc(name string, when time.Time) {
-	// calculate how many periods ago this happened
-	elapsed := int64(time.Since(when) / p.period)
-	// log.Tracef("%s: %v as %v -> [%v]", name, time.Since(when), p.period, elapsed)
-	if elapsed >= statsHistoryElements {
-		return // outside of our timeframe
-	}
-	p.Lock()
-	currentValues := p.entries[name]
-	currentValues[elapsed]++
-	p.entries[name] = currentValues
-	p.Unlock()
-}
-
-func (p *periodicStats) Observe(name string, when time.Time, value float64) {
-	// calculate how many periods ago this happened
-	elapsed := int64(time.Since(when) / p.period)
-	// log.Tracef("%s: %v as %v -> [%v]", name, time.Since(when), p.period, elapsed)
-	if elapsed >= statsHistoryElements {
-		return // outside of our timeframe
-	}
-	p.Lock()
-	{
-		countname := name + "_count"
-		currentValues := p.entries[countname]
-		v := currentValues[elapsed]
-		// log.Tracef("Will change p.entries[%s][%d] from %v to %v", countname, elapsed, value, value+1)
-		v++
-		currentValues[elapsed] = v
-		p.entries[countname] = currentValues
-	}
-	{
-		totalname := name + "_sum"
-		currentValues := p.entries[totalname]
-		currentValues[elapsed] += value
-		p.entries[totalname] = currentValues
-	}
-	p.Unlock()
-}
-
-func (p *periodicStats) statsRotate(now time.Time) {
-	p.Lock()
-	rotations := int64(now.Sub(p.lastRotate) / p.period)
-	if rotations > statsHistoryElements {
-		rotations = statsHistoryElements
-	}
-	// calculate how many times we should rotate
-	for r := int64(0); r < rotations; r++ {
-		for key, values := range p.entries {
-			newValues := [statsHistoryElements]float64{}
-			for i := 1; i < len(values); i++ {
-				newValues[i] = values[i-1]
-			}
-			p.entries[key] = newValues
-		}
-	}
-	if rotations > 0 {
-		p.lastRotate = now
-	}
-	p.Unlock()
-}
-
-func (s *stats) statsRotator() {
-	for range time.Tick(time.Second) {
-		now := time.Now()
-		s.perSecond.statsRotate(now)
-		s.perMinute.statsRotate(now)
-		s.perHour.statsRotate(now)
-		s.perDay.statsRotate(now)
-	}
-}
-
-// counter that wraps around prometheus Counter but also adds to periodic stats
-type counter struct {
-	name  string // used as key in periodic stats
-	value int64
-
-	sync.Mutex
-}
-
-func newDNSCounter(name string) *counter {
-	// log.Tracef("called")
-	return &counter{
-		name: name,
-	}
-}
-
-func (s *stats) incWithTime(c *counter, when time.Time) {
-	s.perSecond.Inc(c.name, when)
-	s.perMinute.Inc(c.name, when)
-	s.perHour.Inc(c.name, when)
-	s.perDay.Inc(c.name, when)
-	c.Lock()
-	c.value++
-	c.Unlock()
-}
-
-type histogram struct {
-	name  string // used as key in periodic stats
-	count int64
-	total float64
-
-	sync.Mutex
-}
-
-func newDNSHistogram(name string) *histogram {
-	return &histogram{
-		name: name,
-	}
-}
-
-func (s *stats) observeWithTime(h *histogram, value float64, when time.Time) {
-	s.perSecond.Observe(h.name, when, value)
-	s.perMinute.Observe(h.name, when, value)
-	s.perHour.Observe(h.name, when, value)
-	s.perDay.Observe(h.name, when, value)
-	h.Lock()
-	h.count++
-	h.total += value
-	h.Unlock()
-}
-
-// -----
-// stats
-// -----
-func (s *stats) incrementCounters(entry *logEntry) {
-	s.incWithTime(s.requests, entry.Time)
-	if entry.Result.IsFiltered {
-		s.incWithTime(s.filtered, entry.Time)
-	}
-
-	switch entry.Result.Reason {
-	case dnsfilter.NotFilteredWhiteList:
-		s.incWithTime(s.whitelisted, entry.Time)
-	case dnsfilter.NotFilteredError:
-		s.incWithTime(s.errorsTotal, entry.Time)
-	case dnsfilter.FilteredBlackList:
-		s.incWithTime(s.filteredLists, entry.Time)
-	case dnsfilter.FilteredSafeBrowsing:
-		s.incWithTime(s.filteredSafebrowsing, entry.Time)
-	case dnsfilter.FilteredParental:
-		s.incWithTime(s.filteredParental, entry.Time)
-	case dnsfilter.FilteredInvalid:
-		// do nothing
-	case dnsfilter.FilteredSafeSearch:
-		s.incWithTime(s.safesearch, entry.Time)
-	}
-	s.observeWithTime(s.elapsedTime, entry.Elapsed.Seconds(), entry.Time)
-}
-
-// getAggregatedStats returns aggregated stats data for the 24 hours
-func (s *stats) getAggregatedStats() map[string]interface{} {
-	const numHours = 24
-	historical := s.generateMapFromStats(&s.perHour, 0, numHours)
-	// sum them up
-	summed := map[string]interface{}{}
-	for key, values := range historical {
-		summedValue := 0.0
-		floats, ok := values.([]float64)
-		if !ok {
-			continue
-		}
-		for _, v := range floats {
-			summedValue += v
-		}
-		summed[key] = summedValue
-	}
-	// don't forget to divide by number of elements in returned slice
-	if val, ok := summed["avg_processing_time"]; ok {
-		if flval, flok := val.(float64); flok {
-			flval /= numHours
-			summed["avg_processing_time"] = flval
-		}
-	}
-
-	summed["stats_period"] = "24 hours"
-	return summed
-}
-
-func (s *stats) generateMapFromStats(stats *periodicStats, start int, end int) map[string]interface{} {
-	stats.RLock()
-	defer stats.RUnlock()
-
-	// clamp
-	start = clamp(start, 0, statsHistoryElements)
-	end = clamp(end, 0, statsHistoryElements)
-
-	avgProcessingTime := make([]float64, 0)
-
-	count := getReversedSlice(stats.entries[s.elapsedTime.name+"_count"], start, end)
-	sum := getReversedSlice(stats.entries[s.elapsedTime.name+"_sum"], start, end)
-	for i := 0; i < len(count); i++ {
-		var avg float64
-		if count[i] != 0 {
-			avg = sum[i] / count[i]
-			avg *= 1000
-		}
-		avgProcessingTime = append(avgProcessingTime, avg)
-	}
-
-	result := map[string]interface{}{
-		"dns_queries":           getReversedSlice(stats.entries[s.requests.name], start, end),
-		"blocked_filtering":     getReversedSlice(stats.entries[s.filtered.name], start, end),
-		"replaced_safebrowsing": getReversedSlice(stats.entries[s.filteredSafebrowsing.name], start, end),
-		"replaced_safesearch":   getReversedSlice(stats.entries[s.safesearch.name], start, end),
-		"replaced_parental":     getReversedSlice(stats.entries[s.filteredParental.name], start, end),
-		"avg_processing_time":   avgProcessingTime,
-	}
-	return result
-}
-
-// getStatsHistory gets stats history aggregated by the specified time unit
-// timeUnit is either time.Second, time.Minute, time.Hour, or 24*time.Hour
-// start is start of the time range
-// end is end of the time range
-// returns nil if time unit is not supported
-func (s *stats) getStatsHistory(timeUnit time.Duration, startTime time.Time, endTime time.Time) (map[string]interface{}, error) {
-	var stats *periodicStats
-
-	switch timeUnit {
-	case time.Second:
-		stats = &s.perSecond
-	case time.Minute:
-		stats = &s.perMinute
-	case time.Hour:
-		stats = &s.perHour
-	case 24 * time.Hour:
-		stats = &s.perDay
-	}
-
-	if stats == nil {
-		return nil, fmt.Errorf("unsupported time unit: %v", timeUnit)
-	}
-
-	now := time.Now()
-
-	// check if start and time times are within supported time range
-	timeRange := timeUnit * statsHistoryElements
-	if startTime.Add(timeRange).Before(now) {
-		return nil, fmt.Errorf("start_time parameter is outside of supported range: %s", startTime.String())
-	}
-	if endTime.Add(timeRange).Before(now) {
-		return nil, fmt.Errorf("end_time parameter is outside of supported range: %s", startTime.String())
-	}
-
-	// calculate start and end of our array
-	// basically it's how many hours/minutes/etc have passed since now
-	start := int(now.Sub(endTime) / timeUnit)
-	end := int(now.Sub(startTime) / timeUnit)
-
-	// swap them around if they're inverted
-	if start > end {
-		start, end = end, start
-	}
-
-	return s.generateMapFromStats(stats, start, end), nil
-}
-
-func clamp(value, low, high int) int {
-	if value < low {
-		return low
-	}
-	if value > high {
-		return high
-	}
-	return value
-}
-
-// --------------------------
-// helper functions for stats
-// --------------------------
-func getReversedSlice(input [statsHistoryElements]float64, start int, end int) []float64 {
-	output := make([]float64, 0)
-	for i := start; i <= end; i++ {
-		output = append([]float64{input[i]}, output...)
-	}
-	return output
-}

4 go.mod
@ -7,7 +7,9 @@ require (
	github.com/AdguardTeam/golibs v0.2.1
	github.com/AdguardTeam/urlfilter v0.5.0
	github.com/NYTimes/gziphandler v1.1.1
+	github.com/asaskevich/govalidator v0.0.0-20180720115003-f9ffefc3facf
	github.com/bluele/gcache v0.0.0-20190518031135-bc40bd653833
+	github.com/etcd-io/bbolt v1.3.3
	github.com/go-test/deep v1.0.1
	github.com/gobuffalo/packr v1.19.0
	github.com/joomcode/errorx v0.8.0
@ -17,8 +19,8 @@ require (
	github.com/miekg/dns v1.1.8
	github.com/sparrc/go-ping v0.0.0-20181106165434-ef3ab45e41b0
	github.com/stretchr/testify v1.4.0
+	go.etcd.io/bbolt v1.3.3 // indirect
	golang.org/x/net v0.0.0-20190620200207-3b0461eec859
	golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0
-	gopkg.in/asaskevich/govalidator.v4 v4.0.0-20160518190739-766470278477
	gopkg.in/yaml.v2 v2.2.2
)

4 go.sum
@ -28,6 +28,7 @@ github.com/bluele/gcache v0.0.0-20190518031135-bc40bd653833/go.mod h1:8c4/i2Vlov
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+github.com/etcd-io/bbolt v1.3.3/go.mod h1:ZF2nL25h33cCyBtcyWeZ2/I3HQOfTP+0PIEvHjkjCrw=
github.com/go-ole/go-ole v1.2.4 h1:nNBDSCOigTSiarFpYE9J/KtEA1IOW4CNeqT9TQDqCxI=
github.com/go-ole/go-ole v1.2.4/go.mod h1:XCwSNxSkXRo4vlyPy93sltvi/qJq0jqQhjqQNIwKuxM=
github.com/go-test/deep v1.0.1 h1:UQhStjbkDClarlmv0am7OXXO4/GaPdCGiUiMTvi28sg=
@ -80,6 +81,7 @@ github.com/stretchr/testify v1.3.0 h1:TivCn/peBQ7UY8ooIcPgZFpTNSz0Q2U6UrFlUfqbe0
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
github.com/stretchr/testify v1.4.0 h1:2E4SXV/wtOkTonXsotYi4li6zVWxYlZuYNCXe9XRJyk=
github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
+go.etcd.io/bbolt v1.3.3/go.mod h1:IbVyRI1SCnLcuJnV2u8VeU0CEYM7e686BmAb1XKL+uU=
golang.org/x/crypto v0.0.0-20181203042331-505ab145d0a9 h1:mKdxBk7AujPs8kU4m80U72y/zjbZ3UcXC7dClwKbUI0=
golang.org/x/crypto v0.0.0-20181203042331-505ab145d0a9/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2 h1:VklqNMn3ovrHsnt90PveolxSbWFaJdECFbxSq0Mqo2M=
@ -112,8 +114,6 @@ golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.2 h1:tW2bmiBqwgJj/UpqtC8EpXEZVYOwU0yG4iWbprSVAcs=
golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
-gopkg.in/asaskevich/govalidator.v4 v4.0.0-20160518190739-766470278477 h1:5xUJw+lg4zao9W4HIDzlFbMYgSgtvNVHh00MEHvbGpQ=
-gopkg.in/asaskevich/govalidator.v4 v4.0.0-20160518190739-766470278477/go.mod h1:QDV1vrFSrowdoOba0UM8VJPUZONT7dnfdLsM+GG53Z8=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=

@ -12,6 +12,7 @@ import (
	"github.com/AdguardTeam/AdGuardHome/dhcpd"
	"github.com/AdguardTeam/AdGuardHome/dnsfilter"
	"github.com/AdguardTeam/AdGuardHome/dnsforward"
+	"github.com/AdguardTeam/AdGuardHome/stats"
	"github.com/AdguardTeam/golibs/file"
	"github.com/AdguardTeam/golibs/log"
	yaml "gopkg.in/yaml.v2"
@ -68,6 +69,7 @@ type configuration struct {
	controlLock sync.Mutex
	transport   *http.Transport
	client      *http.Client
+	stats       stats.Stats

	// cached version.json to avoid hammering github.io for each page reload
	versionCheckJSON []byte
@ -107,6 +109,9 @@ type dnsConfig struct {
	BindHost string `yaml:"bind_host"`
	Port     int    `yaml:"port"`

+	// time interval for statistics (in days)
+	StatsInterval uint `yaml:"statistics_interval"`
+
	dnsforward.FilteringConfig `yaml:",inline"`

	UpstreamDNS []string `yaml:"upstream_dns"`
@ -163,6 +168,7 @@ var config = configuration{
	DNS: dnsConfig{
		BindHost: "0.0.0.0",
		Port:     53,
+		StatsInterval: 1,
		FilteringConfig: dnsforward.FilteringConfig{
			ProtectionEnabled: true, // whether or not use any of dnsfilter features
			FilteringEnabled:  true, // whether or not use filter lists
@ -264,6 +270,10 @@ func parseConfig() error {
		return err
	}

+	if !checkStatsInterval(config.DNS.StatsInterval) {
+		config.DNS.StatsInterval = 1
+	}
+
	for _, cy := range config.Clients {
		cli := Client{
			Name: cy.Name,

150 home/control.go
@ -1,12 +1,10 @@
package home

import (
-	"bytes"
	"encoding/json"
	"fmt"
	"net"
	"net/http"
-	"sort"
	"strconv"
	"strings"
	"time"
@ -177,149 +175,6 @@ func handleQueryLog(w http.ResponseWriter, r *http.Request) {
	}
}

-func handleStatsTop(w http.ResponseWriter, r *http.Request) {
-	s := config.dnsServer.GetStatsTop()
-
-	// use manual json marshalling because we want maps to be sorted by value
-	statsJSON := bytes.Buffer{}
-	statsJSON.WriteString("{\n")
-
-	gen := func(json *bytes.Buffer, name string, top map[string]int, addComma bool) {
-		json.WriteString(" ")
-		json.WriteString(fmt.Sprintf("%q", name))
-		json.WriteString(": {\n")
-		sorted := sortByValue(top)
-		// no more than 50 entries
-		if len(sorted) > 50 {
-			sorted = sorted[:50]
-		}
-		for i, key := range sorted {
-			json.WriteString(" ")
-			json.WriteString(fmt.Sprintf("%q", key))
-			json.WriteString(": ")
-			json.WriteString(strconv.Itoa(top[key]))
-			if i+1 != len(sorted) {
-				json.WriteByte(',')
-			}
-			json.WriteByte('\n')
-		}
-		json.WriteString(" }")
-		if addComma {
-			json.WriteByte(',')
-		}
-		json.WriteByte('\n')
-	}
-	gen(&statsJSON, "top_queried_domains", s.Domains, true)
-	gen(&statsJSON, "top_blocked_domains", s.Blocked, true)
-	gen(&statsJSON, "top_clients", s.Clients, true)
-	statsJSON.WriteString(" \"stats_period\": \"24 hours\"\n")
-	statsJSON.WriteString("}\n")
-
-	w.Header().Set("Content-Type", "application/json")
-	_, err := w.Write(statsJSON.Bytes())
-	if err != nil {
-		httpError(w, http.StatusInternalServerError, "Couldn't write body: %s", err)
-	}
-}
-
-// handleStatsReset resets the stats caches
-func handleStatsReset(w http.ResponseWriter, r *http.Request) {
-	config.dnsServer.PurgeStats()
-	_, err := fmt.Fprintf(w, "OK\n")
-	if err != nil {
-		httpError(w, http.StatusInternalServerError, "Couldn't write body: %s", err)
-	}
-}
-
-// handleStats returns aggregated stats data for the 24 hours
-func handleStats(w http.ResponseWriter, r *http.Request) {
-	summed := config.dnsServer.GetAggregatedStats()
-
-	statsJSON, err := json.Marshal(summed)
-	if err != nil {
-		httpError(w, http.StatusInternalServerError, "Unable to marshal status json: %s", err)
-		return
-	}
-	w.Header().Set("Content-Type", "application/json")
-	_, err = w.Write(statsJSON)
-	if err != nil {
-		httpError(w, http.StatusInternalServerError, "Unable to write response json: %s", err)
-		return
-	}
-}
-
-// HandleStatsHistory returns historical stats data for the 24 hours
-func handleStatsHistory(w http.ResponseWriter, r *http.Request) {
-	// handle time unit and prepare our time window size
-	timeUnitString := r.URL.Query().Get("time_unit")
-	var timeUnit time.Duration
-	switch timeUnitString {
-	case "seconds":
-		timeUnit = time.Second
-	case "minutes":
-		timeUnit = time.Minute
-	case "hours":
-		timeUnit = time.Hour
-	case "days":
-		timeUnit = time.Hour * 24
-	default:
-		http.Error(w, "Must specify valid time_unit parameter", http.StatusBadRequest)
-		return
-	}
-
-	// parse start and end time
-	startTime, err := time.Parse(time.RFC3339, r.URL.Query().Get("start_time"))
-	if err != nil {
-		httpError(w, http.StatusBadRequest, "Must specify valid start_time parameter: %s", err)
-		return
-	}
-	endTime, err := time.Parse(time.RFC3339, r.URL.Query().Get("end_time"))
-	if err != nil {
-		httpError(w, http.StatusBadRequest, "Must specify valid end_time parameter: %s", err)
-		return
-	}
-
-	data, err := config.dnsServer.GetStatsHistory(timeUnit, startTime, endTime)
-	if err != nil {
-		httpError(w, http.StatusBadRequest, "Cannot get stats history: %s", err)
-		return
-	}
-
-	statsJSON, err := json.Marshal(data)
-	if err != nil {
-		httpError(w, http.StatusInternalServerError, "Unable to marshal status json: %s", err)
-		return
-	}
-
-	w.Header().Set("Content-Type", "application/json")
-	_, err = w.Write(statsJSON)
-	if err != nil {
-		httpError(w, http.StatusInternalServerError, "Unable to write response json: %s", err)
-		return
-	}
-}
-
-// sortByValue is a helper function for querylog API
-func sortByValue(m map[string]int) []string {
-	type kv struct {
-		k string
-		v int
-	}
-	var ss []kv
-	for k, v := range m {
-		ss = append(ss, kv{k, v})
-	}
-	sort.Slice(ss, func(l, r int) bool {
-		return ss[l].v > ss[r].v
-	})
-
-	sorted := []string{}
-	for _, v := range ss {
-		sorted = append(sorted, v.k)
-	}
-	return sorted
-}
-
// -----------------------
// upstreams configuration
// -----------------------
@ -722,10 +577,6 @@ func registerControlHandlers() {
	httpRegister(http.MethodPost, "/control/test_upstream_dns", handleTestUpstreamDNS)
	httpRegister(http.MethodPost, "/control/i18n/change_language", handleI18nChangeLanguage)
	httpRegister(http.MethodGet, "/control/i18n/current_language", handleI18nCurrentLanguage)
-	httpRegister(http.MethodGet, "/control/stats_top", handleStatsTop)
-	httpRegister(http.MethodGet, "/control/stats", handleStats)
-	httpRegister(http.MethodGet, "/control/stats_history", handleStatsHistory)
-	httpRegister(http.MethodPost, "/control/stats_reset", handleStatsReset)
	http.HandleFunc("/control/version.json", postInstall(optionalAuth(handleGetVersionJSON)))
	httpRegister(http.MethodPost, "/control/update", handleUpdate)
	httpRegister(http.MethodPost, "/control/filtering/enable", handleFilteringEnable)
@ -760,6 +611,7 @@ func registerControlHandlers() {
	RegisterClientsHandlers()
	registerRewritesHandlers()
	RegisterBlockedServicesHandlers()
+	RegisterStatsHandlers()

	http.HandleFunc("/dns-query", postInstall(handleDOH))
}

@ -0,0 +1,92 @@
+package home
+
+import (
+	"encoding/json"
+	"net/http"
+
+	"github.com/AdguardTeam/AdGuardHome/stats"
+	"github.com/AdguardTeam/golibs/log"
+)
+
+type statsConfig struct {
+	Interval uint `json:"interval"`
+}
+
+// Get stats configuration
+func handleStatsInfo(w http.ResponseWriter, r *http.Request) {
+	resp := statsConfig{}
+	resp.Interval = config.DNS.StatsInterval
+
+	jsonVal, err := json.Marshal(resp)
+	if err != nil {
+		httpError(w, http.StatusInternalServerError, "json encode: %s", err)
+		return
+	}
+	w.Header().Set("Content-Type", "application/json")
+	_, err = w.Write(jsonVal)
+	if err != nil {
+		httpError(w, http.StatusInternalServerError, "http write: %s", err)
+	}
+}
+
+// Set stats configuration
+func handleStatsConfig(w http.ResponseWriter, r *http.Request) {
+	reqData := statsConfig{}
+	err := json.NewDecoder(r.Body).Decode(&reqData)
+	if err != nil {
+		httpError(w, http.StatusBadRequest, "json decode: %s", err)
+		return
+	}
+
+	if !checkStatsInterval(reqData.Interval) {
+		httpError(w, http.StatusBadRequest, "Unsupported interval")
+		return
+	}
+
+	config.DNS.StatsInterval = reqData.Interval
+	config.stats.Configure(int(config.DNS.StatsInterval))
+
+	returnOK(w)
+}
+
+// handleStats returns aggregated stats data
+func handleStats(w http.ResponseWriter, r *http.Request) {
+	units := stats.Hours
+	if config.DNS.StatsInterval > 7 {
+		units = stats.Days
+	}
+	counter := log.StartTimer()
+	d := config.stats.GetData(units)
+	counter.LogElapsed("Stats: prepared data")
+
+	if d == nil {
+		httpError(w, http.StatusInternalServerError, "Couldn't get statistics data")
+		return
+	}
+
+	data, err := json.Marshal(d)
+	if err != nil {
+		httpError(w, http.StatusInternalServerError, "json encode: %s", err)
+		return
+	}
+
+	w.Write(data)
+}
+
+// handleStatsReset resets the stats
+func handleStatsReset(w http.ResponseWriter, r *http.Request) {
+	config.stats.Clear()
+	returnOK(w)
+}
+
+// RegisterStatsHandlers - register handlers
+func RegisterStatsHandlers() {
+	httpRegister(http.MethodGet, "/control/stats", handleStats)
+	httpRegister(http.MethodPost, "/control/stats_reset", handleStatsReset)
+	httpRegister(http.MethodPost, "/control/stats_config", handleStatsConfig)
+	httpRegister(http.MethodGet, "/control/stats_info", handleStatsInfo)
+}
+
+func checkStatsInterval(i uint) bool {
+	return i == 1 || i == 7 || i == 30 || i == 90
+}

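The handlers above are the server side of the new statistics API. As a rough, hypothetical sketch of how a client could exercise these endpoints (the base URL, port and the absence of authentication are assumptions made for the example, not part of this commit):

    package main

    import (
    	"bytes"
    	"encoding/json"
    	"fmt"
    	"net/http"
    )

    func main() {
    	// base URL is an assumption; real installs may differ and may require auth
    	base := "http://127.0.0.1:3000"

    	// set the retention interval to 7 days (allowed values: 1, 7, 30, 90)
    	body, _ := json.Marshal(map[string]uint{"interval": 7})
    	resp, err := http.Post(base+"/control/stats_config", "application/json", bytes.NewReader(body))
    	if err != nil {
    		panic(err)
    	}
    	resp.Body.Close()

    	// read the current statistics parameters back
    	resp, err = http.Get(base + "/control/stats_info")
    	if err != nil {
    		panic(err)
    	}
    	defer resp.Body.Close()

    	var cfg struct {
    		Interval uint `json:"interval"`
    	}
    	_ = json.NewDecoder(resp.Body).Decode(&cfg)
    	fmt.Println("interval (days):", cfg.Interval)
    }
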
14 home/dns.go
@ -8,6 +8,7 @@ import (

	"github.com/AdguardTeam/AdGuardHome/dnsfilter"
	"github.com/AdguardTeam/AdGuardHome/dnsforward"
+	"github.com/AdguardTeam/AdGuardHome/stats"
	"github.com/AdguardTeam/dnsproxy/proxy"
	"github.com/AdguardTeam/dnsproxy/upstream"
	"github.com/AdguardTeam/golibs/log"
@ -33,7 +34,11 @@ func initDNSServer(baseDir string) {
		log.Fatalf("Cannot create DNS data dir at %s: %s", baseDir, err)
	}

-	config.dnsServer = dnsforward.NewServer(baseDir)
+	config.stats = stats.New("./data/stats.db", int(config.DNS.StatsInterval), nil)
+	if config.stats == nil {
+		log.Fatal("config.stats == nil")
+	}
+	config.dnsServer = dnsforward.NewServer(baseDir, config.stats)

	initRDNS()
}
@ -152,11 +157,6 @@ func startDNSServer() error {
		return errorx.Decorate(err, "Couldn't start forwarding DNS server")
	}

-	top := config.dnsServer.GetStatsTop()
-	for k := range top.Clients {
-		beginAsyncRDNS(k)
-	}
-
	return nil
}

@ -183,5 +183,7 @@ func stopDNSServer() error {
		return errorx.Decorate(err, "Couldn't stop forwarding DNS server")
	}

+	config.stats.Close()
+
	return nil
}

@ -214,18 +214,6 @@ paths:
|
||||||
# General statistics methods
|
# General statistics methods
|
||||||
# --------------------------------------------------
|
# --------------------------------------------------
|
||||||
|
|
||||||
/stats_top:
|
|
||||||
get:
|
|
||||||
tags:
|
|
||||||
- stats
|
|
||||||
operationId: statusTop
|
|
||||||
summary: 'Get DNS server top client, domain and blocked statistics'
|
|
||||||
responses:
|
|
||||||
200:
|
|
||||||
description: OK
|
|
||||||
schema:
|
|
||||||
$ref: "#/definitions/StatsTop"
|
|
||||||
|
|
||||||
/stats:
|
/stats:
|
||||||
get:
|
get:
|
||||||
tags:
|
tags:
|
||||||
|
@ -234,46 +222,10 @@ paths:
|
||||||
summary: 'Get DNS server statistics'
|
summary: 'Get DNS server statistics'
|
||||||
responses:
|
responses:
|
||||||
200:
|
200:
|
||||||
description: 'Returns general statistics for the last 24 hours'
|
description: 'Returns statistics data'
|
||||||
schema:
|
schema:
|
||||||
$ref: "#/definitions/Stats"
|
$ref: "#/definitions/Stats"
|
||||||
|
|
||||||
/stats_history:
|
|
||||||
get:
|
|
||||||
tags:
|
|
||||||
- stats
|
|
||||||
operationId: stats_history
|
|
||||||
summary: 'Get historical DNS server statistics for the last 24 hours'
|
|
||||||
parameters:
|
|
||||||
-
|
|
||||||
name: start_time
|
|
||||||
in: query
|
|
||||||
type: string
|
|
||||||
description: 'Start time in ISO8601 (example: `2018-05-04T17:55:33+00:00`)'
|
|
||||||
required: true
|
|
||||||
-
|
|
||||||
name: end_time
|
|
||||||
in: query
|
|
||||||
type: string
|
|
||||||
description: 'End time in ISO8601 (example: `2018-05-04T17:55:33+00:00`)'
|
|
||||||
required: true
|
|
||||||
-
|
|
||||||
name: time_unit
|
|
||||||
in: query
|
|
||||||
type: string
|
|
||||||
description: 'Time unit (`minutes` or `hours`)'
|
|
||||||
required: true
|
|
||||||
enum:
|
|
||||||
- minutes
|
|
||||||
- hours
|
|
||||||
responses:
|
|
||||||
501:
|
|
||||||
description: 'Requested time window is outside of supported range. It will be supported later, but not now.'
|
|
||||||
200:
|
|
||||||
description: 'Returns historical stats for the specified time interval.'
|
|
||||||
schema:
|
|
||||||
$ref: '#/definitions/StatsHistory'
|
|
||||||
|
|
||||||
/stats_reset:
|
/stats_reset:
|
||||||
post:
|
post:
|
||||||
tags:
|
tags:
|
||||||
|
@ -284,6 +236,34 @@ paths:
|
||||||
200:
|
200:
|
||||||
description: OK
|
description: OK
|
||||||
|
|
||||||
|
/stats_info:
|
||||||
|
get:
|
||||||
|
tags:
|
||||||
|
- stats
|
||||||
|
operationId: statsInfo
|
||||||
|
summary: 'Get statistics parameters'
|
||||||
|
responses:
|
||||||
|
200:
|
||||||
|
schema:
|
||||||
|
$ref: "#/definitions/StatsConfig"
|
||||||
|
|
||||||
|
/stats_config:
|
||||||
|
post:
|
||||||
|
tags:
|
||||||
|
- stats
|
||||||
|
operationId: statsConfig
|
||||||
|
summary: "Set statistics parameters"
|
||||||
|
consumes:
|
||||||
|
- application/json
|
||||||
|
parameters:
|
||||||
|
- in: "body"
|
||||||
|
name: "body"
|
||||||
|
schema:
|
||||||
|
$ref: "#/definitions/StatsConfig"
|
||||||
|
responses:
|
||||||
|
200:
|
||||||
|
description: OK
|
||||||
|
|
||||||
# --------------------------------------------------
|
# --------------------------------------------------
|
||||||
# TLS server methods
|
# TLS server methods
|
||||||
# --------------------------------------------------
|
# --------------------------------------------------
|
||||||
|
@ -1103,28 +1083,29 @@ definitions:
|
||||||
type: "boolean"
|
type: "boolean"
|
||||||
Stats:
|
Stats:
|
||||||
type: "object"
|
type: "object"
|
||||||
description: "General server stats for the last 24 hours"
|
description: "Server statistics data"
|
||||||
required:
|
|
||||||
- "dns_queries"
|
|
||||||
- "blocked_filtering"
|
|
||||||
- "replaced_safebrowsing"
|
|
||||||
- "replaced_parental"
|
|
||||||
- "replaced_safesearch"
|
|
||||||
- "avg_processing_time"
|
|
||||||
properties:
|
properties:
|
||||||
dns_queries:
|
time_units:
|
||||||
|
type: "string"
|
||||||
|
description: "Time units (hours | days)"
|
||||||
|
example: "hours"
|
||||||
|
num_dns_queries:
|
||||||
type: "integer"
|
type: "integer"
|
||||||
description: "Total number of DNS queries"
|
description: "Total number of DNS queries"
|
||||||
example: 123
|
example: 123
|
||||||
blocked_filtering:
|
num_blocked_filtering:
|
||||||
type: "integer"
|
type: "integer"
|
||||||
description: "Number of requests blocked by filtering rules"
|
description: "Number of requests blocked by filtering rules"
|
||||||
example: 50
|
example: 50
|
||||||
replaced_safebrowsing:
|
num_replaced_safebrowsing:
|
||||||
type: "integer"
|
type: "integer"
|
||||||
description: "Number of requests blocked by the safebrowsing module"
|
description: "Number of requests blocked by safebrowsing module"
|
||||||
example: 5
|
example: 5
|
||||||
replaced_parental:
|
num_replaced_safesearch:
|
||||||
|
type: "integer"
|
||||||
|
description: "Number of requests blocked by safesearch module"
|
||||||
|
example: 5
|
||||||
|
num_replaced_parental:
|
||||||
type: "integer"
|
type: "integer"
|
||||||
description: "Number of blocked adult websites"
|
description: "Number of blocked adult websites"
|
||||||
example: 15
|
example: 15
|
||||||
|
@ -1133,110 +1114,43 @@ definitions:
|
||||||
format: "float"
|
format: "float"
|
||||||
description: "Average time in milliseconds on processing a DNS"
|
description: "Average time in milliseconds on processing a DNS"
|
||||||
example: 0.34
|
example: 0.34
|
||||||
      top_queried_domains:
        type: "array"
        items:
          type: "object"
      top_clients:
        type: "array"
        items:
          type: "object"
      top_blocked_domains:
        type: "array"
        items:
          type: "object"
      dns_queries:
        type: "array"
        items:
          type: "integer"
      blocked_filtering:
        type: "array"
        items:
          type: "integer"
      replaced_safebrowsing:
        type: "array"
        items:
          type: "integer"
      replaced_parental:
        type: "array"
        items:
          type: "integer"
  StatsConfig:
    type: "object"
    description: "Statistics configuration"
    properties:
      interval:
        type: "integer"
        description: "Time period to keep data (1 | 7 | 30 | 90)"
  DhcpConfig:
    type: "object"
    description: "Built-in DHCP server configuration"
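The StatsConfig definition above backs the new /control/stats_config and /control/stats_info endpoints from the commit message. A hedged sketch of exercising them from Go follows; the host, port and the exact request body shape (a single "interval" field of 1, 7, 30 or 90 days) are assumptions based on this schema, not taken from the server code.

package main

import (
	"bytes"
	"fmt"
	"io/ioutil"
	"net/http"
)

func main() {
	base := "http://127.0.0.1:3000" // assumed AdGuard Home address

	// Set the retention period to 7 days via POST /control/stats_config.
	body := bytes.NewBufferString(`{"interval": 7}`)
	resp, err := http.Post(base+"/control/stats_config", "application/json", body)
	if err != nil {
		panic(err)
	}
	resp.Body.Close()

	// Read the current statistics parameters back via GET /control/stats_info.
	resp, err = http.Get(base + "/control/stats_info")
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	data, _ := ioutil.ReadAll(resp.Body)
	fmt.Println(resp.Status, string(data))
}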
@ -0,0 +1,68 @@
// Module for managing statistics for DNS filtering server

package stats

import (
	"net"
)

type unitIDCallback func() int

// New - create object
// filename: DB file name
// limit: time limit (in days)
// unitID: user function to get the current unit ID.  If nil, the current time hour is used.
func New(filename string, limit int, unitID unitIDCallback) Stats {
	return createObject(filename, limit, unitID)
}

// Stats - main interface
type Stats interface {
	// Close object.
	// This function is not thread safe
	//  (can't be called in parallel with any other function of this interface).
	Close()

	// Set new configuration at runtime.
	// limit: time limit (in days)
	Configure(limit int)

	// Reset counters and clear database
	Clear()

	// Update counters
	Update(e Entry)

	// Get data
	GetData(timeUnit TimeUnit) map[string]interface{}
}

// TimeUnit - time unit
type TimeUnit int

// Supported time units
const (
	Hours TimeUnit = iota
	Days
)

// Result of DNS request processing
type Result int

// Supported result values
const (
	RNotFiltered Result = iota + 1
	RFiltered
	RSafeBrowsing
	RSafeSearch
	RParental
	rLast
)

// Entry - data to add
type Entry struct {
	Domain string
	Client net.IP
	Result Result
	Time   uint // processing time (msec)
}
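For orientation, a minimal sketch of how a caller might use this interface. The import path and the concrete values are illustrative assumptions; only the New/Update/GetData/Close calls come from the interface above.

package main

import (
	"fmt"
	"net"

	"github.com/AdguardTeam/AdGuardHome/stats" // assumed import path
)

func main() {
	// Keep 1 day of data; nil means the default hourly unit ID is used.
	s := stats.New("./stats.db", 1, nil)
	defer s.Close()

	// Record one processed DNS request.
	s.Update(stats.Entry{
		Domain: "example.org",
		Client: net.ParseIP("127.0.0.1"),
		Result: stats.RNotFiltered,
		Time:   15,
	})

	// Fetch aggregated counters with hourly granularity.
	d := s.GetData(stats.Hours)
	fmt.Println(d["num_dns_queries"], d["avg_processing_time"])
}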
@ -0,0 +1,115 @@
package stats

import (
	"fmt"
	"net"
	"os"
	"sync/atomic"
	"testing"
	"time"

	"github.com/stretchr/testify/assert"
)

func UIntArrayEquals(a []uint, b []uint) bool {
	if len(a) != len(b) {
		return false
	}

	for i := range a {
		if a[i] != b[i] {
			return false
		}
	}

	return true
}

func TestStats(t *testing.T) {
	s := New("./stats.db", 1, nil)

	e := Entry{}

	e.Domain = "domain"
	e.Client = net.ParseIP("127.0.0.1")
	e.Result = RFiltered
	e.Time = 123456
	s.Update(e)

	e.Domain = "domain"
	e.Client = net.ParseIP("127.0.0.1")
	e.Result = RNotFiltered
	e.Time = 123456
	s.Update(e)

	d := s.GetData(Hours)
	a := []uint{0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2}
	assert.True(t, UIntArrayEquals(d["dns_queries"].([]uint), a))

	a = []uint{0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1}
	assert.True(t, UIntArrayEquals(d["blocked_filtering"].([]uint), a))

	a = []uint{0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0}
	assert.True(t, UIntArrayEquals(d["replaced_safebrowsing"].([]uint), a))

	a = []uint{0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0}
	assert.True(t, UIntArrayEquals(d["replaced_parental"].([]uint), a))

	m := d["top_queried_domains"].([]map[string]uint)
	assert.True(t, m[0]["domain"] == 1)

	m = d["top_blocked_domains"].([]map[string]uint)
	assert.True(t, m[0]["domain"] == 1)

	m = d["top_clients"].([]map[string]uint)
	assert.True(t, m[0]["127.0.0.1"] == 2)

	assert.True(t, d["num_dns_queries"].(uint) == 2)
	assert.True(t, d["num_blocked_filtering"].(uint) == 1)
	assert.True(t, d["num_replaced_safebrowsing"].(uint) == 0)
	assert.True(t, d["num_replaced_safesearch"].(uint) == 0)
	assert.True(t, d["num_replaced_parental"].(uint) == 0)
	assert.True(t, d["avg_processing_time"].(float64) == 0.123456)

	s.Clear()
	s.Close()
	os.Remove("./stats.db")
}

func TestLargeNumbers(t *testing.T) {
	var hour int32
	hour = 1
	newID := func() int {
		// use "atomic" to make Go race detector happy
		return int(atomic.LoadInt32(&hour))
	}

	// log.SetLevel(log.DEBUG)
	fn := "./stats.db"
	os.Remove(fn)
	s := New(fn, 1, newID)
	e := Entry{}

	n := 1000 // number of distinct clients and domains every hour
	for h := 0; h != 12; h++ {
		if h != 0 {
			atomic.AddInt32(&hour, 1)
			time.Sleep(1500 * time.Millisecond)
		}
		for i := 0; i != n; i++ {
			e.Domain = fmt.Sprintf("domain%d", i)
			e.Client = net.ParseIP("127.0.0.1")
			e.Client[2] = byte((i & 0xff00) >> 8)
			e.Client[3] = byte(i & 0xff)
			e.Result = RNotFiltered
			e.Time = 123456
			s.Update(e)
		}
	}

	d := s.GetData(Hours)
	assert.True(t, d["num_dns_queries"].(uint) == uint(int(hour)*n))

	s.Close()
	os.Remove(fn)
}
@ -0,0 +1,672 @@
package stats

import (
	"bytes"
	"encoding/binary"
	"encoding/gob"
	"fmt"
	"os"
	"sort"
	"sync"
	"time"

	"github.com/AdguardTeam/golibs/log"
	bolt "github.com/etcd-io/bbolt"
)

const (
	maxDomains = 100 // max number of top domains to store in file or return via Get()
	maxClients = 100 // max number of top clients to store in file or return via Get()
)

// statsCtx - global context
type statsCtx struct {
	limit    int            // maximum time we need to keep data for (in hours)
	filename string         // database file name
	unitID   unitIDCallback // user function which returns the current unit ID
	db       *bolt.DB

	unit     *unit      // the current unit
	unitLock sync.Mutex // protect 'unit'
}

// data for 1 time unit
type unit struct {
	id int // unit ID.  Default: absolute hour since Jan 1, 1970

	nTotal  int   // total requests
	nResult []int // number of requests per one result
	timeSum int   // sum of processing time of all requests (usec)

	// top:
	domains        map[string]int // number of requests per domain
	blockedDomains map[string]int // number of blocked requests per domain
	clients        map[string]int // number of requests per client
}

// name-count pair
type countPair struct {
	Name  string
	Count uint
}

// structure for storing data in file
type unitDB struct {
	NTotal  uint
	NResult []uint

	Domains        []countPair
	BlockedDomains []countPair
	Clients        []countPair

	TimeAvg uint // usec
}
func createObject(filename string, limitDays int, unitID unitIDCallback) *statsCtx {
	s := statsCtx{}
	s.limit = limitDays * 24
	s.filename = filename
	s.unitID = newUnitID
	if unitID != nil {
		s.unitID = unitID
	}

	if !s.dbOpen() {
		return nil
	}

	id := s.unitID()
	tx := s.beginTxn(true)
	var udb *unitDB
	if tx != nil {
		log.Tracef("Deleting old units...")
		firstID := id - s.limit - 1
		unitDel := 0
		forEachBkt := func(name []byte, b *bolt.Bucket) error {
			id := btoi(name)
			if id < firstID {
				err := tx.DeleteBucket(name)
				if err != nil {
					log.Debug("tx.DeleteBucket: %s", err)
				}
				log.Debug("Stats: deleted unit %d", id)
				unitDel++
				return nil
			}
			return fmt.Errorf("")
		}
		_ = tx.ForEach(forEachBkt)

		udb = s.loadUnitFromDB(tx, id)

		if unitDel != 0 {
			s.commitTxn(tx)
		} else {
			_ = tx.Rollback()
		}
	}

	u := unit{}
	s.initUnit(&u, id)
	if udb != nil {
		deserialize(&u, udb)
	}
	s.unit = &u

	go s.periodicFlush()

	log.Debug("Stats: initialized")
	return &s
}

func (s *statsCtx) dbOpen() bool {
	var err error
	log.Tracef("db.Open...")
	s.db, err = bolt.Open(s.filename, 0644, nil)
	if err != nil {
		log.Error("Stats: open DB: %s: %s", s.filename, err)
		return false
	}
	log.Tracef("db.Open")
	return true
}

// Atomically swap the currently active unit with a new value
// Return old value
func (s *statsCtx) swapUnit(new *unit) *unit {
	s.unitLock.Lock()
	u := s.unit
	s.unit = new
	s.unitLock.Unlock()
	return u
}

// Get unit ID for the current hour
func newUnitID() int {
	return int(time.Now().Unix() / (60 * 60))
}

// Initialize a unit
func (s *statsCtx) initUnit(u *unit, id int) {
	u.id = id
	u.nResult = make([]int, rLast)
	u.domains = make(map[string]int)
	u.blockedDomains = make(map[string]int)
	u.clients = make(map[string]int)
}

// Open a DB transaction
func (s *statsCtx) beginTxn(wr bool) *bolt.Tx {
	db := s.db
	if db == nil {
		return nil
	}

	log.Tracef("db.Begin...")
	tx, err := db.Begin(wr)
	if err != nil {
		log.Error("db.Begin: %s", err)
		return nil
	}
	log.Tracef("db.Begin")
	return tx
}

func (s *statsCtx) commitTxn(tx *bolt.Tx) {
	err := tx.Commit()
	if err != nil {
		log.Debug("tx.Commit: %s", err)
		return
	}
	log.Tracef("tx.Commit")
}

// Get unit name
func unitName(id int) []byte {
	return itob(id)
}

// Convert integer to 8-byte array (big endian)
func itob(v int) []byte {
	b := make([]byte, 8)
	binary.BigEndian.PutUint64(b, uint64(v))
	return b
}

// Convert 8-byte array (big endian) to integer
func btoi(b []byte) int {
	return int(binary.BigEndian.Uint64(b))
}
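A unit ID is simply the number of whole hours elapsed since Jan 1, 1970, and a day boundary is any ID divisible by 24. The following standalone sketch shows the same arithmetic newUnitID() and GetData() rely on; the values are illustrative and not part of the stats package.

package main

import (
	"fmt"
	"time"
)

func main() {
	// Unit ID for the current hour: hours since the Unix epoch.
	id := int(time.Now().Unix() / 3600)

	// Align down and up to day boundaries (IDs that are multiples of 24).
	dayStart := id / 24 * 24           // start of the day this hour belongs to
	nextDay := (id + 24 - 1) / 24 * 24 // "align_ceil(24)", as used in GetData

	fmt.Println(id, dayStart, nextDay)
}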
// Flush the current unit to DB and delete an old unit when a new hour is started
func (s *statsCtx) periodicFlush() {
	for {
		s.unitLock.Lock()
		ptr := s.unit
		s.unitLock.Unlock()
		if ptr == nil {
			break
		}

		id := s.unitID()
		if ptr.id == id {
			time.Sleep(time.Second)
			continue
		}

		nu := unit{}
		s.initUnit(&nu, id)
		u := s.swapUnit(&nu)
		udb := serialize(u)

		tx := s.beginTxn(true)
		if tx == nil {
			continue
		}
		ok1 := s.flushUnitToDB(tx, u.id, udb)
		ok2 := s.deleteUnit(tx, id-s.limit)
		if ok1 || ok2 {
			s.commitTxn(tx)
		} else {
			_ = tx.Rollback()
		}
	}
	log.Tracef("periodicFlush() exited")
}

// Delete unit's data from file
func (s *statsCtx) deleteUnit(tx *bolt.Tx, id int) bool {
	err := tx.DeleteBucket(unitName(id))
	if err != nil {
		log.Tracef("bolt DeleteBucket: %s", err)
		return false
	}
	log.Debug("Stats: deleted unit %d", id)
	return true
}

func convertMapToArray(m map[string]int, max int) []countPair {
	a := []countPair{}
	for k, v := range m {
		pair := countPair{}
		pair.Name = k
		pair.Count = uint(v)
		a = append(a, pair)
	}
	less := func(i, j int) bool {
		if a[i].Count >= a[j].Count {
			return true
		}
		return false
	}
	sort.Slice(a, less)
	if max > len(a) {
		max = len(a)
	}
	return a[:max]
}

func convertArrayToMap(a []countPair) map[string]int {
	m := map[string]int{}
	for _, it := range a {
		m[it.Name] = int(it.Count)
	}
	return m
}

func serialize(u *unit) *unitDB {
	udb := unitDB{}
	udb.NTotal = uint(u.nTotal)
	for _, it := range u.nResult {
		udb.NResult = append(udb.NResult, uint(it))
	}
	if u.nTotal != 0 {
		udb.TimeAvg = uint(u.timeSum / u.nTotal)
	}
	udb.Domains = convertMapToArray(u.domains, maxDomains)
	udb.BlockedDomains = convertMapToArray(u.blockedDomains, maxDomains)
	udb.Clients = convertMapToArray(u.clients, maxClients)
	return &udb
}

func deserialize(u *unit, udb *unitDB) {
	u.nTotal = int(udb.NTotal)
	for _, it := range udb.NResult {
		u.nResult = append(u.nResult, int(it))
	}
	u.domains = convertArrayToMap(udb.Domains)
	u.blockedDomains = convertArrayToMap(udb.BlockedDomains)
	u.clients = convertArrayToMap(udb.Clients)
	u.timeSum = int(udb.TimeAvg) * u.nTotal
}

func (s *statsCtx) flushUnitToDB(tx *bolt.Tx, id int, udb *unitDB) bool {
	log.Tracef("Flushing unit %d", id)

	bkt, err := tx.CreateBucketIfNotExists(unitName(id))
	if err != nil {
		log.Error("tx.CreateBucketIfNotExists: %s", err)
		return false
	}

	var buf bytes.Buffer
	enc := gob.NewEncoder(&buf)
	err = enc.Encode(udb)
	if err != nil {
		log.Error("gob.Encode: %s", err)
		return false
	}

	err = bkt.Put([]byte{0}, buf.Bytes())
	if err != nil {
		log.Error("bkt.Put: %s", err)
		return false
	}

	return true
}

func (s *statsCtx) loadUnitFromDB(tx *bolt.Tx, id int) *unitDB {
	bkt := tx.Bucket(unitName(id))
	if bkt == nil {
		return nil
	}

	log.Tracef("Loading unit %d", id)

	var buf bytes.Buffer
	buf.Write(bkt.Get([]byte{0}))
	dec := gob.NewDecoder(&buf)
	udb := unitDB{}
	err := dec.Decode(&udb)
	if err != nil {
		log.Error("gob Decode: %s", err)
		return nil
	}

	return &udb
}
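The on-disk layout used by flushUnitToDB/loadUnitFromDB is one bolt bucket per unit, named with the 8-byte big-endian unit ID, and a single key {0} holding the gob-encoded unitDB record. A minimal standalone round-trip sketch of that encode/decode step (the struct and values here are illustrative, not the package's own types):

package main

import (
	"bytes"
	"encoding/gob"
	"fmt"
)

// record mirrors the shape of unitDB for illustration only.
type record struct {
	NTotal  uint
	NResult []uint
	TimeAvg uint
}

func main() {
	in := record{NTotal: 2, NResult: []uint{0, 1, 1, 0, 0, 0}, TimeAvg: 123456}

	// Encode the record the same way flushUnitToDB does before bkt.Put().
	var buf bytes.Buffer
	if err := gob.NewEncoder(&buf).Encode(&in); err != nil {
		panic(err)
	}

	// Decode it back, as loadUnitFromDB does after bkt.Get().
	var out record
	if err := gob.NewDecoder(bytes.NewReader(buf.Bytes())).Decode(&out); err != nil {
		panic(err)
	}
	fmt.Printf("%+v\n", out)
}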
func convertTopArray(a []countPair) []map[string]uint {
	m := []map[string]uint{}
	for _, it := range a {
		ent := map[string]uint{}
		ent[it.Name] = it.Count
		m = append(m, ent)
	}
	return m
}

func (s *statsCtx) Configure(limit int) {
	if limit < 0 {
		return
	}
	s.limit = limit * 24
	log.Debug("Stats: set limit: %d", limit)
}

func (s *statsCtx) Close() {
	u := s.swapUnit(nil)
	udb := serialize(u)
	tx := s.beginTxn(true)
	if tx != nil {
		if s.flushUnitToDB(tx, u.id, udb) {
			s.commitTxn(tx)
		} else {
			_ = tx.Rollback()
		}
	}

	if s.db != nil {
		log.Tracef("db.Close...")
		_ = s.db.Close()
		log.Tracef("db.Close")
	}

	log.Debug("Stats: closed")
}

func (s *statsCtx) Clear() {
	tx := s.beginTxn(true)
	if tx != nil {
		db := s.db
		s.db = nil
		_ = tx.Rollback()
		// the active transactions can continue using database,
		//  but no new transactions will be opened
		_ = db.Close()
		log.Tracef("db.Close")
		// all active transactions are now closed
	}

	u := unit{}
	s.initUnit(&u, s.unitID())
	_ = s.swapUnit(&u)

	err := os.Remove(s.filename)
	if err != nil {
		log.Error("os.Remove: %s", err)
	}

	_ = s.dbOpen()

	log.Debug("Stats: cleared")
}

func (s *statsCtx) Update(e Entry) {
	if e.Result == 0 ||
		len(e.Domain) == 0 ||
		!(len(e.Client) == 4 || len(e.Client) == 16) {
		return
	}
	client := e.Client.String()

	s.unitLock.Lock()
	u := s.unit

	u.nResult[e.Result]++

	if e.Result == RNotFiltered {
		u.domains[e.Domain]++
	} else {
		u.blockedDomains[e.Domain]++
	}

	u.clients[client]++
	u.timeSum += int(e.Time)
	u.nTotal++
	s.unitLock.Unlock()
}
/* Algorithm:
. Prepare array of N units, where N is the value of "limit" configuration setting
 . Load data for the most recent units from file
   If a unit with required ID doesn't exist, just add an empty unit
 . Get data for the current unit
. Process data from the units and prepare an output map object:
 * per time unit counters:
  * DNS-queries/time-unit
  * blocked/time-unit
  * safebrowsing-blocked/time-unit
  * parental-blocked/time-unit
  If time-unit is an hour, just add values from each unit to an array.
  If time-unit is a day, aggregate per-hour data into days.
 * top counters:
  * queries/domain
  * queries/blocked-domain
  * queries/client
  To get these values we first sum up data for all units into a single map.
  Then we get the pairs with the highest numbers (the values are sorted in descending order)
 * total counters:
  * DNS-queries
  * blocked
  * safebrowsing-blocked
  * safesearch-blocked
  * parental-blocked
  These values are just the sum of data for all units.
*/
// nolint (gocyclo)
func (s *statsCtx) GetData(timeUnit TimeUnit) map[string]interface{} {
	d := map[string]interface{}{}

	tx := s.beginTxn(false)
	if tx == nil {
		return nil
	}

	units := []*unitDB{} // per-hour units
	lastID := s.unitID()
	firstID := lastID - s.limit + 1
	for i := firstID; i != lastID; i++ {
		u := s.loadUnitFromDB(tx, i)
		if u == nil {
			u = &unitDB{}
			u.NResult = make([]uint, rLast)
		}
		units = append(units, u)
	}

	_ = tx.Rollback()

	s.unitLock.Lock()
	cu := serialize(s.unit)
	cuID := s.unit.id
	s.unitLock.Unlock()
	if cuID != lastID {
		units = units[1:]
	}
	units = append(units, cu)

	if len(units) != s.limit {
		log.Fatalf("len(units) != s.limit: %d %d", len(units), s.limit)
	}

	// per time unit counters:

	// 720 hours may span 31 days, so we skip data for the first day in this case
	firstDayID := (firstID + 24 - 1) / 24 * 24 // align_ceil(24)

	a := []uint{}
	if timeUnit == Hours {
		for _, u := range units {
			a = append(a, u.NTotal)
		}
	} else {
		var sum uint
		id := firstDayID
		nextDayID := firstDayID + 24
		for i := firstDayID - firstID; i != len(units); i++ {
			sum += units[i].NTotal
			if id == nextDayID {
				a = append(a, sum)
				sum = 0
				nextDayID += 24
			}
			id++
		}
		if id < nextDayID {
			a = append(a, sum)
		}
		if len(a) != s.limit/24 {
			log.Fatalf("len(a) != s.limit: %d %d", len(a), s.limit)
		}
	}
	d["dns_queries"] = a

	a = []uint{}
	if timeUnit == Hours {
		for _, u := range units {
			a = append(a, u.NResult[RFiltered])
		}
	} else {
		var sum uint
		id := firstDayID
		nextDayID := firstDayID + 24
		for i := firstDayID - firstID; i != len(units); i++ {
			sum += units[i].NResult[RFiltered]
			if id == nextDayID {
				a = append(a, sum)
				sum = 0
				nextDayID += 24
			}
			id++
		}
		if id < nextDayID {
			a = append(a, sum)
		}
	}
	d["blocked_filtering"] = a

	a = []uint{}
	if timeUnit == Hours {
		for _, u := range units {
			a = append(a, u.NResult[RSafeBrowsing])
		}
	} else {
		var sum uint
		id := firstDayID
		nextDayID := firstDayID + 24
		for i := firstDayID - firstID; i != len(units); i++ {
			sum += units[i].NResult[RSafeBrowsing]
			if id == nextDayID {
				a = append(a, sum)
				sum = 0
				nextDayID += 24
			}
			id++
		}
		if id < nextDayID {
			a = append(a, sum)
		}
	}
	d["replaced_safebrowsing"] = a

	a = []uint{}
	if timeUnit == Hours {
		for _, u := range units {
			a = append(a, u.NResult[RParental])
		}
	} else {
		var sum uint
		id := firstDayID
		nextDayID := firstDayID + 24
		for i := firstDayID - firstID; i != len(units); i++ {
			sum += units[i].NResult[RParental]
			if id == nextDayID {
				a = append(a, sum)
				sum = 0
				nextDayID += 24
			}
			id++
		}
		if id < nextDayID {
			a = append(a, sum)
		}
	}
	d["replaced_parental"] = a

	// top counters:

	m := map[string]int{}
	for _, u := range units {
		for _, it := range u.Domains {
			m[it.Name] += int(it.Count)
		}
	}
	a2 := convertMapToArray(m, maxDomains)
	d["top_queried_domains"] = convertTopArray(a2)

	m = map[string]int{}
	for _, u := range units {
		for _, it := range u.BlockedDomains {
			m[it.Name] += int(it.Count)
		}
	}
	a2 = convertMapToArray(m, maxDomains)
	d["top_blocked_domains"] = convertTopArray(a2)

	m = map[string]int{}
	for _, u := range units {
		for _, it := range u.Clients {
			m[it.Name] += int(it.Count)
		}
	}
	a2 = convertMapToArray(m, maxClients)
	d["top_clients"] = convertTopArray(a2)

	// total counters:

	sum := unitDB{}
	timeN := 0
	sum.NResult = make([]uint, rLast)
	for _, u := range units {
		sum.NTotal += u.NTotal
		sum.TimeAvg += u.TimeAvg
		if u.TimeAvg != 0 {
			timeN++
		}
		sum.NResult[RFiltered] += u.NResult[RFiltered]
		sum.NResult[RSafeBrowsing] += u.NResult[RSafeBrowsing]
		sum.NResult[RSafeSearch] += u.NResult[RSafeSearch]
		sum.NResult[RParental] += u.NResult[RParental]
	}

	d["num_dns_queries"] = sum.NTotal
	d["num_blocked_filtering"] = sum.NResult[RFiltered]
	d["num_replaced_safebrowsing"] = sum.NResult[RSafeBrowsing]
	d["num_replaced_safesearch"] = sum.NResult[RSafeSearch]
	d["num_replaced_parental"] = sum.NResult[RParental]

	avgTime := float64(0)
	if timeN != 0 {
		avgTime = float64(sum.TimeAvg/uint(timeN)) / 1000000
	}
	d["avg_processing_time"] = avgTime

	d["time_units"] = "hours"
	if timeUnit == Days {
		d["time_units"] = "days"
	}

	return d
}
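The hours-to-days aggregation in GetData is easier to see with concrete numbers. Below is a standalone sketch of the same summation pattern under the simplifying assumption that the window starts exactly at a day boundary (so nothing is skipped); the hourly counts are made up, not server data.

package main

import "fmt"

func main() {
	// 48 hourly totals covering two full days.
	hours := make([]uint, 48)
	for i := range hours {
		hours[i] = uint(i + 1) // made-up counts: 1, 2, 3, ...
	}

	// Collapse every 24 consecutive hourly values into one daily value,
	// as the Days branch of GetData does.
	days := []uint{}
	var sum uint
	for i, v := range hours {
		sum += v
		if (i+1)%24 == 0 { // a full day collected
			days = append(days, sum)
			sum = 0
		}
	}
	fmt.Println(days) // [300 876]: sums of hours 1..24 and 25..48
}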