Merge: Querylog: speed up, change format, robust search

Close #1099 Close #1094

* commit '62c8664fd75439b7597d935992c380f9c0675660':
  + client: load additional search results
  + client: separate filters from the table component
  + client: hide page size option and page info
  + client: use oldest param
  * openapi: update 'QueryLog'
  * querylog: add more tests
  * QueryLog.Add() now receives net.IP, not net.Addr
  * querylog: major refactor: change on-disk format and API
Simon Zolin 2019-11-19 15:21:42 +03:00
commit 2ac6e48535
19 changed files with 922 additions and 283 deletions

View File

@ -1012,17 +1012,20 @@ Response:
When a new DNS request is received and processed, we store information about this event in "query log". It is a file on disk in JSON format:
{
-"Question":"...",
-"Answer":"...",
+"IP":"127.0.0.1", // client IP
+"T":"...", // response time
+"QH":"...", // target host name without the last dot
+"QT":"...", // question type
+"QC":"...", // question class
+"Answer":"...",
"Result":{
"IsFiltered":true,
"Reason":3,
"Rule":"...",
"FilterID":1
},
-"Time":"...",
"Elapsed":12345,
-"IP":"127.0.0.1"
+"Upstream":"...",
}
@ -1052,7 +1055,7 @@ Request:
&filter_question_type=A | AAAA
&filter_response_status= | filtered
-If `older_than` value is set, server returns the next chunk of entries that are older than this time stamp. This setting is used for paging. UI sets the empty value on the first request and gets the latest log entries. To get the older entries, UI sets this value to the timestamp of the last (the oldest) entry from the previous response from Server.
+`older_than` setting is used for paging. UI uses an empty value for `older_than` on the first request and gets the latest log entries. To get the older entries, UI sets `older_than` to the `oldest` value from the server's response.
If "filter" settings are set, server returns only entries that match the specified request.
@ -1060,7 +1063,9 @@ For `filter.domain` and `filter.client` the server matches substrings by default
Response:
-[
+{
+"oldest":"2006-01-02T15:04:05.999999999Z07:00"
+"data":[
{
"answer":[
{
@ -1085,6 +1090,7 @@ Response:
}
...
]
+}
The most recent entries are at the top of list.
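To make the paging contract concrete, here is a minimal Go client sketch (an illustration, not part of this change): it requests the latest entries with an empty `older_than`, then feeds the `oldest` value from each response back as `older_than` to fetch the next, older chunk. The base URL, the port and the absence of authentication are assumptions.

package main

import (
	"encoding/json"
	"fmt"
	"net/http"
	"net/url"
)

// logPage mirrors the response object: the oldest timestamp plus a chunk of entries.
type logPage struct {
	Oldest string                   `json:"oldest"`
	Data   []map[string]interface{} `json:"data"`
}

func fetchPage(baseURL, olderThan string) (*logPage, error) {
	q := url.Values{}
	q.Set("older_than", olderThan)
	q.Set("filter_response_status", "filtered") // optional filter, same as in the request above
	resp, err := http.Get(baseURL + "/control/querylog?" + q.Encode())
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()
	page := &logPage{}
	return page, json.NewDecoder(resp.Body).Decode(page)
}

func main() {
	olderThan := "" // empty value on the first request returns the latest entries
	for i := 0; i < 3; i++ {
		page, err := fetchPage("http://127.0.0.1:3000", olderThan)
		if err != nil || len(page.Data) == 0 {
			break
		}
		fmt.Printf("got %d entries, oldest=%s\n", len(page.Data), page.Oldest)
		if page.Oldest == "" {
			break // nothing older is available
		}
		olderThan = page.Oldest // request the next, older chunk
	}
}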

View File

@ -401,4 +401,4 @@
"descr": "Description", "descr": "Description",
"whois": "Whois", "whois": "Whois",
"filtering_rules_learn_more": "<0>Learn more</0> about creating your own hosts blocklists." "filtering_rules_learn_more": "<0>Learn more</0> about creating your own hosts blocklists."
} }

View File

@ -3,26 +3,100 @@ import { createAction } from 'redux-actions';
import apiClient from '../api/Api';
import { addErrorToast, addSuccessToast } from './index';
import { normalizeLogs } from '../helpers/helpers';
import { TABLE_DEFAULT_PAGE_SIZE } from '../helpers/constants';
const getLogsWithParams = async (config) => {
const { older_than, filter, ...values } = config;
const rawLogs = await apiClient.getQueryLog({ ...filter, older_than });
const { data, oldest } = rawLogs;
const logs = normalizeLogs(data);
return {
logs, oldest, older_than, filter, ...values,
};
};
export const getAdditionalLogsRequest = createAction('GET_ADDITIONAL_LOGS_REQUEST');
export const getAdditionalLogsFailure = createAction('GET_ADDITIONAL_LOGS_FAILURE');
export const getAdditionalLogsSuccess = createAction('GET_ADDITIONAL_LOGS_SUCCESS');
const checkFilteredLogs = async (data, filter, dispatch, total) => {
const { logs, oldest } = data;
const totalData = total || { logs };
const needToGetAdditionalLogs = (logs.length < TABLE_DEFAULT_PAGE_SIZE ||
totalData.logs.length < TABLE_DEFAULT_PAGE_SIZE) &&
oldest !== '';
if (needToGetAdditionalLogs) {
dispatch(getAdditionalLogsRequest());
try {
const additionalLogs = await getLogsWithParams({ older_than: oldest, filter });
if (additionalLogs.logs.length > 0) {
return await checkFilteredLogs(additionalLogs, filter, dispatch, {
logs: [...totalData.logs, ...additionalLogs.logs],
oldest: additionalLogs.oldest,
});
}
dispatch(getAdditionalLogsSuccess());
return totalData;
} catch (error) {
dispatch(addErrorToast({ error }));
dispatch(getAdditionalLogsFailure(error));
}
}
dispatch(getAdditionalLogsSuccess());
return totalData;
};
export const setLogsPagination = createAction('LOGS_PAGINATION');
-export const setLogsFilter = createAction('LOGS_FILTER');
+export const setLogsPage = createAction('SET_LOG_PAGE');
export const getLogsRequest = createAction('GET_LOGS_REQUEST');
export const getLogsFailure = createAction('GET_LOGS_FAILURE');
export const getLogsSuccess = createAction('GET_LOGS_SUCCESS');
-export const getLogs = config => async (dispatch) => {
+export const getLogs = config => async (dispatch, getState) => {
dispatch(getLogsRequest());
try {
-const { filter, lastRowTime: older_than } = config;
-const logs = normalizeLogs(await apiClient.getQueryLog({ ...filter, older_than }));
-dispatch(getLogsSuccess({ logs, ...config }));
+const { isFiltered, filter, page } = getState().queryLogs;
+const data = await getLogsWithParams({ ...config, filter });
if (isFiltered) {
const additionalData = await checkFilteredLogs(data, filter, dispatch);
const updatedData = additionalData.logs ? { ...data, ...additionalData } : data;
dispatch(getLogsSuccess(updatedData));
dispatch(setLogsPagination({ page, pageSize: TABLE_DEFAULT_PAGE_SIZE }));
} else {
dispatch(getLogsSuccess(data));
}
} catch (error) {
dispatch(addErrorToast({ error }));
dispatch(getLogsFailure(error));
}
};
export const setLogsFilterRequest = createAction('SET_LOGS_FILTER_REQUEST');
export const setLogsFilterFailure = createAction('SET_LOGS_FILTER_FAILURE');
export const setLogsFilterSuccess = createAction('SET_LOGS_FILTER_SUCCESS');
export const setLogsFilter = filter => async (dispatch) => {
dispatch(setLogsFilterRequest());
try {
const data = await getLogsWithParams({ older_than: '', filter });
const additionalData = await checkFilteredLogs(data, filter, dispatch);
const updatedData = additionalData.logs ? { ...data, ...additionalData } : data;
dispatch(setLogsFilterSuccess({ ...updatedData, filter }));
dispatch(setLogsPage(0));
} catch (error) {
dispatch(addErrorToast({ error }));
dispatch(setLogsFilterFailure(error));
}
};
export const clearLogsRequest = createAction('CLEAR_LOGS_REQUEST');
export const clearLogsFailure = createAction('CLEAR_LOGS_FAILURE');
export const clearLogsSuccess = createAction('CLEAR_LOGS_SUCCESS');

View File

@ -0,0 +1,116 @@
import React, { Fragment } from 'react';
import PropTypes from 'prop-types';
import { Field, reduxForm } from 'redux-form';
import { withNamespaces, Trans } from 'react-i18next';
import flow from 'lodash/flow';
import { renderField } from '../../../helpers/form';
import { RESPONSE_FILTER } from '../../../helpers/constants';
import Tooltip from '../../ui/Tooltip';
const renderFilterField = ({
input,
id,
className,
placeholder,
type,
disabled,
autoComplete,
tooltip,
meta: { touched, error },
}) => (
<Fragment>
<div className="logs__input-wrap">
<input
{...input}
id={id}
placeholder={placeholder}
type={type}
className={className}
disabled={disabled}
autoComplete={autoComplete}
/>
<span className="logs__notice">
<Tooltip text={tooltip} type='tooltip-custom--logs' />
</span>
{!disabled &&
touched &&
(error && <span className="form__message form__message--error">{error}</span>)}
</div>
</Fragment>
);
const Form = (props) => {
const {
t,
handleChange,
} = props;
return (
<form onSubmit={handleChange}>
<div className="row">
<div className="col-6 col-sm-3 my-2">
<Field
id="filter_domain"
name="filter_domain"
component={renderFilterField}
type="text"
className="form-control"
placeholder={t('domain_name_table_header')}
tooltip={t('query_log_strict_search')}
onChange={handleChange}
/>
</div>
<div className="col-6 col-sm-3 my-2">
<Field
id="filter_question_type"
name="filter_question_type"
component={renderField}
type="text"
className="form-control"
placeholder={t('type_table_header')}
onChange={handleChange}
/>
</div>
<div className="col-6 col-sm-3 my-2">
<Field
name="filter_response_status"
component="select"
className="form-control custom-select"
>
<option value={RESPONSE_FILTER.ALL}>
<Trans>show_all_filter_type</Trans>
</option>
<option value={RESPONSE_FILTER.FILTERED}>
<Trans>show_filtered_type</Trans>
</option>
</Field>
</div>
<div className="col-6 col-sm-3 my-2">
<Field
id="filter_client"
name="filter_client"
component={renderFilterField}
type="text"
className="form-control"
placeholder={t('client_table_header')}
tooltip={t('query_log_strict_search')}
onChange={handleChange}
/>
</div>
</div>
</form>
);
};
Form.propTypes = {
handleChange: PropTypes.func,
t: PropTypes.func.isRequired,
};
export default flow([
withNamespaces(),
reduxForm({
form: 'logsFilterForm',
}),
])(Form);

View File

@ -0,0 +1,52 @@
import React, { Component } from 'react';
import PropTypes from 'prop-types';
import debounce from 'lodash/debounce';
import classnames from 'classnames';
import { DEBOUNCE_FILTER_TIMEOUT, RESPONSE_FILTER } from '../../../helpers/constants';
import { isValidQuestionType } from '../../../helpers/helpers';
import Form from './Form';
import Card from '../../ui/Card';
class Filters extends Component {
getFilters = ({
filter_domain, filter_question_type, filter_response_status, filter_client,
}) => ({
filter_domain: filter_domain || '',
filter_question_type: isValidQuestionType(filter_question_type) ? filter_question_type.toUpperCase() : '',
filter_response_status: filter_response_status === RESPONSE_FILTER.FILTERED ? filter_response_status : '',
filter_client: filter_client || '',
});
handleFormChange = debounce((values) => {
const filter = this.getFilters(values);
this.props.setLogsFilter(filter);
}, DEBOUNCE_FILTER_TIMEOUT);
render() {
const { filter, processingAdditionalLogs } = this.props;
const cardBodyClass = classnames({
'card-body': true,
'card-body--loading': processingAdditionalLogs,
});
return (
<Card bodyType={cardBodyClass}>
<Form
initialValues={filter}
onChange={this.handleFormChange}
/>
</Card>
);
}
}
Filters.propTypes = {
filter: PropTypes.object.isRequired,
setLogsFilter: PropTypes.func.isRequired,
processingGetLogs: PropTypes.bool.isRequired,
processingAdditionalLogs: PropTypes.bool.isRequired,
};
export default Filters;

View File

@ -5,46 +5,40 @@ import escapeRegExp from 'lodash/escapeRegExp';
import endsWith from 'lodash/endsWith'; import endsWith from 'lodash/endsWith';
import { Trans, withNamespaces } from 'react-i18next'; import { Trans, withNamespaces } from 'react-i18next';
import { HashLink as Link } from 'react-router-hash-link'; import { HashLink as Link } from 'react-router-hash-link';
import debounce from 'lodash/debounce';
import { import {
formatTime, formatTime,
formatDateTime, formatDateTime,
isValidQuestionType,
} from '../../helpers/helpers'; } from '../../helpers/helpers';
import { SERVICES, FILTERED_STATUS, DEBOUNCE_TIMEOUT, DEFAULT_LOGS_FILTER } from '../../helpers/constants'; import { SERVICES, FILTERED_STATUS, TABLE_DEFAULT_PAGE_SIZE } from '../../helpers/constants';
import { getTrackerData } from '../../helpers/trackers/trackers'; import { getTrackerData } from '../../helpers/trackers/trackers';
import { formatClientCell } from '../../helpers/formatClientCell'; import { formatClientCell } from '../../helpers/formatClientCell';
import Filters from './Filters';
import PageTitle from '../ui/PageTitle'; import PageTitle from '../ui/PageTitle';
import Card from '../ui/Card'; import Card from '../ui/Card';
import Loading from '../ui/Loading'; import Loading from '../ui/Loading';
import PopoverFiltered from '../ui/PopoverFilter'; import PopoverFiltered from '../ui/PopoverFilter';
import Popover from '../ui/Popover'; import Popover from '../ui/Popover';
import Tooltip from '../ui/Tooltip';
import './Logs.css'; import './Logs.css';
const TABLE_FIRST_PAGE = 0; const TABLE_FIRST_PAGE = 0;
const TABLE_DEFAULT_PAGE_SIZE = 50; const INITIAL_REQUEST_DATA = ['', TABLE_FIRST_PAGE, TABLE_DEFAULT_PAGE_SIZE];
const INITIAL_REQUEST_DATA = ['', DEFAULT_LOGS_FILTER, TABLE_FIRST_PAGE, TABLE_DEFAULT_PAGE_SIZE];
const FILTERED_REASON = 'Filtered'; const FILTERED_REASON = 'Filtered';
const RESPONSE_FILTER = {
ALL: 'all',
FILTERED: 'filtered',
};
class Logs extends Component { class Logs extends Component {
componentDidMount() { componentDidMount() {
this.props.setLogsPage(TABLE_FIRST_PAGE);
this.getLogs(...INITIAL_REQUEST_DATA); this.getLogs(...INITIAL_REQUEST_DATA);
this.props.getFilteringStatus(); this.props.getFilteringStatus();
this.props.getClients(); this.props.getClients();
this.props.getLogsConfig(); this.props.getLogsConfig();
} }
getLogs = (lastRowTime, filter, page, pageSize, filtered) => { getLogs = (older_than, page) => {
if (this.props.queryLogs.enabled) { if (this.props.queryLogs.enabled) {
this.props.getLogs({ this.props.getLogs({
lastRowTime, filter, page, pageSize, filtered, older_than, page, pageSize: TABLE_DEFAULT_PAGE_SIZE,
}); });
} }
}; };
@ -53,16 +47,6 @@ class Logs extends Component {
window.location.reload(); window.location.reload();
}; };
handleLogsFiltering = debounce((lastRowTime, filter, page, pageSize, filtered) => {
this.props.getLogs({
lastRowTime,
filter,
page,
pageSize,
filtered,
});
}, DEBOUNCE_TIMEOUT);
renderTooltip = (isFiltered, rule, filter, service) => renderTooltip = (isFiltered, rule, filter, service) =>
isFiltered && <PopoverFiltered rule={rule} filter={filter} service={service} />; isFiltered && <PopoverFiltered rule={rule} filter={filter} service={service} />;
@ -232,72 +216,26 @@ class Logs extends Component {
); );
}; };
getFilterInput = ({ filter, onChange }) => (
<Fragment>
<div className="logs__input-wrap">
<input
type="text"
className="form-control"
onChange={event => onChange(event.target.value)}
value={filter ? filter.value : ''}
/>
<span className="logs__notice">
<Tooltip text={this.props.t('query_log_strict_search')} type='tooltip-custom--logs' />
</span>
</div>
</Fragment>
);
getFilters = (filtered) => {
const filteredObj = filtered.reduce((acc, cur) => ({ ...acc, [cur.id]: cur.value }), {});
const {
domain, client, type, response,
} = filteredObj;
return {
filter_domain: domain || '',
filter_client: client || '',
filter_question_type: isValidQuestionType(type) ? type.toUpperCase() : '',
filter_response_status: response === RESPONSE_FILTER.FILTERED ? response : '',
};
};
fetchData = (state) => { fetchData = (state) => {
const { pageSize, page, pages } = state; const { pages } = state;
const { allLogs, filter } = this.props.queryLogs; const { oldest, page } = this.props.queryLogs;
const isLastPage = pages && (page + 1 === pages); const isLastPage = pages && (page + 1 === pages);
if (isLastPage) { if (isLastPage) {
const lastRow = allLogs[allLogs.length - 1]; this.getLogs(oldest, page);
const lastRowTime = (lastRow && lastRow.time) || '';
this.getLogs(lastRowTime, filter, page, pageSize, true);
} else {
this.props.setLogsPagination({ page, pageSize });
} }
}; };
handleFilterChange = (filtered) => { changePage = (page) => {
const filters = this.getFilters(filtered); this.props.setLogsPage(page);
this.props.setLogsFilter(filters); this.props.setLogsPagination({ page, pageSize: TABLE_DEFAULT_PAGE_SIZE });
this.handleLogsFiltering('', filters, TABLE_FIRST_PAGE, TABLE_DEFAULT_PAGE_SIZE, true); };
}
showTotalPagesCount = (pages) => {
const { total, isEntireLog } = this.props.queryLogs;
const showEllipsis = !isEntireLog && total >= 500;
return (
<span className="-totalPages">
{pages || 1}{showEllipsis && '…' }
</span>
);
}
renderLogs() { renderLogs() {
const { queryLogs, dashboard, t } = this.props; const { queryLogs, dashboard, t } = this.props;
const { processingClients } = dashboard; const { processingClients } = dashboard;
const { const {
processingGetLogs, processingGetConfig, logs, pages, processingGetLogs, processingGetConfig, logs, pages, page,
} = queryLogs; } = queryLogs;
const isLoading = processingGetLogs || processingClients || processingGetConfig; const isLoading = processingGetLogs || processingClients || processingGetConfig;
@ -306,7 +244,6 @@ class Logs extends Component {
Header: t('time_table_header'), Header: t('time_table_header'),
accessor: 'time', accessor: 'time',
maxWidth: 100, maxWidth: 100,
filterable: false,
Cell: this.getTimeCell, Cell: this.getTimeCell,
}, },
{ {
@ -314,7 +251,6 @@ class Logs extends Component {
accessor: 'domain', accessor: 'domain',
minWidth: 180, minWidth: 180,
Cell: this.getDomainCell, Cell: this.getDomainCell,
Filter: this.getFilterInput,
}, },
{ {
Header: t('type_table_header'), Header: t('type_table_header'),
@ -326,28 +262,6 @@ class Logs extends Component {
accessor: 'response', accessor: 'response',
minWidth: 250, minWidth: 250,
Cell: this.getResponseCell, Cell: this.getResponseCell,
filterMethod: (filter, row) => {
if (filter.value === RESPONSE_FILTER.FILTERED) {
// eslint-disable-next-line no-underscore-dangle
const { reason } = row._original;
return this.checkFiltered(reason) || this.checkWhiteList(reason);
}
return true;
},
Filter: ({ filter, onChange }) => (
<select
className="form-control custom-select"
onChange={event => onChange(event.target.value)}
value={filter ? filter.value : RESPONSE_FILTER.ALL}
>
<option value={RESPONSE_FILTER.ALL}>
<Trans>show_all_filter_type</Trans>
</option>
<option value={RESPONSE_FILTER.FILTERED}>
<Trans>show_filtered_type</Trans>
</option>
</select>
),
}, },
{ {
Header: t('client_table_header'), Header: t('client_table_header'),
@ -355,34 +269,36 @@ class Logs extends Component {
maxWidth: 240, maxWidth: 240,
minWidth: 240, minWidth: 240,
Cell: this.getClientCell, Cell: this.getClientCell,
Filter: this.getFilterInput,
}, },
]; ];
return ( return (
<ReactTable <ReactTable
manual manual
filterable
minRows={5} minRows={5}
page={page}
pages={pages} pages={pages}
columns={columns} columns={columns}
filterable={false}
sortable={false} sortable={false}
data={logs || []} data={logs || []}
loading={isLoading} loading={isLoading}
showPageJump={false}
onFetchData={this.fetchData}
onFilteredChange={this.handleFilterChange}
className="logs__table"
showPagination={true} showPagination={true}
showPaginationTop={true}
showPageJump={false}
showPageSizeOptions={false}
onFetchData={this.fetchData}
onPageChange={this.changePage}
className="logs__table"
defaultPageSize={TABLE_DEFAULT_PAGE_SIZE} defaultPageSize={TABLE_DEFAULT_PAGE_SIZE}
previousText={t('previous_btn')} previousText={t('previous_btn')}
nextText={t('next_btn')} nextText={t('next_btn')}
loadingText={t('loading_table_status')} loadingText={t('loading_table_status')}
pageText={t('page_table_footer_text')}
ofText={t('of_table_footer_text')}
rowsText={t('rows_table_footer_text')} rowsText={t('rows_table_footer_text')}
noDataText={t('no_logs_found')} noDataText={t('no_logs_found')}
renderTotalPagesCount={this.showTotalPagesCount} pageText={''}
ofText={''}
renderTotalPagesCount={() => false}
defaultFilterMethod={(filter, row) => { defaultFilterMethod={(filter, row) => {
const id = filter.pivotId || filter.id; const id = filter.pivotId || filter.id;
return row[id] !== undefined return row[id] !== undefined
@ -426,7 +342,9 @@ class Logs extends Component {
render() { render() {
const { queryLogs, t } = this.props; const { queryLogs, t } = this.props;
const { enabled, processingGetConfig } = queryLogs; const {
enabled, processingGetConfig, processingAdditionalLogs, processingGetLogs,
} = queryLogs;
const refreshButton = enabled ? ( const refreshButton = enabled ? (
<button <button
@ -446,7 +364,17 @@ class Logs extends Component {
<Fragment> <Fragment>
<PageTitle title={t('query_log')}>{refreshButton}</PageTitle> <PageTitle title={t('query_log')}>{refreshButton}</PageTitle>
{enabled && processingGetConfig && <Loading />} {enabled && processingGetConfig && <Loading />}
{enabled && !processingGetConfig && <Card>{this.renderLogs()}</Card>} {enabled && !processingGetConfig && (
<Fragment>
<Filters
filter={queryLogs.filter}
processingGetLogs={processingGetLogs}
processingAdditionalLogs={processingAdditionalLogs}
setLogsFilter={this.props.setLogsFilter}
/>
<Card>{this.renderLogs()}</Card>
</Fragment>
)}
{!enabled && !processingGetConfig && ( {!enabled && !processingGetConfig && (
<Card> <Card>
<div className="lead text-center py-6"> <div className="lead text-center py-6">
@ -479,6 +407,7 @@ Logs.propTypes = {
getLogsConfig: PropTypes.func.isRequired, getLogsConfig: PropTypes.func.isRequired,
setLogsPagination: PropTypes.func.isRequired, setLogsPagination: PropTypes.func.isRequired,
setLogsFilter: PropTypes.func.isRequired, setLogsFilter: PropTypes.func.isRequired,
setLogsPage: PropTypes.func.isRequired,
t: PropTypes.func.isRequired, t: PropTypes.func.isRequired,
}; };

View File

@ -33,6 +33,36 @@
text-align: center; text-align: center;
} }
.card-body--loading {
position: relative;
}
.card-body--loading:before {
content: "";
position: absolute;
top: 0;
left: 0;
z-index: 100;
width: 100%;
height: 100%;
background-color: rgba(255, 255, 255, 0.6);
}
.card-body--loading:after {
content: "";
position: absolute;
z-index: 101;
left: 50%;
top: 50%;
width: 40px;
height: 40px;
margin-top: -20px;
margin-left: -20px;
background-image: url("data:image/svg+xml;charset=utf-8,%3Csvg%20xmlns%3D%22http%3A%2F%2Fwww.w3.org%2F2000%2Fsvg%22%20viewBox%3D%220%200%2047.6%2047.6%22%20height%3D%22100%25%22%20width%3D%22100%25%22%3E%3Cpath%20opacity%3D%22.235%22%20fill%3D%22%23979797%22%20d%3D%22M44.4%2011.9l-5.2%203c1.5%202.6%202.4%205.6%202.4%208.9%200%209.8-8%2017.8-17.8%2017.8-6.6%200-12.3-3.6-15.4-8.9l-5.2%203C7.3%2042.8%2015%2047.6%2023.8%2047.6c13.1%200%2023.8-10.7%2023.8-23.8%200-4.3-1.2-8.4-3.2-11.9z%22%2F%3E%3Cpath%20fill%3D%22%2366b574%22%20d%3D%22M3.2%2035.7C0%2030.2-.8%2023.8.8%2017.6%202.5%2011.5%206.4%206.4%2011.9%203.2%2017.4%200%2023.8-.8%2030%20.8c6.1%201.6%2011.3%205.6%2014.4%2011.1l-5.2%203c-2.4-4.1-6.2-7.1-10.8-8.3C23.8%205.4%2019%206%2014.9%208.4s-7.1%206.2-8.3%2010.8c-1.2%204.6-.6%209.4%201.8%2013.5l-5.2%203z%22%2F%3E%3C%2Fsvg%3E");
will-change: transform;
animation: clockwise 2s linear infinite;
}
.card-title-stats { .card-title-stats {
font-size: 13px; font-size: 13px;
color: #9aa0ac; color: #9aa0ac;

View File

@ -64,6 +64,7 @@
top: calc(100% + 10px); top: calc(100% + 10px);
right: -10px; right: -10px;
left: initial; left: initial;
width: 255px;
transform: none; transform: none;
} }

View File

@ -1,7 +1,7 @@
import { connect } from 'react-redux'; import { connect } from 'react-redux';
import { addSuccessToast, getClients } from '../actions'; import { addSuccessToast, getClients } from '../actions';
import { getFilteringStatus, setRules } from '../actions/filtering'; import { getFilteringStatus, setRules } from '../actions/filtering';
import { getLogs, getLogsConfig, setLogsPagination, setLogsFilter } from '../actions/queryLogs'; import { getLogs, getLogsConfig, setLogsPagination, setLogsFilter, setLogsPage } from '../actions/queryLogs';
import Logs from '../components/Logs'; import Logs from '../components/Logs';
const mapStateToProps = (state) => { const mapStateToProps = (state) => {
@ -19,6 +19,7 @@ const mapDispatchToProps = {
getLogsConfig, getLogsConfig,
setLogsPagination, setLogsPagination,
setLogsFilter, setLogsFilter,
setLogsPage,
}; };
export default connect( export default connect(

View File

@ -141,6 +141,7 @@ export const STANDARD_HTTPS_PORT = 443;
export const EMPTY_DATE = '0001-01-01T00:00:00Z'; export const EMPTY_DATE = '0001-01-01T00:00:00Z';
export const DEBOUNCE_TIMEOUT = 300; export const DEBOUNCE_TIMEOUT = 300;
export const DEBOUNCE_FILTER_TIMEOUT = 500;
export const CHECK_TIMEOUT = 1000; export const CHECK_TIMEOUT = 1000;
export const STOP_TIMEOUT = 10000; export const STOP_TIMEOUT = 10000;
@ -379,3 +380,9 @@ export const DEFAULT_LOGS_FILTER = {
}; };
export const DEFAULT_LANGUAGE = 'en'; export const DEFAULT_LANGUAGE = 'en';
export const TABLE_DEFAULT_PAGE_SIZE = 100;
export const RESPONSE_FILTER = {
ALL: 'all',
FILTERED: 'filtered',
};

View File

@ -20,25 +20,50 @@ const queryLogs = handleActions(
}; };
}, },
[actions.setLogsFilter]: (state, { payload }) => ( [actions.setLogsPage]: (state, { payload }) => ({
{ ...state, filter: payload } ...state,
), page: payload,
}),
[actions.setLogsFilterRequest]: state => ({ ...state, processingGetLogs: true }),
[actions.setLogsFilterFailure]: state => ({ ...state, processingGetLogs: false }),
[actions.setLogsFilterSuccess]: (state, { payload }) => {
const { logs, oldest, filter } = payload;
const pageSize = 100;
const page = 0;
const pages = Math.ceil(logs.length / pageSize);
const total = logs.length;
const rowsStart = pageSize * page;
const rowsEnd = (pageSize * page) + pageSize;
const logsSlice = logs.slice(rowsStart, rowsEnd);
const isFiltered = Object.keys(filter).some(key => filter[key]);
return {
...state,
oldest,
filter,
isFiltered,
pages,
total,
logs: logsSlice,
allLogs: logs,
processingGetLogs: false,
};
},
[actions.getLogsRequest]: state => ({ ...state, processingGetLogs: true }), [actions.getLogsRequest]: state => ({ ...state, processingGetLogs: true }),
[actions.getLogsFailure]: state => ({ ...state, processingGetLogs: false }), [actions.getLogsFailure]: state => ({ ...state, processingGetLogs: false }),
[actions.getLogsSuccess]: (state, { payload }) => { [actions.getLogsSuccess]: (state, { payload }) => {
const { const {
logs, lastRowTime, page, pageSize, filtered, logs, oldest, older_than, page, pageSize,
} = payload; } = payload;
let logsWithOffset = state.allLogs.length > 0 ? state.allLogs : logs; let logsWithOffset = state.allLogs.length > 0 ? state.allLogs : logs;
let allLogs = logs; let allLogs = logs;
if (lastRowTime) { if (older_than) {
logsWithOffset = [...state.allLogs, ...logs]; logsWithOffset = [...state.allLogs, ...logs];
allLogs = [...state.allLogs, ...logs]; allLogs = [...state.allLogs, ...logs];
} else if (filtered) {
logsWithOffset = logs;
allLogs = logs;
} }
const pages = Math.ceil(logsWithOffset.length / pageSize); const pages = Math.ceil(logsWithOffset.length / pageSize);
@ -49,6 +74,7 @@ const queryLogs = handleActions(
return { return {
...state, ...state,
oldest,
pages, pages,
total, total,
allLogs, allLogs,
@ -81,20 +107,33 @@ const queryLogs = handleActions(
...payload, ...payload,
processingSetConfig: false, processingSetConfig: false,
}), }),
[actions.getAdditionalLogsRequest]: state => ({
...state, processingAdditionalLogs: true, processingGetLogs: true,
}),
[actions.getAdditionalLogsFailure]: state => ({
...state, processingAdditionalLogs: false, processingGetLogs: false,
}),
[actions.getAdditionalLogsSuccess]: state => ({
...state, processingAdditionalLogs: false, processingGetLogs: false,
}),
}, },
{ {
processingGetLogs: true, processingGetLogs: true,
processingClear: false, processingClear: false,
processingGetConfig: false, processingGetConfig: false,
processingSetConfig: false, processingSetConfig: false,
processingAdditionalLogs: false,
logs: [], logs: [],
interval: 1, interval: 1,
allLogs: [], allLogs: [],
page: 0,
pages: 0, pages: 0,
total: 0, total: 0,
enabled: true, enabled: true,
older_than: '', oldest: '',
filter: DEFAULT_LOGS_FILTER, filter: DEFAULT_LOGS_FILTER,
isFiltered: false,
}, },
); );

View File

@ -462,7 +462,7 @@ func (s *Server) handleDNSRequest(p *proxy.Proxy, d *proxy.DNSContext) error {
if d.Upstream != nil {
upstreamAddr = d.Upstream.Address()
}
-s.queryLog.Add(msg, d.Res, res, elapsed, d.Addr, upstreamAddr)
+s.queryLog.Add(msg, d.Res, res, elapsed, getIP(d.Addr), upstreamAddr)
}
s.updateStats(d, elapsed, *res)
@ -471,6 +471,17 @@ func (s *Server) handleDNSRequest(p *proxy.Proxy, d *proxy.DNSContext) error {
return nil
}
// Get IP address from net.Addr
func getIP(addr net.Addr) net.IP {
switch addr := addr.(type) {
case *net.UDPAddr:
return addr.IP
case *net.TCPAddr:
return addr.IP
}
return nil
}
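A small test-style sketch of the new getIP() helper (hypothetical; it would sit next to the dnsforward package's tests): it returns the IP for the UDP and TCP address types handled above and nil for anything else.

package dnsforward

import (
	"net"
	"testing"
)

func TestGetIP(t *testing.T) {
	udp := &net.UDPAddr{IP: net.ParseIP("127.0.0.1"), Port: 53}
	tcp := &net.TCPAddr{IP: net.ParseIP("192.168.1.1"), Port: 53}
	if !getIP(udp).Equal(net.ParseIP("127.0.0.1")) {
		t.Fatalf("unexpected IP for UDP address")
	}
	if !getIP(tcp).Equal(net.ParseIP("192.168.1.1")) {
		t.Fatalf("unexpected IP for TCP address")
	}
	// any other net.Addr implementation yields nil
	if getIP(&net.UnixAddr{Name: "/tmp/test.sock", Net: "unix"}) != nil {
		t.Fatalf("expected nil for an unsupported address type")
	}
}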
func (s *Server) updateStats(d *proxy.DNSContext, elapsed time.Duration, res dnsfilter.Result) {
if s.stats == nil {
return

View File

@ -1,6 +1,46 @@
# AdGuard Home API Change Log
## v0.99.3: API changes
### API: Get query log: GET /control/querylog
The response data is now a JSON object, not an array.
Response:
200 OK
{
"oldest":"2006-01-02T15:04:05.999999999Z07:00"
"data":[
{
"answer":[
{
"ttl":10,
"type":"AAAA",
"value":"::"
}
...
],
"client":"127.0.0.1",
"elapsedMs":"0.098403",
"filterId":1,
"question":{
"class":"IN",
"host":"doubleclick.net",
"type":"AAAA"
},
"reason":"FilteredBlackList",
"rule":"||doubleclick.net^",
"status":"NOERROR",
"time":"2006-01-02T15:04:05.999999999Z07:00"
}
...
]
}
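For Go clients, the new response shape can be decoded into types like the following sketch; the field names mirror the example above, while the package, type and function names themselves are illustrative rather than part of the commit.

// Hypothetical client-side types for the GET /control/querylog response.
package qlogclient

import "encoding/json"

type QueryLogResponse struct {
	Oldest string         `json:"oldest"`
	Data   []QueryLogItem `json:"data"`
}

type QueryLogItem struct {
	Answer []struct {
		TTL   int    `json:"ttl"`
		Type  string `json:"type"`
		Value string `json:"value"`
	} `json:"answer"`
	Client    string `json:"client"`
	ElapsedMs string `json:"elapsedMs"`
	FilterID  int64  `json:"filterId"`
	Question  struct {
		Class string `json:"class"`
		Host  string `json:"host"`
		Type  string `json:"type"`
	} `json:"question"`
	Reason string `json:"reason"`
	Rule   string `json:"rule"`
	Status string `json:"status"`
	Time   string `json:"time"`
}

// ParseQueryLog decodes a response body into the structures above.
func ParseQueryLog(b []byte) (*QueryLogResponse, error) {
	resp := &QueryLogResponse{}
	return resp, json.Unmarshal(b, resp)
}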
## v0.99.1: API changes
### API: Get current user info: GET /control/profile

View File

@ -1417,10 +1417,16 @@ definitions:
example: "2018-11-26T00:02:41+03:00" example: "2018-11-26T00:02:41+03:00"
QueryLog: QueryLog:
type: "array" type: "object"
description: "Query log" description: "Query log"
items: properties:
$ref: "#/definitions/QueryLogItem" oldest:
type: "string"
example: "2018-11-26T00:02:41+03:00"
data:
type: "array"
items:
$ref: "#/definitions/QueryLogItem"
QueryLogConfig: QueryLogConfig:
type: "object" type: "object"

View File

@ -20,8 +20,8 @@ const (
queryLogFileName = "querylog.json" // .gz added during compression
getDataLimit = 500 // GetData(): maximum log entries to return
-// maximum data chunks to parse when filtering entries
-maxFilteringChunks = 10
+// maximum entries to parse when searching
+maxSearchEntries = 50000
)
// queryLog is a structure that writes and reads the DNS query log
@ -94,45 +94,31 @@ func (l *queryLog) clear() {
} }
type logEntry struct {
-Question []byte
+IP string `json:"IP"`
+Time time.Time `json:"T"`
+QHost string `json:"QH"`
+QType string `json:"QT"`
+QClass string `json:"QC"`
Answer []byte `json:",omitempty"` // sometimes empty answers happen like binerdunt.top or rev2.globalrootservers.net
Result dnsfilter.Result
-Time time.Time
Elapsed time.Duration
-IP string
Upstream string `json:",omitempty"` // if empty, means it was cached
}
// getIPString is a helper function that extracts IP address from net.Addr func (l *queryLog) Add(question *dns.Msg, answer *dns.Msg, result *dnsfilter.Result, elapsed time.Duration, ip net.IP, upstream string) {
func getIPString(addr net.Addr) string {
switch addr := addr.(type) {
case *net.UDPAddr:
return addr.IP.String()
case *net.TCPAddr:
return addr.IP.String()
}
return ""
}
func (l *queryLog) Add(question *dns.Msg, answer *dns.Msg, result *dnsfilter.Result, elapsed time.Duration, addr net.Addr, upstream string) {
if !l.conf.Enabled { if !l.conf.Enabled {
return return
} }
var q []byte if question == nil || len(question.Question) != 1 || len(question.Question[0].Name) == 0 ||
ip == nil {
return
}
var a []byte var a []byte
var err error var err error
ip := getIPString(addr)
if question == nil {
return
}
q, err = question.Pack()
if err != nil {
log.Printf("failed to pack question for querylog: %s", err)
return
}
if answer != nil { if answer != nil {
a, err = answer.Pack() a, err = answer.Pack()
@ -148,14 +134,18 @@ func (l *queryLog) Add(question *dns.Msg, answer *dns.Msg, result *dnsfilter.Res
now := time.Now() now := time.Now()
entry := logEntry{ entry := logEntry{
Question: q, IP: ip.String(),
Time: now,
Answer: a, Answer: a,
Result: *result, Result: *result,
Time: now,
Elapsed: elapsed, Elapsed: elapsed,
IP: ip,
Upstream: upstream, Upstream: upstream,
} }
q := question.Question[0]
entry.QHost = strings.ToLower(q.Name[:len(q.Name)-1]) // remove the last dot
entry.QType = dns.Type(q.Qtype).String()
entry.QClass = dns.Class(q.Qclass).String()
l.bufferLock.Lock() l.bufferLock.Lock()
l.buffer = append(l.buffer, &entry) l.buffer = append(l.buffer, &entry)
@ -182,33 +172,22 @@ func isNeeded(entry *logEntry, params getDataParams) bool {
return false return false
} }
if len(params.Domain) != 0 || params.QuestionType != 0 { if len(params.QuestionType) != 0 {
m := dns.Msg{} if entry.QType != params.QuestionType {
_ = m.Unpack(entry.Question) return false
if params.QuestionType != 0 {
if m.Question[0].Qtype != params.QuestionType {
return false
}
}
if len(params.Domain) != 0 && params.StrictMatchDomain {
if m.Question[0].Name != params.Domain {
return false
}
} else if len(params.Domain) != 0 {
if strings.Index(m.Question[0].Name, params.Domain) == -1 {
return false
}
} }
} }
if len(params.Client) != 0 && params.StrictMatchClient { if len(params.Domain) != 0 {
if entry.IP != params.Client { if (params.StrictMatchDomain && entry.QHost != params.Domain) ||
(!params.StrictMatchDomain && strings.Index(entry.QHost, params.Domain) == -1) {
return false return false
} }
} else if len(params.Client) != 0 { }
if strings.Index(entry.IP, params.Client) == -1 {
if len(params.Client) != 0 {
if (params.StrictMatchClient && entry.IP != params.Client) ||
(!params.StrictMatchClient && strings.Index(entry.IP, params.Client) == -1) {
return false return false
} }
} }
@ -216,31 +195,23 @@ func isNeeded(entry *logEntry, params getDataParams) bool {
return true return true
} }
func (l *queryLog) readFromFile(params getDataParams) ([]*logEntry, int) { func (l *queryLog) readFromFile(params getDataParams) ([]*logEntry, time.Time, int) {
entries := []*logEntry{} entries := []*logEntry{}
olderThan := params.OlderThan oldest := time.Time{}
totalChunks := 0
total := 0
r := l.OpenReader() r := l.OpenReader()
if r == nil { if r == nil {
return entries, 0 return entries, time.Time{}, 0
} }
r.BeginRead(olderThan, getDataLimit) r.BeginRead(params.OlderThan, getDataLimit, &params)
for totalChunks < maxFilteringChunks { total := uint64(0)
first := true for total <= maxSearchEntries {
newEntries := []*logEntry{} newEntries := []*logEntry{}
for { for {
entry := r.Next() entry := r.Next()
if entry == nil { if entry == nil {
break break
} }
total++
if first {
first = false
olderThan = entry.Time
}
if !isNeeded(entry, params) { if !isNeeded(entry, params) {
continue continue
@ -251,7 +222,7 @@ func (l *queryLog) readFromFile(params getDataParams) ([]*logEntry, int) {
newEntries = append(newEntries, entry) newEntries = append(newEntries, entry)
} }
log.Debug("entries: +%d (%d) older-than:%s", len(newEntries), len(entries), olderThan) log.Debug("entries: +%d (%d) [%d]", len(newEntries), len(entries), r.Total())
entries = append(newEntries, entries...) entries = append(newEntries, entries...)
if len(entries) > getDataLimit { if len(entries) > getDataLimit {
@ -259,15 +230,16 @@ func (l *queryLog) readFromFile(params getDataParams) ([]*logEntry, int) {
entries = entries[toremove:] entries = entries[toremove:]
break break
} }
if first || len(entries) == getDataLimit { if r.Total() == 0 || len(entries) == getDataLimit {
break break
} }
totalChunks++ total += r.Total()
r.BeginReadPrev(olderThan, getDataLimit) oldest = r.Oldest()
r.BeginReadPrev(getDataLimit)
} }
r.Close() r.Close()
return entries, total return entries, oldest, int(total)
} }
// Parameters for getData() // Parameters for getData()
@ -275,7 +247,7 @@ type getDataParams struct {
OlderThan time.Time // return entries that are older than this value OlderThan time.Time // return entries that are older than this value
Domain string // filter by domain name in question Domain string // filter by domain name in question
Client string // filter by client IP Client string // filter by client IP
QuestionType uint16 // filter by question type QuestionType string // filter by question type
ResponseStatus responseStatusType // filter by response status ResponseStatus responseStatusType // filter by response status
StrictMatchDomain bool // if Domain value must be matched strictly StrictMatchDomain bool // if Domain value must be matched strictly
StrictMatchClient bool // if Client value must be matched strictly StrictMatchClient bool // if Client value must be matched strictly
@ -291,19 +263,16 @@ const (
) )
// Get log entries // Get log entries
func (l *queryLog) getData(params getDataParams) []map[string]interface{} { func (l *queryLog) getData(params getDataParams) map[string]interface{} {
var data = []map[string]interface{}{} var data = []map[string]interface{}{}
if len(params.Domain) != 0 && params.StrictMatchDomain { var oldest time.Time
params.Domain = params.Domain + "."
}
now := time.Now() now := time.Now()
entries := []*logEntry{} entries := []*logEntry{}
total := 0 total := 0
// add from file // add from file
entries, total = l.readFromFile(params) entries, oldest, total = l.readFromFile(params)
if params.OlderThan.IsZero() { if params.OlderThan.IsZero() {
params.OlderThan = now params.OlderThan = now
@ -332,26 +301,12 @@ func (l *queryLog) getData(params getDataParams) []map[string]interface{} {
// process the elements from latest to oldest // process the elements from latest to oldest
for i := len(entries) - 1; i >= 0; i-- { for i := len(entries) - 1; i >= 0; i-- {
entry := entries[i] entry := entries[i]
var q *dns.Msg
var a *dns.Msg var a *dns.Msg
if len(entry.Question) == 0 {
continue
}
q = new(dns.Msg)
if err := q.Unpack(entry.Question); err != nil {
log.Tracef("q.Unpack(): %s", err)
continue
}
if len(q.Question) != 1 {
log.Tracef("len(q.Question) != 1")
continue
}
if len(entry.Answer) > 0 { if len(entry.Answer) > 0 {
a = new(dns.Msg) a = new(dns.Msg)
if err := a.Unpack(entry.Answer); err != nil { if err := a.Unpack(entry.Answer); err != nil {
log.Debug("Failed to unpack dns message answer: %s", err) log.Debug("Failed to unpack dns message answer: %s: %s", err, string(entry.Answer))
a = nil a = nil
} }
} }
@ -363,9 +318,9 @@ func (l *queryLog) getData(params getDataParams) []map[string]interface{} {
"client": entry.IP, "client": entry.IP,
} }
jsonEntry["question"] = map[string]interface{}{ jsonEntry["question"] = map[string]interface{}{
"host": strings.ToLower(strings.TrimSuffix(q.Question[0].Name, ".")), "host": entry.QHost,
"type": dns.Type(q.Question[0].Qtype).String(), "type": entry.QType,
"class": dns.Class(q.Question[0].Qclass).String(), "class": entry.QClass,
} }
if a != nil { if a != nil {
@ -390,7 +345,17 @@ func (l *queryLog) getData(params getDataParams) []map[string]interface{} {
log.Debug("QueryLog: prepared data (%d/%d) older than %s in %s", log.Debug("QueryLog: prepared data (%d/%d) older than %s in %s",
len(entries), total, params.OlderThan, time.Since(now)) len(entries), total, params.OlderThan, time.Since(now))
return data
var result = map[string]interface{}{}
if len(entries) == getDataLimit {
oldest = entries[0].Time
}
result["oldest"] = ""
if !oldest.IsZero() {
result["oldest"] = oldest.Format(time.RFC3339Nano)
}
result["data"] = data
return result
} }
func answerToMap(a *dns.Msg) []map[string]interface{} { func answerToMap(a *dns.Msg) []map[string]interface{} {
@ -408,9 +373,9 @@ func answerToMap(a *dns.Msg) []map[string]interface{} {
// try most common record types // try most common record types
switch v := k.(type) { switch v := k.(type) {
case *dns.A: case *dns.A:
answer["value"] = v.A answer["value"] = v.A.String()
case *dns.AAAA: case *dns.AAAA:
answer["value"] = v.AAAA answer["value"] = v.AAAA.String()
case *dns.MX: case *dns.MX:
answer["value"] = fmt.Sprintf("%v %v", v.Preference, v.Mx) answer["value"] = fmt.Sprintf("%v %v", v.Preference, v.Mx)
case *dns.CNAME: case *dns.CNAME:

View File

@ -67,12 +67,12 @@ func (l *queryLog) handleQueryLog(w http.ResponseWriter, r *http.Request) {
}
if len(req.filterQuestionType) != 0 {
-qtype, ok := dns.StringToType[req.filterQuestionType]
+_, ok := dns.StringToType[req.filterQuestionType]
if !ok {
httpError(r, w, http.StatusBadRequest, "invalid question_type")
return
}
-params.QuestionType = qtype
+params.QuestionType = req.filterQuestionType
}
if len(req.filterResponseStatus) != 0 {

View File

@ -21,7 +21,7 @@ type QueryLog interface {
Close()
// Add a log entry
-Add(question *dns.Msg, answer *dns.Msg, result *dnsfilter.Result, elapsed time.Duration, addr net.Addr, upstream string)
+Add(question *dns.Msg, answer *dns.Msg, result *dnsfilter.Result, elapsed time.Duration, ip net.IP, upstream string)
// WriteDiskConfig - write configuration
WriteDiskConfig(dc *DiskConfig) WriteDiskConfig(dc *DiskConfig)

View File

@ -4,13 +4,17 @@ import (
"bufio" "bufio"
"bytes" "bytes"
"compress/gzip" "compress/gzip"
"encoding/base64"
"encoding/json" "encoding/json"
"io" "io"
"os" "os"
"strconv"
"strings" "strings"
"time" "time"
"github.com/AdguardTeam/AdGuardHome/dnsfilter"
"github.com/AdguardTeam/golibs/log" "github.com/AdguardTeam/golibs/log"
"github.com/miekg/dns"
) )
const enableGzip = false const enableGzip = false
@ -145,13 +149,15 @@ func (l *queryLog) periodicRotate() {
// Reader is the DB reader context // Reader is the DB reader context
type Reader struct { type Reader struct {
ql *queryLog ql *queryLog
search *getDataParams
f *os.File f *os.File
reader *bufio.Reader // reads file line by line reader *bufio.Reader // reads file line by line
now time.Time now time.Time
validFrom int64 // UNIX time (ns) validFrom int64 // UNIX time (ns)
olderThan int64 // UNIX time (ns) olderThan int64 // UNIX time (ns)
oldest time.Time
files []string files []string
ifile int ifile int
@ -161,10 +167,12 @@ type Reader struct {
latest bool // return the latest entries latest bool // return the latest entries
filePrepared bool filePrepared bool
searching bool // we're seaching for an entry with exact time stamp seeking bool // we're seaching for an entry with exact time stamp
fseeker fileSeeker // file seeker object fseeker fileSeeker // file seeker object
fpos uint64 // current file offset fpos uint64 // current file offset
nSeekRequests uint32 // number of Seek() requests made (finding a new line doesn't count) nSeekRequests uint32 // number of Seek() requests made (finding a new line doesn't count)
timecnt uint64
} }
type fileSeeker struct { type fileSeeker struct {
@ -197,8 +205,8 @@ func (r *Reader) Close() {
if r.count > 0 { if r.count > 0 {
perunit = elapsed / time.Duration(r.count) perunit = elapsed / time.Duration(r.count)
} }
log.Debug("querylog: read %d entries in %v, %v/entry, seek-reqs:%d", log.Debug("querylog: read %d entries in %v, %v/entry, seek-reqs:%d time:%dus (%d%%)",
r.count, elapsed, perunit, r.nSeekRequests) r.count, elapsed, perunit, r.nSeekRequests, r.timecnt/1000, r.timecnt*100/uint64(elapsed.Nanoseconds()))
if r.f != nil { if r.f != nil {
r.f.Close() r.f.Close()
@ -208,25 +216,26 @@ func (r *Reader) Close() {
// BeginRead - start reading // BeginRead - start reading
// olderThan: stop returning entries when an entry with this time is reached // olderThan: stop returning entries when an entry with this time is reached
// count: minimum number of entries to return // count: minimum number of entries to return
func (r *Reader) BeginRead(olderThan time.Time, count uint64) { func (r *Reader) BeginRead(olderThan time.Time, count uint64, search *getDataParams) {
r.olderThan = olderThan.UnixNano() r.olderThan = olderThan.UnixNano()
r.latest = olderThan.IsZero() r.latest = olderThan.IsZero()
r.oldest = time.Time{}
r.search = search
r.limit = count r.limit = count
if r.latest { if r.latest {
r.olderThan = r.now.UnixNano() r.olderThan = r.now.UnixNano()
} }
r.filePrepared = false r.filePrepared = false
r.searching = false r.seeking = false
} }
// BeginReadPrev - start reading the previous data chunk // BeginReadPrev - start reading the previous data chunk
func (r *Reader) BeginReadPrev(olderThan time.Time, count uint64) { func (r *Reader) BeginReadPrev(count uint64) {
r.olderThan = olderThan.UnixNano() r.olderThan = r.oldest.UnixNano()
r.latest = olderThan.IsZero() r.oldest = time.Time{}
r.latest = false
r.limit = count r.limit = count
if r.latest { r.count = 0
r.olderThan = r.now.UnixNano()
}
off := r.fpos - maxEntrySize*(r.limit+1) off := r.fpos - maxEntrySize*(r.limit+1)
if int64(off) < maxEntrySize { if int64(off) < maxEntrySize {
@ -245,7 +254,7 @@ func (r *Reader) BeginReadPrev(olderThan time.Time, count uint64) {
r.fseeker.pos = r.fpos r.fseeker.pos = r.fpos
r.filePrepared = true r.filePrepared = true
r.searching = false r.seeking = false
} }
// Perform binary seek // Perform binary seek
@ -335,7 +344,7 @@ func (r *Reader) prepareRead() bool {
} }
} else { } else {
// start searching in file: we'll read the first chunk of data from the middle of file // start searching in file: we'll read the first chunk of data from the middle of file
r.searching = true r.seeking = true
r.fseeker = fileSeeker{} r.fseeker = fileSeeker{}
r.fseeker.target = uint64(r.olderThan) r.fseeker.target = uint64(r.olderThan)
r.fseeker.hi = fsize r.fseeker.hi = fsize
@ -358,6 +367,226 @@ func (r *Reader) prepareRead() bool {
return true return true
} }
// Get bool value from "key":bool
func readJSONBool(s, name string) (bool, bool) {
i := strings.Index(s, "\""+name+"\":")
if i == -1 {
return false, false
}
start := i + 1 + len(name) + 2
b := false
if strings.HasPrefix(s[start:], "true") {
b = true
} else if !strings.HasPrefix(s[start:], "false") {
return false, false
}
return b, true
}
// Get value from "key":"value"
func readJSONValue(s, name string) string {
i := strings.Index(s, "\""+name+"\":\"")
if i == -1 {
return ""
}
start := i + 1 + len(name) + 3
i = strings.IndexByte(s[start:], '"')
if i == -1 {
return ""
}
end := start + i
return s[start:end]
}
func (r *Reader) applySearch(str string) bool {
if r.search.ResponseStatus == responseStatusFiltered {
boolVal, ok := readJSONBool(str, "IsFiltered")
if !ok || !boolVal {
return false
}
}
if len(r.search.Domain) != 0 {
val := readJSONValue(str, "QH")
if len(val) == 0 {
return false
}
if (r.search.StrictMatchDomain && val != r.search.Domain) ||
(!r.search.StrictMatchDomain && strings.Index(val, r.search.Domain) == -1) {
return false
}
}
if len(r.search.QuestionType) != 0 {
val := readJSONValue(str, "QT")
if len(val) == 0 {
return false
}
if val != r.search.QuestionType {
return false
}
}
if len(r.search.Client) != 0 {
val := readJSONValue(str, "IP")
if len(val) == 0 {
log.Debug("QueryLog: failed to decode")
return false
}
if (r.search.StrictMatchClient && val != r.search.Client) ||
(!r.search.StrictMatchClient && strings.Index(val, r.search.Client) == -1) {
return false
}
}
return true
}
const (
jsonTErr = iota
jsonTObj
jsonTStr
jsonTNum
jsonTBool
)
// Parse JSON key-value pair
// e.g.: "key":VALUE where VALUE is "string", true|false (boolean), or 123.456 (number)
// Note the limitations:
// . doesn't support whitespace
// . doesn't support "null"
// . doesn't validate boolean or number
// . no proper handling of {} braces
// . no handling of [] brackets
// Return (key, value, type)
func readJSON(ps *string) (string, string, int32) {
s := *ps
k := ""
v := ""
t := int32(jsonTErr)
q1 := strings.IndexByte(s, '"')
if q1 == -1 {
return k, v, t
}
q2 := strings.IndexByte(s[q1+1:], '"')
if q2 == -1 {
return k, v, t
}
k = s[q1+1 : q1+1+q2]
s = s[q1+1+q2+1:]
if len(s) < 2 || s[0] != ':' {
return k, v, t
}
if s[1] == '"' {
q2 = strings.IndexByte(s[2:], '"')
if q2 == -1 {
return k, v, t
}
v = s[2 : 2+q2]
t = jsonTStr
s = s[2+q2+1:]
} else if s[1] == '{' {
t = jsonTObj
s = s[1+1:]
} else {
sep := strings.IndexAny(s[1:], ",}")
if sep == -1 {
return k, v, t
}
v = s[1 : 1+sep]
if s[1] == 't' || s[1] == 'f' {
t = jsonTBool
} else if s[1] == '.' || (s[1] >= '0' && s[1] <= '9') {
t = jsonTNum
}
s = s[1+sep+1:]
}
*ps = s
return k, v, t
}
// nolint (gocyclo)
func decode(ent *logEntry, str string) {
var b bool
var i int
var err error
for {
k, v, t := readJSON(&str)
if t == jsonTErr {
break
}
switch k {
case "IP":
ent.IP = v
case "T":
ent.Time, err = time.Parse(time.RFC3339, v)
case "QH":
ent.QHost = v
case "QT":
ent.QType = v
case "QC":
ent.QClass = v
case "Answer":
ent.Answer, err = base64.StdEncoding.DecodeString(v)
case "IsFiltered":
b, err = strconv.ParseBool(v)
ent.Result.IsFiltered = b
case "Rule":
ent.Result.Rule = v
case "FilterID":
i, err = strconv.Atoi(v)
ent.Result.FilterID = int64(i)
case "Reason":
i, err = strconv.Atoi(v)
ent.Result.Reason = dnsfilter.Reason(i)
case "Upstream":
ent.Upstream = v
case "Elapsed":
i, err = strconv.Atoi(v)
ent.Elapsed = time.Duration(i)
// pre-v0.99.3 compatibility:
case "Question":
var qstr []byte
qstr, err = base64.StdEncoding.DecodeString(v)
if err != nil {
break
}
q := new(dns.Msg)
err = q.Unpack(qstr)
if err != nil {
break
}
ent.QHost = q.Question[0].Name
if len(ent.QHost) == 0 {
break
}
ent.QHost = ent.QHost[:len(ent.QHost)-1]
ent.QType = dns.TypeToString[q.Question[0].Qtype]
ent.QClass = dns.ClassToString[q.Question[0].Qclass]
case "Time":
ent.Time, err = time.Parse(time.RFC3339, v)
}
if err != nil {
log.Debug("decode err: %s", err)
break
}
}
}
// Next - return the next entry or nil if reading is finished // Next - return the next entry or nil if reading is finished
func (r *Reader) Next() *logEntry { // nolint func (r *Reader) Next() *logEntry { // nolint
for { for {
@ -379,24 +608,28 @@ func (r *Reader) Next() *logEntry { // nolint
r.filePrepared = true r.filePrepared = true
} }
// open decoder
b, err := r.reader.ReadBytes('\n') b, err := r.reader.ReadBytes('\n')
if err != nil { if err != nil {
return nil return nil
} }
strReader := strings.NewReader(string(b)) str := string(b)
jd := json.NewDecoder(strReader)
// read data val := readJSONValue(str, "T")
var entry logEntry if len(val) == 0 {
err = jd.Decode(&entry) val = readJSONValue(str, "Time")
if err != nil { }
log.Debug("QueryLog: Failed to decode: %s", err) if len(val) == 0 {
log.Debug("QueryLog: failed to decode")
continue continue
} }
tm, err := time.Parse(time.RFC3339, val)
if err != nil {
log.Debug("QueryLog: failed to decode")
continue
}
t := tm.UnixNano()
t := entry.Time.UnixNano() if r.seeking {
if r.searching {
r.reader = nil r.reader = nil
rr := r.fseeker.seekBinary(uint64(t)) rr := r.fseeker.seekBinary(uint64(t))
@ -407,7 +640,7 @@ func (r *Reader) Next() *logEntry { // nolint
} else if rr == 0 { } else if rr == 0 {
// We found the target entry. // We found the target entry.
// We'll start reading the previous chunk of data. // We'll start reading the previous chunk of data.
r.searching = false r.seeking = false
off := r.fpos - (maxEntrySize * (r.limit + 1)) off := r.fpos - (maxEntrySize * (r.limit + 1))
if int64(off) < maxEntrySize { if int64(off) < maxEntrySize {
@ -430,19 +663,37 @@ func (r *Reader) Next() *logEntry { // nolint
continue continue
} }
if r.oldest.IsZero() {
r.oldest = tm
}
if t < r.validFrom { if t < r.validFrom {
continue continue
} }
if t >= r.olderThan { if t >= r.olderThan {
return nil return nil
} }
r.count++ r.count++
return &entry
if !r.applySearch(str) {
continue
}
st := time.Now()
var ent logEntry
decode(&ent, str)
r.timecnt += uint64(time.Now().Sub(st).Nanoseconds())
return &ent
} }
} }
// Total returns the total number of items // Total returns the total number of processed items
func (r *Reader) Total() int { func (r *Reader) Total() uint64 {
return 0 return r.count
}
// Oldest returns the time of the oldest processed entry
func (r *Reader) Oldest() time.Time {
return r.oldest
} }

View File

@ -2,6 +2,7 @@ package querylog
import ( import (
"net" "net"
"os"
"testing" "testing"
"time" "time"
@ -10,16 +11,94 @@ import (
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
) )
func prepareTestDir() string {
const dir = "./agh-test"
_ = os.RemoveAll(dir)
_ = os.MkdirAll(dir, 0755)
return dir
}
// Check adding and loading (with filtering) entries from disk and memory
func TestQueryLog(t *testing.T) { func TestQueryLog(t *testing.T) {
conf := Config{ conf := Config{
Enabled: true, Enabled: true,
Interval: 1, Interval: 1,
} }
conf.BaseDir = prepareTestDir()
defer func() { _ = os.RemoveAll(conf.BaseDir) }()
l := newQueryLog(conf) l := newQueryLog(conf)
// add disk entries
addEntry(l, "example.org", "1.2.3.4", "0.1.2.3")
addEntry(l, "example.org", "1.2.3.4", "0.1.2.3")
// write to disk
l.flushLogBuffer(true)
// add memory entries
addEntry(l, "test.example.org", "2.2.3.4", "0.1.2.4")
// get all entries
params := getDataParams{
OlderThan: time.Time{},
}
d := l.getData(params)
mdata := d["data"].([]map[string]interface{})
assert.True(t, len(mdata) == 2)
assert.True(t, checkEntry(t, mdata[0], "test.example.org", "2.2.3.4", "0.1.2.4"))
assert.True(t, checkEntry(t, mdata[1], "example.org", "1.2.3.4", "0.1.2.3"))
// search by domain (strict)
params = getDataParams{
OlderThan: time.Time{},
Domain: "test.example.org",
StrictMatchDomain: true,
}
d = l.getData(params)
mdata = d["data"].([]map[string]interface{})
assert.True(t, len(mdata) == 1)
assert.True(t, checkEntry(t, mdata[0], "test.example.org", "2.2.3.4", "0.1.2.4"))
// search by domain
params = getDataParams{
OlderThan: time.Time{},
Domain: "example.org",
StrictMatchDomain: false,
}
d = l.getData(params)
mdata = d["data"].([]map[string]interface{})
assert.True(t, len(mdata) == 2)
assert.True(t, checkEntry(t, mdata[0], "test.example.org", "2.2.3.4", "0.1.2.4"))
assert.True(t, checkEntry(t, mdata[1], "example.org", "1.2.3.4", "0.1.2.3"))
// search by client IP (strict)
params = getDataParams{
OlderThan: time.Time{},
Client: "0.1.2.3",
StrictMatchClient: true,
}
d = l.getData(params)
mdata = d["data"].([]map[string]interface{})
assert.True(t, len(mdata) == 1)
assert.True(t, checkEntry(t, mdata[0], "example.org", "1.2.3.4", "0.1.2.3"))
// search by client IP
params = getDataParams{
OlderThan: time.Time{},
Client: "0.1.2",
StrictMatchClient: false,
}
d = l.getData(params)
mdata = d["data"].([]map[string]interface{})
assert.True(t, len(mdata) == 2)
assert.True(t, checkEntry(t, mdata[0], "test.example.org", "2.2.3.4", "0.1.2.4"))
assert.True(t, checkEntry(t, mdata[1], "example.org", "1.2.3.4", "0.1.2.3"))
}
func addEntry(l *queryLog, host, answerStr, client string) {
q := dns.Msg{} q := dns.Msg{}
q.Question = append(q.Question, dns.Question{ q.Question = append(q.Question, dns.Question{
Name: "example.org.", Name: host + ".",
Qtype: dns.TypeA, Qtype: dns.TypeA,
Qclass: dns.ClassINET, Qclass: dns.ClassINET,
}) })
@ -32,17 +111,49 @@ func TestQueryLog(t *testing.T) {
Rrtype: dns.TypeA, Rrtype: dns.TypeA,
Class: dns.ClassINET, Class: dns.ClassINET,
} }
answer.A = net.IP{1, 2, 3, 4} answer.A = net.ParseIP(answerStr)
a.Answer = append(a.Answer, answer) a.Answer = append(a.Answer, answer)
res := dnsfilter.Result{} res := dnsfilter.Result{}
l.Add(&q, &a, &res, 0, nil, "upstream") l.Add(&q, &a, &res, 0, net.ParseIP(client), "upstream")
}
params := getDataParams{
OlderThan: time.Now(), func checkEntry(t *testing.T, m map[string]interface{}, host, answer, client string) bool {
} mq := m["question"].(map[string]interface{})
d := l.getData(params) ma := m["answer"].([]map[string]interface{})
m := d[0] ma0 := ma[0]
mq := m["question"].(map[string]interface{}) if !assert.True(t, mq["host"].(string) == host) ||
assert.True(t, mq["host"].(string) == "example.org") !assert.True(t, mq["class"].(string) == "IN") ||
!assert.True(t, mq["type"].(string) == "A") ||
!assert.True(t, ma0["value"].(string) == answer) ||
!assert.True(t, m["client"].(string) == client) {
return false
}
return true
}
func TestJSON(t *testing.T) {
s := `
{"keystr":"val","obj":{"keybool":true,"keyint":123456}}
`
k, v, jtype := readJSON(&s)
assert.Equal(t, jtype, int32(jsonTStr))
assert.Equal(t, "keystr", k)
assert.Equal(t, "val", v)
k, v, jtype = readJSON(&s)
assert.Equal(t, jtype, int32(jsonTObj))
assert.Equal(t, "obj", k)
k, v, jtype = readJSON(&s)
assert.Equal(t, jtype, int32(jsonTBool))
assert.Equal(t, "keybool", k)
assert.Equal(t, "true", v)
k, v, jtype = readJSON(&s)
assert.Equal(t, jtype, int32(jsonTNum))
assert.Equal(t, "keyint", k)
assert.Equal(t, "123456", v)
k, v, jtype = readJSON(&s)
assert.True(t, jtype == jsonTErr)
} }
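The readJSONValue() and readJSONBool() helpers used for raw-line matching are not exercised by the test above; a small additional test sketch (hypothetical, same querylog package, reusing the testify assert import) shows how they pull fields straight out of an on-disk line before it is fully decoded.

package querylog

import (
	"testing"

	"github.com/stretchr/testify/assert"
)

func TestRawLineHelpers(t *testing.T) {
	// a shortened example of one on-disk entry in the new format (values are made up)
	line := `{"IP":"127.0.0.1","T":"2019-11-19T15:21:42+03:00","QH":"example.org","QT":"A","QC":"IN","Result":{"IsFiltered":true,"Reason":3},"Elapsed":12345}`

	assert.Equal(t, "example.org", readJSONValue(line, "QH"))
	assert.Equal(t, "A", readJSONValue(line, "QT"))
	assert.Equal(t, "", readJSONValue(line, "Upstream")) // missing key -> empty string

	b, ok := readJSONBool(line, "IsFiltered")
	assert.True(t, ok)
	assert.True(t, b)
}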