package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"io/ioutil"
	"log"
	"net/http"
	"os"
	"path/filepath"
	"regexp"
	"strconv"
	"strings"
	"time"

	"github.com/AdguardTeam/AdGuardHome/upstream"

	corednsplugin "github.com/AdguardTeam/AdGuardHome/coredns_plugin"
	"gopkg.in/asaskevich/govalidator.v4"
)

const updatePeriod = time.Minute * 30

var filterTitleRegexp = regexp.MustCompile(`^! Title: +(.*)$`)

// cached version.json to avoid hammering github.io for each page reload
var versionCheckJSON []byte
var versionCheckLastTime time.Time

const versionCheckURL = "https://adguardteam.github.io/AdGuardHome/version.json"
const versionCheckPeriod = time.Hour * 8

var client = &http.Client{
	Timeout: time.Second * 30,
}

// -------------------
// coredns run control
// -------------------
func tellCoreDNSToReload() {
	corednsplugin.Reload <- true
}

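// writeAllConfigsAndReloadCoreDNS persists all configuration files to disk
// and, if that succeeds, tells the CoreDNS plugin to reload.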
func writeAllConfigsAndReloadCoreDNS() error {
	err := writeAllConfigs()
	if err != nil {
		log.Printf("Couldn't write all configs: %s", err)
		return err
	}
	tellCoreDNSToReload()
	return nil
}

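// httpUpdateConfigReloadDNSReturnOK is the common tail of most handlers below:
// it writes the config, reloads CoreDNS and reports the outcome to the client.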
func httpUpdateConfigReloadDNSReturnOK(w http.ResponseWriter, r *http.Request) {
	err := writeAllConfigsAndReloadCoreDNS()
	if err != nil {
		errortext := fmt.Sprintf("Couldn't write config file: %s", err)
		log.Println(errortext)
		http.Error(w, errortext, http.StatusInternalServerError)
		return
	}
	returnOK(w, r)
}

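// returnOK writes a plain-text "OK" response.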
func returnOK(w http.ResponseWriter, r *http.Request) {
	_, err := fmt.Fprintf(w, "OK\n")
	if err != nil {
		errortext := fmt.Sprintf("Couldn't write body: %s", err)
		log.Println(errortext)
		http.Error(w, errortext, http.StatusInternalServerError)
	}
}

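// handleStatus reports the DNS server state and its effective settings as JSON.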
func handleStatus(w http.ResponseWriter, r *http.Request) {
	data := map[string]interface{}{
		"dns_address":        config.BindHost,
		"dns_port":           config.CoreDNS.Port,
		"protection_enabled": config.CoreDNS.ProtectionEnabled,
		"querylog_enabled":   config.CoreDNS.QueryLogEnabled,
		"running":            isRunning(),
		"bootstrap_dns":      config.CoreDNS.BootstrapDNS,
		"upstream_dns":       config.CoreDNS.UpstreamDNS,
		"version":            VersionString,
		"language":           config.Language,
	}

	jsonVal, err := json.Marshal(data)
	if err != nil {
		errortext := fmt.Sprintf("Unable to marshal status json: %s", err)
		log.Println(errortext)
		http.Error(w, errortext, 500)
		return
	}

	w.Header().Set("Content-Type", "application/json")
	_, err = w.Write(jsonVal)
	if err != nil {
		errortext := fmt.Sprintf("Unable to write response json: %s", err)
		log.Println(errortext)
		http.Error(w, errortext, 500)
		return
	}
}

func handleProtectionEnable(w http.ResponseWriter, r *http.Request) {
	config.CoreDNS.ProtectionEnabled = true
	httpUpdateConfigReloadDNSReturnOK(w, r)
}

func handleProtectionDisable(w http.ResponseWriter, r *http.Request) {
	config.CoreDNS.ProtectionEnabled = false
	httpUpdateConfigReloadDNSReturnOK(w, r)
}

// --------
// querylog
// --------
func handleQueryLogEnable(w http.ResponseWriter, r *http.Request) {
	config.CoreDNS.QueryLogEnabled = true
	httpUpdateConfigReloadDNSReturnOK(w, r)
}

func handleQueryLogDisable(w http.ResponseWriter, r *http.Request) {
	config.CoreDNS.QueryLogEnabled = false
	httpUpdateConfigReloadDNSReturnOK(w, r)
}

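// httpError logs the formatted message and sends it to the client with the given status code.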
func httpError(w http.ResponseWriter, code int, format string, args ...interface{}) {
	text := fmt.Sprintf(format, args...)
	log.Println(text)
	http.Error(w, text, code)
}

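// handleSetUpstreamDNS replaces the upstream DNS servers with the
// whitespace-separated list from the request body; an empty body restores the defaults.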
func handleSetUpstreamDNS(w http.ResponseWriter, r *http.Request) {
	body, err := ioutil.ReadAll(r.Body)
	if err != nil {
		errorText := fmt.Sprintf("Failed to read request body: %s", err)
		log.Println(errorText)
		http.Error(w, errorText, http.StatusBadRequest)
		return
	}
	// if empty body -- user is asking for default servers
	hosts := strings.Fields(string(body))

	if len(hosts) == 0 {
		config.CoreDNS.UpstreamDNS = defaultDNS
	} else {
		config.CoreDNS.UpstreamDNS = hosts
	}

	err = writeAllConfigs()
	if err != nil {
		errorText := fmt.Sprintf("Couldn't write config file: %s", err)
		log.Println(errorText)
		http.Error(w, errorText, http.StatusInternalServerError)
		return
	}
	tellCoreDNSToReload()
	_, err = fmt.Fprintf(w, "OK %d servers\n", len(hosts))
	if err != nil {
		errorText := fmt.Sprintf("Couldn't write body: %s", err)
		log.Println(errorText)
		http.Error(w, errorText, http.StatusInternalServerError)
	}
}

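// handleTestUpstreamDNS checks every server listed in the request body with
// checkDNS and returns a per-server "OK"/error map as JSON.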
func handleTestUpstreamDNS(w http.ResponseWriter, r *http.Request) {
	body, err := ioutil.ReadAll(r.Body)
	if err != nil {
		errorText := fmt.Sprintf("Failed to read request body: %s", err)
		log.Println(errorText)
		http.Error(w, errorText, 400)
		return
	}
	hosts := strings.Fields(string(body))

	if len(hosts) == 0 {
		errorText := "No servers specified"
		log.Println(errorText)
		http.Error(w, errorText, http.StatusBadRequest)
		return
	}

	result := map[string]string{}

	for _, host := range hosts {
		err = checkDNS(host)
		if err != nil {
			log.Println(err)
			result[host] = err.Error()
		} else {
			result[host] = "OK"
		}
	}

	jsonVal, err := json.Marshal(result)
	if err != nil {
		errorText := fmt.Sprintf("Unable to marshal status json: %s", err)
		log.Println(errorText)
		http.Error(w, errorText, http.StatusInternalServerError)
		return
	}

	w.Header().Set("Content-Type", "application/json")
	_, err = w.Write(jsonVal)
	if err != nil {
		errorText := fmt.Sprintf("Couldn't write body: %s", err)
		log.Println(errorText)
		http.Error(w, errorText, http.StatusInternalServerError)
	}
}

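// checkDNS creates an upstream for the given address and verifies that it
// answers a health-check query.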
func checkDNS(input string) error {
	u, err := upstream.NewUpstream(input, config.CoreDNS.BootstrapDNS)
	if err != nil {
		return err
	}
	defer u.Close()

	alive, err := upstream.IsAlive(u)
	if err != nil {
		return fmt.Errorf("couldn't communicate with DNS server %s: %s", input, err)
	}

	if !alive {
		return fmt.Errorf("DNS server has not passed the healthcheck: %s", input)
	}

	return nil
}

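// handleGetVersionJSON proxies version.json from versionCheckURL, caching the
// response for versionCheckPeriod so that page reloads don't hammer github.io.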
func handleGetVersionJSON(w http.ResponseWriter, r *http.Request) {
	now := time.Now()
	if now.Sub(versionCheckLastTime) <= versionCheckPeriod && len(versionCheckJSON) != 0 {
		// return cached copy
		w.Header().Set("Content-Type", "application/json")
		w.Write(versionCheckJSON)
		return
	}

	resp, err := client.Get(versionCheckURL)
	if err != nil {
		errortext := fmt.Sprintf("Couldn't fetch version.json: %T %s\n", err, err)
		log.Println(errortext)
		http.Error(w, errortext, http.StatusBadGateway)
		return
	}
	if resp != nil && resp.Body != nil {
		defer resp.Body.Close()
	}

	// read the body entirely
	body, err := ioutil.ReadAll(resp.Body)
	if err != nil {
		errortext := fmt.Sprintf("Couldn't read response body: %s", err)
		log.Println(errortext)
		http.Error(w, errortext, http.StatusBadGateway)
		return
	}

	w.Header().Set("Content-Type", "application/json")
	_, err = w.Write(body)
	if err != nil {
		errortext := fmt.Sprintf("Couldn't write body: %s", err)
		log.Println(errortext)
		http.Error(w, errortext, http.StatusInternalServerError)
	}

	versionCheckLastTime = now
	versionCheckJSON = body
}

// ---------
// filtering
// ---------

func handleFilteringEnable(w http.ResponseWriter, r *http.Request) {
	config.CoreDNS.FilteringEnabled = true
	httpUpdateConfigReloadDNSReturnOK(w, r)
}

func handleFilteringDisable(w http.ResponseWriter, r *http.Request) {
	config.CoreDNS.FilteringEnabled = false
	httpUpdateConfigReloadDNSReturnOK(w, r)
}

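// handleFilteringStatus returns the filtering switch, the filter list and the
// user rules as JSON; the config is read-locked while it is marshalled.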
func handleFilteringStatus(w http.ResponseWriter, r *http.Request) {
	data := map[string]interface{}{
		"enabled": config.CoreDNS.FilteringEnabled,
	}

	config.RLock()
	data["filters"] = config.Filters
	data["user_rules"] = config.UserRules
	jsonVal, err := json.Marshal(data)
	config.RUnlock()

	if err != nil {
		errortext := fmt.Sprintf("Unable to marshal status json: %s", err)
		log.Println(errortext)
		http.Error(w, errortext, 500)
		return
	}

	w.Header().Set("Content-Type", "application/json")
	_, err = w.Write(jsonVal)
	if err != nil {
		errortext := fmt.Sprintf("Unable to write response json: %s", err)
		log.Println(errortext)
		http.Error(w, errortext, 500)
		return
	}
}

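// handleFilteringAddURL validates the filter subscription URL from the JSON
// request body, downloads its contents, saves them to disk and adds the
// filter to the configuration.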
func handleFilteringAddURL(w http.ResponseWriter, r *http.Request) {
	filter := filter{}
	err := json.NewDecoder(r.Body).Decode(&filter)
	if err != nil {
		httpError(w, http.StatusBadRequest, "Failed to parse request body json: %s", err)
		return
	}

	if len(filter.URL) == 0 {
		http.Error(w, "URL parameter was not specified", 400)
		return
	}

	if valid := govalidator.IsRequestURL(filter.URL); !valid {
		http.Error(w, "URL parameter is not valid request URL", 400)
		return
	}

	// Check for duplicates
	for i := range config.Filters {
		if config.Filters[i].URL == filter.URL {
			errorText := fmt.Sprintf("Filter URL already added -- %s", filter.URL)
			log.Println(errorText)
			http.Error(w, errorText, http.StatusBadRequest)
			return
		}
	}

	// Set necessary properties
	filter.ID = assignUniqueFilterID()
	filter.Enabled = true

	// Download the filter contents
	ok, err := filter.update(true)
	if err != nil {
		errorText := fmt.Sprintf("Couldn't fetch filter from url %s: %s", filter.URL, err)
		log.Println(errorText)
		http.Error(w, errorText, http.StatusBadRequest)
		return
	}
	if filter.RulesCount == 0 {
		errorText := fmt.Sprintf("Filter at the url %s has no rules (maybe it points to blank page?)", filter.URL)
		log.Println(errorText)
		http.Error(w, errorText, http.StatusBadRequest)
		return
	}
	if !ok {
		errorText := fmt.Sprintf("Filter at the url %s is invalid (maybe it points to blank page?)", filter.URL)
		log.Println(errorText)
		http.Error(w, errorText, http.StatusBadRequest)
		return
	}

	// Save the filter contents
	err = filter.save()
	if err != nil {
		errorText := fmt.Sprintf("Failed to save filter %d due to %s", filter.ID, err)
		log.Println(errorText)
		http.Error(w, errorText, http.StatusBadRequest)
		return
	}

	// URL is deemed valid, append it to filters, update config, write new filter file and tell coredns to reload it
	config.Filters = append(config.Filters, filter)
	err = writeAllConfigs()
	if err != nil {
		errorText := fmt.Sprintf("Couldn't write config file: %s", err)
		log.Println(errorText)
		http.Error(w, errorText, http.StatusInternalServerError)
		return
	}

	tellCoreDNSToReload()

	_, err = fmt.Fprintf(w, "OK %d rules\n", filter.RulesCount)
	if err != nil {
		errorText := fmt.Sprintf("Couldn't write body: %s", err)
		log.Println(errorText)
		http.Error(w, errorText, http.StatusInternalServerError)
	}
}

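// handleFilteringRemoveURL deletes the filter with the given URL from the
// configuration along with its cached contents on disk.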
func handleFilteringRemoveURL(w http.ResponseWriter, r *http.Request) {
	parameters, err := parseParametersFromBody(r.Body)
	if err != nil {
		errorText := fmt.Sprintf("failed to parse parameters from body: %s", err)
		log.Println(errorText)
		http.Error(w, errorText, 400)
		return
	}

	url, ok := parameters["url"]
	if !ok {
		http.Error(w, "URL parameter was not specified", 400)
		return
	}

	if valid := govalidator.IsRequestURL(url); !valid {
		http.Error(w, "URL parameter is not valid request URL", 400)
		return
	}

	// go through each element and delete if url matches
	newFilters := config.Filters[:0]
	for _, filter := range config.Filters {
		if filter.URL != url {
			newFilters = append(newFilters, filter)
		} else {
			// Remove the filter file
			err := os.Remove(filter.Path())
			if err != nil {
				errorText := fmt.Sprintf("Couldn't remove the filter file: %s", err)
				http.Error(w, errorText, http.StatusInternalServerError)
				return
			}
		}
	}
	// Update the configuration after removing filter files
	config.Filters = newFilters
	httpUpdateConfigReloadDNSReturnOK(w, r)
}

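// handleFilteringEnableURL enables a previously added filter identified by its URL.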
func handleFilteringEnableURL(w http.ResponseWriter, r *http.Request) {
	parameters, err := parseParametersFromBody(r.Body)
	if err != nil {
		errorText := fmt.Sprintf("failed to parse parameters from body: %s", err)
		log.Println(errorText)
		http.Error(w, errorText, 400)
		return
	}

	url, ok := parameters["url"]
	if !ok {
		http.Error(w, "URL parameter was not specified", 400)
		return
	}

	if valid := govalidator.IsRequestURL(url); !valid {
		http.Error(w, "URL parameter is not valid request URL", http.StatusBadRequest)
		return
	}

	found := false
	for i := range config.Filters {
		filter := &config.Filters[i] // otherwise we will be operating on a copy
		if filter.URL == url {
			filter.Enabled = true
			found = true
		}
	}

	if !found {
		http.Error(w, "URL parameter was not previously added", http.StatusBadRequest)
		return
	}

	// kick off refresh of rules from new URLs
	refreshFiltersIfNeccessary(false)
	httpUpdateConfigReloadDNSReturnOK(w, r)
}

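// handleFilteringDisableURL disables a previously added filter identified by its URL.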
func handleFilteringDisableURL(w http.ResponseWriter, r *http.Request) {
	parameters, err := parseParametersFromBody(r.Body)
	if err != nil {
		errorText := fmt.Sprintf("failed to parse parameters from body: %s", err)
		log.Println(errorText)
		http.Error(w, errorText, 400)
		return
	}

	url, ok := parameters["url"]
	if !ok {
		http.Error(w, "URL parameter was not specified", 400)
		return
	}

	if valid := govalidator.IsRequestURL(url); !valid {
		http.Error(w, "URL parameter is not valid request URL", http.StatusBadRequest)
		return
	}

	found := false
	for i := range config.Filters {
		filter := &config.Filters[i] // otherwise we will be operating on a copy
		if filter.URL == url {
			filter.Enabled = false
			found = true
		}
	}

	if !found {
		http.Error(w, "URL parameter was not previously added", http.StatusBadRequest)
		return
	}

	httpUpdateConfigReloadDNSReturnOK(w, r)
}

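// handleFilteringSetRules replaces the user-defined rules with the lines of the request body.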
func handleFilteringSetRules(w http.ResponseWriter, r *http.Request) {
	body, err := ioutil.ReadAll(r.Body)
	if err != nil {
		errorText := fmt.Sprintf("Failed to read request body: %s", err)
		log.Println(errorText)
		http.Error(w, errorText, 400)
		return
	}

	config.UserRules = strings.Split(string(body), "\n")
	httpUpdateConfigReloadDNSReturnOK(w, r)
}

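// handleFilteringRefresh triggers a filter update; a non-empty "force" query
// parameter bypasses the update period check.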
func handleFilteringRefresh(w http.ResponseWriter, r *http.Request) {
	force := r.URL.Query().Get("force")
	updated := refreshFiltersIfNeccessary(force != "")
	fmt.Fprintf(w, "OK %d filters updated\n", updated)
}

// Sets up a timer that periodically checks for filter updates
func periodicallyRefreshFilters() {
	for range time.Tick(time.Minute) {
		refreshFiltersIfNeccessary(false)
	}
}

// Checks for filter updates if necessary
// If force is true, it ignores the filter.LastUpdated field value
func refreshFiltersIfNeccessary(force bool) int {
	config.Lock()

	// fetch URLs
	updateCount := 0
	for i := range config.Filters {
		filter := &config.Filters[i] // otherwise we will be operating on a copy

		if filter.ID == 0 { // protect against users modifying the yaml and removing the ID
			filter.ID = assignUniqueFilterID()
		}

		updated, err := filter.update(force)
		if err != nil {
			log.Printf("Failed to update filter %s: %s\n", filter.URL, err)
			continue
		}
		if updated {
			// Saving it to the filters dir now
			err = filter.save()
			if err != nil {
				log.Printf("Failed to save the updated filter %d: %s", filter.ID, err)
				continue
			}

			updateCount++
		}
	}
	config.Unlock()

	if updateCount > 0 {
		tellCoreDNSToReload()
	}
	return updateCount
}

// A helper function that parses filter contents and returns a number of rules and a filter name (if there's any)
func parseFilterContents(contents []byte) (int, string) {
	lines := strings.Split(string(contents), "\n")
	rulesCount := 0
	name := ""
	seenTitle := false

	// Count lines in the filter
	for _, line := range lines {
		line = strings.TrimSpace(line)
		if len(line) > 0 && line[0] == '!' {
			if m := filterTitleRegexp.FindAllStringSubmatch(line, -1); len(m) > 0 && len(m[0]) >= 2 && !seenTitle {
				name = m[0][1]
				seenTitle = true
			}
		} else if len(line) != 0 {
			rulesCount++
		}
	}

	return rulesCount, name
}

// Checks for filter updates
// If "force" is true -- does not check the filter's LastUpdated field
// Call "save" to persist the filter contents
func (filter *filter) update(force bool) (bool, error) {
	if filter.ID == 0 { // protect against users deleting the ID
		filter.ID = assignUniqueFilterID()
	}
	if !filter.Enabled {
		return false, nil
	}
	if !force && time.Since(filter.LastUpdated) <= updatePeriod {
		return false, nil
	}

	log.Printf("Downloading update for filter %d from %s", filter.ID, filter.URL)

	// use the same update period for failed filter downloads to avoid flooding with requests
	filter.LastUpdated = time.Now()

	resp, err := client.Get(filter.URL)
	if resp != nil && resp.Body != nil {
		defer resp.Body.Close()
	}
	if err != nil {
		log.Printf("Couldn't request filter from URL %s, skipping: %s", filter.URL, err)
		return false, err
	}

	if resp.StatusCode != 200 {
		log.Printf("Got status code %d from URL %s, skipping", resp.StatusCode, filter.URL)
		return false, fmt.Errorf("got status code != 200: %d", resp.StatusCode)
	}

	contentType := strings.ToLower(resp.Header.Get("content-type"))
	if !strings.HasPrefix(contentType, "text/plain") {
		log.Printf("Non-text response %s from %s, skipping", contentType, filter.URL)
		return false, fmt.Errorf("non-text response %s", contentType)
	}

	body, err := ioutil.ReadAll(resp.Body)
	if err != nil {
		log.Printf("Couldn't fetch filter contents from URL %s, skipping: %s", filter.URL, err)
		return false, err
	}

	// Extract filter name and count number of rules
	rulesCount, filterName := parseFilterContents(body)

	if filterName != "" {
		filter.Name = filterName
	}

	// Check if the filter has been really changed
	if bytes.Equal(filter.Contents, body) {
		log.Printf("The filter %d text has not changed", filter.ID)
		return false, nil
	}

	log.Printf("Filter %d has been updated: %d bytes, %d rules", filter.ID, len(body), rulesCount)
	filter.RulesCount = rulesCount
	filter.Contents = body

	return true, nil
}

// saves filter contents to the file in dataDir
func (filter *filter) save() error {
	filterFilePath := filter.Path()
	log.Printf("Saving filter %d contents to: %s", filter.ID, filterFilePath)

	return safeWriteFile(filterFilePath, filter.Contents)
}

// loads filter contents from the file in dataDir
func (filter *filter) load() error {
	if !filter.Enabled {
		// No need to load a filter that is not enabled
		return nil
	}

	filterFilePath := filter.Path()
	log.Printf("Loading filter %d contents from: %s", filter.ID, filterFilePath)

	if _, err := os.Stat(filterFilePath); os.IsNotExist(err) {
		// do nothing, file doesn't exist
		return err
	}

	filterFileContents, err := ioutil.ReadFile(filterFilePath)
	if err != nil {
		return err
	}

	log.Printf("Filter %d length is %d", filter.ID, len(filterFileContents))
	filter.Contents = filterFileContents

	// Now extract the rules count
	rulesCount, _ := parseFilterContents(filter.Contents)
	filter.RulesCount = rulesCount

	return nil
}

// Path to the filter contents
func (filter *filter) Path() string {
	return filepath.Join(config.ourBinaryDir, dataDir, filterDir, strconv.FormatInt(filter.ID, 10)+".txt")
}

// ------------
// safebrowsing
// ------------

func handleSafeBrowsingEnable(w http.ResponseWriter, r *http.Request) {
	config.CoreDNS.SafeBrowsingEnabled = true
	httpUpdateConfigReloadDNSReturnOK(w, r)
}

func handleSafeBrowsingDisable(w http.ResponseWriter, r *http.Request) {
	config.CoreDNS.SafeBrowsingEnabled = false
	httpUpdateConfigReloadDNSReturnOK(w, r)
}

func handleSafeBrowsingStatus(w http.ResponseWriter, r *http.Request) {
	data := map[string]interface{}{
		"enabled": config.CoreDNS.SafeBrowsingEnabled,
	}
	jsonVal, err := json.Marshal(data)
	if err != nil {
		errortext := fmt.Sprintf("Unable to marshal status json: %s", err)
		log.Println(errortext)
		http.Error(w, errortext, 500)
		return
	}

	w.Header().Set("Content-Type", "application/json")
	_, err = w.Write(jsonVal)
	if err != nil {
		errortext := fmt.Sprintf("Unable to write response json: %s", err)
		log.Println(errortext)
		http.Error(w, errortext, 500)
		return
	}
}

// --------
// parental
// --------

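// handleParentalEnable reads the "sensitivity" parameter (an age or a named
// level such as "TEEN") and enables parental control with that value.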
func handleParentalEnable(w http.ResponseWriter, r *http.Request) {
	parameters, err := parseParametersFromBody(r.Body)
	if err != nil {
		errortext := fmt.Sprintf("failed to parse parameters from body: %s", err)
		log.Println(errortext)
		http.Error(w, errortext, 400)
		return
	}

	sensitivity, ok := parameters["sensitivity"]
	if !ok {
		http.Error(w, "Sensitivity parameter was not specified", 400)
		return
	}

	switch sensitivity {
	case "3":
		break
	case "EARLY_CHILDHOOD":
		sensitivity = "3"
	case "10":
		break
	case "YOUNG":
		sensitivity = "10"
	case "13":
		break
	case "TEEN":
		sensitivity = "13"
	case "17":
		break
	case "MATURE":
		sensitivity = "17"
	default:
		http.Error(w, "Sensitivity must be set to valid value", 400)
		return
	}
	i, err := strconv.Atoi(sensitivity)
	if err != nil {
		http.Error(w, "Sensitivity must be set to valid value", 400)
		return
	}
	config.CoreDNS.ParentalSensitivity = i
	config.CoreDNS.ParentalEnabled = true
	httpUpdateConfigReloadDNSReturnOK(w, r)
}

func handleParentalDisable(w http.ResponseWriter, r *http.Request) {
	config.CoreDNS.ParentalEnabled = false
	httpUpdateConfigReloadDNSReturnOK(w, r)
}

func handleParentalStatus(w http.ResponseWriter, r *http.Request) {
	data := map[string]interface{}{
		"enabled": config.CoreDNS.ParentalEnabled,
	}
	if config.CoreDNS.ParentalEnabled {
		data["sensitivity"] = config.CoreDNS.ParentalSensitivity
	}
	jsonVal, err := json.Marshal(data)
	if err != nil {
		errortext := fmt.Sprintf("Unable to marshal status json: %s", err)
		log.Println(errortext)
		http.Error(w, errortext, 500)
		return
	}

	w.Header().Set("Content-Type", "application/json")
	_, err = w.Write(jsonVal)
	if err != nil {
		errortext := fmt.Sprintf("Unable to write response json: %s", err)
		log.Println(errortext)
		http.Error(w, errortext, 500)
		return
	}
}

// ----------
// safesearch
// ----------

func handleSafeSearchEnable(w http.ResponseWriter, r *http.Request) {
	config.CoreDNS.SafeSearchEnabled = true
	httpUpdateConfigReloadDNSReturnOK(w, r)
}

func handleSafeSearchDisable(w http.ResponseWriter, r *http.Request) {
	config.CoreDNS.SafeSearchEnabled = false
	httpUpdateConfigReloadDNSReturnOK(w, r)
}

func handleSafeSearchStatus(w http.ResponseWriter, r *http.Request) {
	data := map[string]interface{}{
		"enabled": config.CoreDNS.SafeSearchEnabled,
	}
	jsonVal, err := json.Marshal(data)
	if err != nil {
		errortext := fmt.Sprintf("Unable to marshal status json: %s", err)
		log.Println(errortext)
		http.Error(w, errortext, 500)
		return
	}

	w.Header().Set("Content-Type", "application/json")
	_, err = w.Write(jsonVal)
	if err != nil {
		errortext := fmt.Sprintf("Unable to write response json: %s", err)
		log.Println(errortext)
		http.Error(w, errortext, 500)
		return
	}
}

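// registerControlHandlers binds all /control/* endpoints to their handlers,
// wrapping them with optionalAuth and, where applicable, an HTTP method guard.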
func registerControlHandlers() {
	http.HandleFunc("/control/status", optionalAuth(ensureGET(handleStatus)))
	http.HandleFunc("/control/enable_protection", optionalAuth(ensurePOST(handleProtectionEnable)))
	http.HandleFunc("/control/disable_protection", optionalAuth(ensurePOST(handleProtectionDisable)))
	http.HandleFunc("/control/querylog", optionalAuth(ensureGET(corednsplugin.HandleQueryLog)))
	http.HandleFunc("/control/querylog_enable", optionalAuth(ensurePOST(handleQueryLogEnable)))
	http.HandleFunc("/control/querylog_disable", optionalAuth(ensurePOST(handleQueryLogDisable)))
	http.HandleFunc("/control/set_upstream_dns", optionalAuth(ensurePOST(handleSetUpstreamDNS)))
	http.HandleFunc("/control/test_upstream_dns", optionalAuth(ensurePOST(handleTestUpstreamDNS)))
	http.HandleFunc("/control/i18n/change_language", optionalAuth(ensurePOST(handleI18nChangeLanguage)))
	http.HandleFunc("/control/i18n/current_language", optionalAuth(ensureGET(handleI18nCurrentLanguage)))
	http.HandleFunc("/control/stats_top", optionalAuth(ensureGET(corednsplugin.HandleStatsTop)))
	http.HandleFunc("/control/stats", optionalAuth(ensureGET(corednsplugin.HandleStats)))
	http.HandleFunc("/control/stats_history", optionalAuth(ensureGET(corednsplugin.HandleStatsHistory)))
	http.HandleFunc("/control/stats_reset", optionalAuth(ensurePOST(corednsplugin.HandleStatsReset)))
	http.HandleFunc("/control/version.json", optionalAuth(handleGetVersionJSON))
	http.HandleFunc("/control/filtering/enable", optionalAuth(ensurePOST(handleFilteringEnable)))
	http.HandleFunc("/control/filtering/disable", optionalAuth(ensurePOST(handleFilteringDisable)))
	http.HandleFunc("/control/filtering/add_url", optionalAuth(ensurePUT(handleFilteringAddURL)))
	http.HandleFunc("/control/filtering/remove_url", optionalAuth(ensureDELETE(handleFilteringRemoveURL)))
	http.HandleFunc("/control/filtering/enable_url", optionalAuth(ensurePOST(handleFilteringEnableURL)))
	http.HandleFunc("/control/filtering/disable_url", optionalAuth(ensurePOST(handleFilteringDisableURL)))
	http.HandleFunc("/control/filtering/refresh", optionalAuth(ensurePOST(handleFilteringRefresh)))
	http.HandleFunc("/control/filtering/status", optionalAuth(ensureGET(handleFilteringStatus)))
	http.HandleFunc("/control/filtering/set_rules", optionalAuth(ensurePUT(handleFilteringSetRules)))
	http.HandleFunc("/control/safebrowsing/enable", optionalAuth(ensurePOST(handleSafeBrowsingEnable)))
	http.HandleFunc("/control/safebrowsing/disable", optionalAuth(ensurePOST(handleSafeBrowsingDisable)))
	http.HandleFunc("/control/safebrowsing/status", optionalAuth(ensureGET(handleSafeBrowsingStatus)))
	http.HandleFunc("/control/parental/enable", optionalAuth(ensurePOST(handleParentalEnable)))
	http.HandleFunc("/control/parental/disable", optionalAuth(ensurePOST(handleParentalDisable)))
	http.HandleFunc("/control/parental/status", optionalAuth(ensureGET(handleParentalStatus)))
	http.HandleFunc("/control/safesearch/enable", optionalAuth(ensurePOST(handleSafeSearchEnable)))
	http.HandleFunc("/control/safesearch/disable", optionalAuth(ensurePOST(handleSafeSearchDisable)))
	http.HandleFunc("/control/safesearch/status", optionalAuth(ensureGET(handleSafeSearchStatus)))
}