docstring_tokens (stringlengths 0–76.5k) | code_tokens (stringlengths 75–1.81M) | label_window (sequencelengths 4–2.12k) | html_url (stringlengths 74–116) | file_name (stringlengths 3–311) |
---|---|---|---|---|
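Each row below pairs a masked code window (code_tokens) with its commit message (docstring_tokens), the related remove/add hunks, a per-line label window, the source commit URL, and the edited file name. A minimal Go sketch of how one record might be modeled in memory; only the column names come from the header above, the struct name and field types are assumptions:

```go
package editdataset

// EditRecord is a hypothetical in-memory shape for one row of this dataset.
// Field names mirror the columns in the header; the types are assumed.
type EditRecord struct {
	DocstringTokens string   // natural-language description (here, the commit message)
	CodeTokens      string   // masked code window plus the remove/add hunks
	LabelWindow     []string // per-line labels: "keep", "replace", or "add"
	HTMLURL         string   // link to the source commit on GitHub
	FileName        string   // path of the file the window was taken from
}
```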
errortext := fmt.Sprintf("Couldn't read response body: %s", err) | <mask> summed["stats_period"] = "24 hours"
<mask>
<mask> json, err := json.Marshal(summed)
<mask> if err != nil {
<mask> errortext := fmt.Sprintf("Unable to marshal status json: %s", err)
<mask> log.Println(errortext)
<mask> http.Error(w, errortext, 500)
<mask> return
<mask> }
<mask> w.Header().Set("Content-Type", "application/json")
</s> Implement online stats calculation in coredns plugin instead of scraping prometheus. </s> remove summed["stats_period"] = "24 hours"
</s> add </s> remove json, err := json.Marshal(summed)
</s> add // read the body entirely
body, err := ioutil.ReadAll(resp.Body) </s> remove http.Error(w, errortext, 500)
</s> add http.Error(w, errortext, http.StatusBadGateway) </s> remove http.Error(w, errortext, 500)
</s> add http.Error(w, errortext, http.StatusBadGateway) </s> remove errortext := fmt.Sprintf("Unable to marshal status json: %s", err)
</s> add errortext := fmt.Sprintf("Couldn't read response body: %s", err) | [
"keep",
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep",
"keep",
"keep"
] | https://github.com/AdguardTeam/AdGuardHome/commit/37f6d38c498740c7c3db59c31da031cd5c305694 | control.go |
http.Error(w, errortext, http.StatusBadGateway) | <mask> json, err := json.Marshal(summed)
<mask> if err != nil {
<mask> errortext := fmt.Sprintf("Unable to marshal status json: %s", err)
<mask> log.Println(errortext)
<mask> http.Error(w, errortext, 500)
<mask> return
<mask> }
<mask> w.Header().Set("Content-Type", "application/json")
<mask> _, err = w.Write(json)
<mask> if err != nil {
</s> Implement online stats calculation in coredns plugin instead of scraping prometheus. </s> remove http.Error(w, errortext, 500)
</s> add http.Error(w, errortext, http.StatusBadGateway) </s> remove errortext := fmt.Sprintf("Unable to marshal status json: %s", err)
</s> add errortext := fmt.Sprintf("Couldn't read response body: %s", err) </s> remove json, err := json.Marshal(summed)
</s> add // read the body entirely
body, err := ioutil.ReadAll(resp.Body) </s> remove summed["stats_period"] = "24 hours"
</s> add </s> remove errortext := fmt.Sprintf("Unable to marshal status json: %s", err)
</s> add errortext := fmt.Sprintf("Couldn't read response body: %s", err) | [
"keep",
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep",
"keep",
"keep"
] | https://github.com/AdguardTeam/AdGuardHome/commit/37f6d38c498740c7c3db59c31da031cd5c305694 | control.go |
// forward body entirely with status code | <mask> return
<mask> }
<mask> w.Header().Set("Content-Type", "application/json")
<mask> w.Header().Set("Content-Length", strconv.Itoa(len(body)))
<mask> w.WriteHeader(resp.StatusCode)
<mask> _, err = w.Write(body)
</s> Implement online stats calculation in coredns plugin instead of scraping prometheus. </s> remove _, err = w.Write(json)
</s> add w.Header().Set("Content-Length", strconv.Itoa(len(body)))
w.WriteHeader(resp.StatusCode)
_, err = w.Write(body) </s> remove _, err = w.Write(json)
</s> add w.Header().Set("Content-Length", strconv.Itoa(len(body)))
w.WriteHeader(resp.StatusCode)
_, err = w.Write(body) </s> remove errortext := fmt.Sprintf("Unable to write response json: %s", err)
</s> add errortext := fmt.Sprintf("Couldn't write body: %s", err) </s> remove errortext := fmt.Sprintf("Unable to write response json: %s", err)
</s> add errortext := fmt.Sprintf("Couldn't write body: %s", err) | [
"keep",
"add",
"keep",
"keep",
"keep",
"keep"
] | https://github.com/AdguardTeam/AdGuardHome/commit/37f6d38c498740c7c3db59c31da031cd5c305694 | control.go |
w.Header().Set("Content-Length", strconv.Itoa(len(body)))
w.WriteHeader(resp.StatusCode)
_, err = w.Write(body) | <mask> http.Error(w, errortext, 500)
<mask> return
<mask> }
<mask> w.Header().Set("Content-Type", "application/json")
<mask> _, err = w.Write(json)
<mask> if err != nil {
<mask> errortext := fmt.Sprintf("Unable to write response json: %s", err)
<mask> log.Println(errortext)
<mask> http.Error(w, errortext, 500)
<mask> return
</s> Implement online stats calculation in coredns plugin instead of scraping prometheus. </s> remove _, err = w.Write(json)
</s> add w.Header().Set("Content-Length", strconv.Itoa(len(body)))
w.WriteHeader(resp.StatusCode)
_, err = w.Write(body) </s> remove errortext := fmt.Sprintf("Unable to write response json: %s", err)
</s> add errortext := fmt.Sprintf("Couldn't write body: %s", err) </s> remove errortext := fmt.Sprintf("Unable to write response json: %s", err)
</s> add errortext := fmt.Sprintf("Couldn't write body: %s", err) </s> remove http.Error(w, errortext, 500)
</s> add http.Error(w, errortext, http.StatusBadGateway) </s> remove http.Error(w, errortext, 500)
</s> add http.Error(w, errortext, http.StatusBadGateway) | [
"keep",
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep",
"keep",
"keep"
] | https://github.com/AdguardTeam/AdGuardHome/commit/37f6d38c498740c7c3db59c31da031cd5c305694 | control.go |
errortext := fmt.Sprintf("Couldn't write body: %s", err) | <mask> }
<mask> w.Header().Set("Content-Type", "application/json")
<mask> _, err = w.Write(json)
<mask> if err != nil {
<mask> errortext := fmt.Sprintf("Unable to write response json: %s", err)
<mask> log.Println(errortext)
<mask> http.Error(w, errortext, 500)
<mask> return
<mask> }
<mask> }
</s> Implement online stats calculation in coredns plugin instead of scraping prometheus. </s> remove errortext := fmt.Sprintf("Unable to write response json: %s", err)
</s> add errortext := fmt.Sprintf("Couldn't write body: %s", err) </s> remove _, err = w.Write(json)
</s> add w.Header().Set("Content-Length", strconv.Itoa(len(body)))
w.WriteHeader(resp.StatusCode)
_, err = w.Write(body) </s> remove _, err = w.Write(json)
</s> add w.Header().Set("Content-Length", strconv.Itoa(len(body)))
w.WriteHeader(resp.StatusCode)
_, err = w.Write(body) </s> remove http.Error(w, errortext, 500)
return
</s> add http.Error(w, errortext, http.StatusInternalServerError) </s> remove http.Error(w, errortext, 500)
</s> add http.Error(w, errortext, http.StatusBadGateway) | [
"keep",
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep",
"keep",
"keep"
] | https://github.com/AdguardTeam/AdGuardHome/commit/37f6d38c498740c7c3db59c31da031cd5c305694 | control.go |
http.Error(w, errortext, http.StatusInternalServerError) | <mask> _, err = w.Write(json)
<mask> if err != nil {
<mask> errortext := fmt.Sprintf("Unable to write response json: %s", err)
<mask> log.Println(errortext)
<mask> http.Error(w, errortext, 500)
<mask> return
<mask> }
<mask> }
<mask>
<mask> func handleStatsHistory(w http.ResponseWriter, r *http.Request) {
<mask> // handle time unit and prepare our time window size
</s> Implement online stats calculation in coredns plugin instead of scraping prometheus. </s> remove // handle time unit and prepare our time window size
now := time.Now()
timeUnitString := r.URL.Query().Get("time_unit")
var stats *periodicStats
var timeUnit time.Duration
switch timeUnitString {
case "seconds":
timeUnit = time.Second
stats = &statistics.PerSecond
case "minutes":
timeUnit = time.Minute
stats = &statistics.PerMinute
case "hours":
timeUnit = time.Hour
stats = &statistics.PerHour
case "days":
timeUnit = time.Hour * 24
stats = &statistics.PerDay
default:
http.Error(w, "Must specify valid time_unit parameter", 400)
return
}
// parse start and end time
startTime, err := time.Parse(time.RFC3339, r.URL.Query().Get("start_time"))
if err != nil {
errortext := fmt.Sprintf("Must specify valid start_time parameter: %s", err)
log.Println(errortext)
http.Error(w, errortext, 400)
return
}
endTime, err := time.Parse(time.RFC3339, r.URL.Query().Get("end_time"))
</s> add resp, err := client.Get("http://127.0.0.1:8618/stats_history?" + r.URL.RawQuery) </s> remove http.Error(w, errortext, 500)
return
</s> add http.Error(w, errortext, http.StatusInternalServerError) </s> remove errortext := fmt.Sprintf("Unable to write response json: %s", err)
</s> add errortext := fmt.Sprintf("Couldn't write body: %s", err) </s> remove errortext := fmt.Sprintf("Unable to write response json: %s", err)
</s> add errortext := fmt.Sprintf("Couldn't write body: %s", err) </s> remove return
</s> add | [
"keep",
"keep",
"keep",
"keep",
"replace",
"replace",
"keep",
"keep",
"keep",
"keep",
"keep"
] | https://github.com/AdguardTeam/AdGuardHome/commit/37f6d38c498740c7c3db59c31da031cd5c305694 | control.go |
resp, err := client.Get("http://127.0.0.1:8618/stats_history?" + r.URL.RawQuery) | <mask> }
<mask> }
<mask>
<mask> func handleStatsHistory(w http.ResponseWriter, r *http.Request) {
<mask> // handle time unit and prepare our time window size
<mask> now := time.Now()
<mask> timeUnitString := r.URL.Query().Get("time_unit")
<mask> var stats *periodicStats
<mask> var timeUnit time.Duration
<mask> switch timeUnitString {
<mask> case "seconds":
<mask> timeUnit = time.Second
<mask> stats = &statistics.PerSecond
<mask> case "minutes":
<mask> timeUnit = time.Minute
<mask> stats = &statistics.PerMinute
<mask> case "hours":
<mask> timeUnit = time.Hour
<mask> stats = &statistics.PerHour
<mask> case "days":
<mask> timeUnit = time.Hour * 24
<mask> stats = &statistics.PerDay
<mask> default:
<mask> http.Error(w, "Must specify valid time_unit parameter", 400)
<mask> return
<mask> }
<mask>
<mask> // parse start and end time
<mask> startTime, err := time.Parse(time.RFC3339, r.URL.Query().Get("start_time"))
<mask> if err != nil {
<mask> errortext := fmt.Sprintf("Must specify valid start_time parameter: %s", err)
<mask> log.Println(errortext)
<mask> http.Error(w, errortext, 400)
<mask> return
<mask> }
<mask> endTime, err := time.Parse(time.RFC3339, r.URL.Query().Get("end_time"))
<mask> if err != nil {
<mask> errortext := fmt.Sprintf("Must specify valid end_time parameter: %s", err)
<mask> log.Println(errortext)
<mask> http.Error(w, errortext, 400)
<mask> return
</s> Implement online stats calculation in coredns plugin instead of scraping prometheus. </s> remove http.Error(w, errortext, 400)
return
}
// check if start and time times are within supported time range
timeRange := timeUnit * statsHistoryElements
if startTime.Add(timeRange).Before(now) {
http.Error(w, "start_time parameter is outside of supported range", 501)
return
}
if endTime.Add(timeRange).Before(now) {
http.Error(w, "end_time parameter is outside of supported range", 501)
</s> add http.Error(w, errortext, http.StatusBadGateway) </s> remove errortext := fmt.Sprintf("Must specify valid end_time parameter: %s", err)
</s> add errortext := fmt.Sprintf("Couldn't get stats_top from coredns: %T %s\n", err, err) </s> remove http.Error(w, errortext, 500)
return
</s> add http.Error(w, errortext, http.StatusInternalServerError) </s> remove histrical := generateMapFromStats(&statistics.PerHour, 0, 24)
// sum them up
summed := map[string]interface{}{}
for key, values := range histrical {
summedValue := 0.0
floats, ok := values.([]float64)
if !ok {
continue
}
for _, v := range floats {
summedValue += v
}
summed[key] = summedValue
</s> add resp, err := client.Get("http://127.0.0.1:8618/stats")
if err != nil {
errortext := fmt.Sprintf("Couldn't get stats_top from coredns: %T %s\n", err, err)
log.Println(errortext)
http.Error(w, errortext, http.StatusBadGateway)
return
}
if resp != nil && resp.Body != nil {
defer resp.Body.Close() </s> remove http.Error(w, errortext, 500)
return
</s> add http.Error(w, errortext, http.StatusInternalServerError) | [
"keep",
"keep",
"keep",
"keep",
"replace",
"replace",
"replace",
"replace",
"replace",
"replace",
"replace",
"replace",
"replace",
"replace",
"replace",
"replace",
"replace",
"replace",
"replace",
"replace",
"replace",
"replace",
"replace",
"replace",
"replace",
"replace",
"replace",
"replace",
"replace",
"replace",
"replace",
"replace",
"replace",
"replace",
"replace",
"replace",
"keep",
"keep",
"keep",
"keep",
"keep"
] | https://github.com/AdguardTeam/AdGuardHome/commit/37f6d38c498740c7c3db59c31da031cd5c305694 | control.go |
errortext := fmt.Sprintf("Couldn't get stats_top from coredns: %T %s\n", err, err) | <mask> return
<mask> }
<mask> endTime, err := time.Parse(time.RFC3339, r.URL.Query().Get("end_time"))
<mask> if err != nil {
<mask> errortext := fmt.Sprintf("Must specify valid end_time parameter: %s", err)
<mask> log.Println(errortext)
<mask> http.Error(w, errortext, 400)
<mask> return
<mask> }
<mask>
</s> Implement online stats calculation in coredns plugin instead of scraping prometheus. </s> remove // handle time unit and prepare our time window size
now := time.Now()
timeUnitString := r.URL.Query().Get("time_unit")
var stats *periodicStats
var timeUnit time.Duration
switch timeUnitString {
case "seconds":
timeUnit = time.Second
stats = &statistics.PerSecond
case "minutes":
timeUnit = time.Minute
stats = &statistics.PerMinute
case "hours":
timeUnit = time.Hour
stats = &statistics.PerHour
case "days":
timeUnit = time.Hour * 24
stats = &statistics.PerDay
default:
http.Error(w, "Must specify valid time_unit parameter", 400)
return
}
// parse start and end time
startTime, err := time.Parse(time.RFC3339, r.URL.Query().Get("start_time"))
if err != nil {
errortext := fmt.Sprintf("Must specify valid start_time parameter: %s", err)
log.Println(errortext)
http.Error(w, errortext, 400)
return
}
endTime, err := time.Parse(time.RFC3339, r.URL.Query().Get("end_time"))
</s> add resp, err := client.Get("http://127.0.0.1:8618/stats_history?" + r.URL.RawQuery) </s> remove http.Error(w, errortext, 400)
return
}
// check if start and time times are within supported time range
timeRange := timeUnit * statsHistoryElements
if startTime.Add(timeRange).Before(now) {
http.Error(w, "start_time parameter is outside of supported range", 501)
return
}
if endTime.Add(timeRange).Before(now) {
http.Error(w, "end_time parameter is outside of supported range", 501)
</s> add http.Error(w, errortext, http.StatusBadGateway) </s> remove err := runningTop.addEntry(entry, now)
</s> add if len(entry.Question) == 0 {
log.Printf("entry question is absent, skipping")
return nil
}
if entry.Time.After(now) {
log.Printf("t %v vs %v is in the future, ignoring", entry.Time, now)
return nil
}
q := new(dns.Msg)
if err := q.Unpack(entry.Question); err != nil {
log.Printf("failed to unpack dns message question: %s", err)
return err
}
if len(q.Question) != 1 {
log.Printf("malformed dns message, has no questions, skipping")
return nil
}
err := runningTop.addEntry(entry, q, now) </s> remove log.Printf("Loading top from querylog")
err := loadTopFromFiles()
</s> add log.Printf("Loading stats from querylog")
err := fillStatsFromQueryLog() </s> remove errortext := fmt.Sprintf("Unable to write response json: %s", err)
</s> add errortext := fmt.Sprintf("Couldn't write body: %s", err) | [
"keep",
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep",
"keep",
"keep"
] | https://github.com/AdguardTeam/AdGuardHome/commit/37f6d38c498740c7c3db59c31da031cd5c305694 | control.go |
http.Error(w, errortext, http.StatusBadGateway) | <mask> endTime, err := time.Parse(time.RFC3339, r.URL.Query().Get("end_time"))
<mask> if err != nil {
<mask> errortext := fmt.Sprintf("Must specify valid end_time parameter: %s", err)
<mask> log.Println(errortext)
<mask> http.Error(w, errortext, 400)
<mask> return
<mask> }
<mask>
<mask> // check if start and time times are within supported time range
<mask> timeRange := timeUnit * statsHistoryElements
<mask> if startTime.Add(timeRange).Before(now) {
<mask> http.Error(w, "start_time parameter is outside of supported range", 501)
<mask> return
<mask> }
<mask> if endTime.Add(timeRange).Before(now) {
<mask> http.Error(w, "end_time parameter is outside of supported range", 501)
<mask> return
<mask> }
<mask>
<mask> // calculate start and end of our array
<mask> // basically it's how many hours/minutes/etc have passed since now
</s> Implement online stats calculation in coredns plugin instead of scraping prometheus. </s> remove
// calculate start and end of our array
// basically it's how many hours/minutes/etc have passed since now
start := int(now.Sub(endTime) / timeUnit)
end := int(now.Sub(startTime) / timeUnit)
// swap them around if they're inverted
if start > end {
start, end = end, start
</s> add if resp != nil && resp.Body != nil {
defer resp.Body.Close() </s> remove // handle time unit and prepare our time window size
now := time.Now()
timeUnitString := r.URL.Query().Get("time_unit")
var stats *periodicStats
var timeUnit time.Duration
switch timeUnitString {
case "seconds":
timeUnit = time.Second
stats = &statistics.PerSecond
case "minutes":
timeUnit = time.Minute
stats = &statistics.PerMinute
case "hours":
timeUnit = time.Hour
stats = &statistics.PerHour
case "days":
timeUnit = time.Hour * 24
stats = &statistics.PerDay
default:
http.Error(w, "Must specify valid time_unit parameter", 400)
return
}
// parse start and end time
startTime, err := time.Parse(time.RFC3339, r.URL.Query().Get("start_time"))
if err != nil {
errortext := fmt.Sprintf("Must specify valid start_time parameter: %s", err)
log.Println(errortext)
http.Error(w, errortext, 400)
return
}
endTime, err := time.Parse(time.RFC3339, r.URL.Query().Get("end_time"))
</s> add resp, err := client.Get("http://127.0.0.1:8618/stats_history?" + r.URL.RawQuery) </s> remove errortext := fmt.Sprintf("Must specify valid end_time parameter: %s", err)
</s> add errortext := fmt.Sprintf("Couldn't get stats_top from coredns: %T %s\n", err, err) </s> remove http.Error(w, errortext, 500)
return
</s> add http.Error(w, errortext, http.StatusInternalServerError) </s> remove data := generateMapFromStats(stats, start, end)
json, err := json.Marshal(data)
</s> add // read the body entirely
body, err := ioutil.ReadAll(resp.Body) | [
"keep",
"keep",
"keep",
"keep",
"replace",
"replace",
"replace",
"replace",
"replace",
"replace",
"replace",
"replace",
"replace",
"replace",
"replace",
"replace",
"keep",
"keep",
"keep",
"keep",
"keep"
] | https://github.com/AdguardTeam/AdGuardHome/commit/37f6d38c498740c7c3db59c31da031cd5c305694 | control.go |
if resp != nil && resp.Body != nil {
defer resp.Body.Close() | <mask> if endTime.Add(timeRange).Before(now) {
<mask> http.Error(w, "end_time parameter is outside of supported range", 501)
<mask> return
<mask> }
<mask>
<mask> // calculate start and end of our array
<mask> // basically it's how many hours/minutes/etc have passed since now
<mask> start := int(now.Sub(endTime) / timeUnit)
<mask> end := int(now.Sub(startTime) / timeUnit)
<mask>
<mask> // swap them around if they're inverted
<mask> if start > end {
<mask> start, end = end, start
<mask> }
<mask>
<mask> data := generateMapFromStats(stats, start, end)
<mask> json, err := json.Marshal(data)
<mask> if err != nil {
</s> Implement online stats calculation in coredns plugin instead of scraping prometheus. </s> remove http.Error(w, errortext, 400)
return
}
// check if start and time times are within supported time range
timeRange := timeUnit * statsHistoryElements
if startTime.Add(timeRange).Before(now) {
http.Error(w, "start_time parameter is outside of supported range", 501)
return
}
if endTime.Add(timeRange).Before(now) {
http.Error(w, "end_time parameter is outside of supported range", 501)
</s> add http.Error(w, errortext, http.StatusBadGateway) </s> remove data := generateMapFromStats(stats, start, end)
json, err := json.Marshal(data)
</s> add // read the body entirely
body, err := ioutil.ReadAll(resp.Body) </s> remove errortext := fmt.Sprintf("Unable to marshal status json: %s", err)
</s> add errortext := fmt.Sprintf("Couldn't read response body: %s", err) </s> remove // handle time unit and prepare our time window size
now := time.Now()
timeUnitString := r.URL.Query().Get("time_unit")
var stats *periodicStats
var timeUnit time.Duration
switch timeUnitString {
case "seconds":
timeUnit = time.Second
stats = &statistics.PerSecond
case "minutes":
timeUnit = time.Minute
stats = &statistics.PerMinute
case "hours":
timeUnit = time.Hour
stats = &statistics.PerHour
case "days":
timeUnit = time.Hour * 24
stats = &statistics.PerDay
default:
http.Error(w, "Must specify valid time_unit parameter", 400)
return
}
// parse start and end time
startTime, err := time.Parse(time.RFC3339, r.URL.Query().Get("start_time"))
if err != nil {
errortext := fmt.Sprintf("Must specify valid start_time parameter: %s", err)
log.Println(errortext)
http.Error(w, errortext, 400)
return
}
endTime, err := time.Parse(time.RFC3339, r.URL.Query().Get("end_time"))
</s> add resp, err := client.Get("http://127.0.0.1:8618/stats_history?" + r.URL.RawQuery) </s> remove histrical := generateMapFromStats(&statistics.PerHour, 0, 24)
// sum them up
summed := map[string]interface{}{}
for key, values := range histrical {
summedValue := 0.0
floats, ok := values.([]float64)
if !ok {
continue
}
for _, v := range floats {
summedValue += v
}
summed[key] = summedValue
</s> add resp, err := client.Get("http://127.0.0.1:8618/stats")
if err != nil {
errortext := fmt.Sprintf("Couldn't get stats_top from coredns: %T %s\n", err, err)
log.Println(errortext)
http.Error(w, errortext, http.StatusBadGateway)
return
}
if resp != nil && resp.Body != nil {
defer resp.Body.Close() | [
"keep",
"keep",
"keep",
"keep",
"replace",
"replace",
"replace",
"replace",
"replace",
"replace",
"replace",
"replace",
"replace",
"keep",
"keep",
"keep",
"keep",
"keep"
] | https://github.com/AdguardTeam/AdGuardHome/commit/37f6d38c498740c7c3db59c31da031cd5c305694 | control.go |
// read the body entirely
body, err := ioutil.ReadAll(resp.Body) | <mask> if start > end {
<mask> start, end = end, start
<mask> }
<mask>
<mask> data := generateMapFromStats(stats, start, end)
<mask> json, err := json.Marshal(data)
<mask> if err != nil {
<mask> errortext := fmt.Sprintf("Unable to marshal status json: %s", err)
<mask> log.Println(errortext)
<mask> http.Error(w, errortext, 500)
<mask> return
</s> Implement online stats calculation in coredns plugin instead of scraping prometheus. </s> remove
// calculate start and end of our array
// basically it's how many hours/minutes/etc have passed since now
start := int(now.Sub(endTime) / timeUnit)
end := int(now.Sub(startTime) / timeUnit)
// swap them around if they're inverted
if start > end {
start, end = end, start
</s> add if resp != nil && resp.Body != nil {
defer resp.Body.Close() </s> remove errortext := fmt.Sprintf("Unable to marshal status json: %s", err)
</s> add errortext := fmt.Sprintf("Couldn't read response body: %s", err) </s> remove http.Error(w, errortext, 500)
</s> add http.Error(w, errortext, http.StatusBadGateway) </s> remove http.Error(w, errortext, 500)
</s> add http.Error(w, errortext, http.StatusBadGateway) </s> remove errortext := fmt.Sprintf("Unable to marshal status json: %s", err)
</s> add errortext := fmt.Sprintf("Couldn't read response body: %s", err) | [
"keep",
"keep",
"keep",
"keep",
"replace",
"replace",
"keep",
"keep",
"keep",
"keep",
"keep"
] | https://github.com/AdguardTeam/AdGuardHome/commit/37f6d38c498740c7c3db59c31da031cd5c305694 | control.go |
errortext := fmt.Sprintf("Couldn't read response body: %s", err) | <mask>
<mask> data := generateMapFromStats(stats, start, end)
<mask> json, err := json.Marshal(data)
<mask> if err != nil {
<mask> errortext := fmt.Sprintf("Unable to marshal status json: %s", err)
<mask> log.Println(errortext)
<mask> http.Error(w, errortext, 500)
<mask> return
<mask> }
<mask> w.Header().Set("Content-Type", "application/json")
</s> Implement online stats calculation in coredns plugin instead of scraping prometheus. </s> remove data := generateMapFromStats(stats, start, end)
json, err := json.Marshal(data)
</s> add // read the body entirely
body, err := ioutil.ReadAll(resp.Body) </s> remove http.Error(w, errortext, 500)
</s> add http.Error(w, errortext, http.StatusBadGateway) </s> remove http.Error(w, errortext, 500)
</s> add http.Error(w, errortext, http.StatusBadGateway) </s> remove errortext := fmt.Sprintf("Unable to marshal status json: %s", err)
</s> add errortext := fmt.Sprintf("Couldn't read response body: %s", err) </s> remove
// calculate start and end of our array
// basically it's how many hours/minutes/etc have passed since now
start := int(now.Sub(endTime) / timeUnit)
end := int(now.Sub(startTime) / timeUnit)
// swap them around if they're inverted
if start > end {
start, end = end, start
</s> add if resp != nil && resp.Body != nil {
defer resp.Body.Close() | [
"keep",
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep",
"keep",
"keep"
] | https://github.com/AdguardTeam/AdGuardHome/commit/37f6d38c498740c7c3db59c31da031cd5c305694 | control.go |
http.Error(w, errortext, http.StatusBadGateway) | <mask> json, err := json.Marshal(data)
<mask> if err != nil {
<mask> errortext := fmt.Sprintf("Unable to marshal status json: %s", err)
<mask> log.Println(errortext)
<mask> http.Error(w, errortext, 500)
<mask> return
<mask> }
<mask> w.Header().Set("Content-Type", "application/json")
<mask> _, err = w.Write(json)
<mask> if err != nil {
</s> Implement online stats calculation in coredns plugin instead of scraping prometheus. </s> remove http.Error(w, errortext, 500)
</s> add http.Error(w, errortext, http.StatusBadGateway) </s> remove errortext := fmt.Sprintf("Unable to marshal status json: %s", err)
</s> add errortext := fmt.Sprintf("Couldn't read response body: %s", err) </s> remove errortext := fmt.Sprintf("Unable to marshal status json: %s", err)
</s> add errortext := fmt.Sprintf("Couldn't read response body: %s", err) </s> remove data := generateMapFromStats(stats, start, end)
json, err := json.Marshal(data)
</s> add // read the body entirely
body, err := ioutil.ReadAll(resp.Body) </s> remove errortext := fmt.Sprintf("Unable to write response json: %s", err)
</s> add errortext := fmt.Sprintf("Couldn't write body: %s", err) | [
"keep",
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep",
"keep",
"keep"
] | https://github.com/AdguardTeam/AdGuardHome/commit/37f6d38c498740c7c3db59c31da031cd5c305694 | control.go |
// forward body entirely with status code | <mask> http.Error(w, errortext, http.StatusBadGateway)
<mask> return
<mask> }
<mask> w.Header().Set("Content-Type", "application/json")
<mask> w.Header().Set("Content-Length", strconv.Itoa(len(body)))
<mask> w.WriteHeader(resp.StatusCode)
<mask> _, err = w.Write(body)
<mask> if err != nil {
<mask> errortext := fmt.Sprintf("Couldn't write body: %s", err)
</s> Implement online stats calculation in coredns plugin instead of scraping prometheus. </s> remove _, err = w.Write(json)
</s> add w.Header().Set("Content-Length", strconv.Itoa(len(body)))
w.WriteHeader(resp.StatusCode)
_, err = w.Write(body) </s> remove _, err = w.Write(json)
</s> add w.Header().Set("Content-Length", strconv.Itoa(len(body)))
w.WriteHeader(resp.StatusCode)
_, err = w.Write(body) </s> remove errortext := fmt.Sprintf("Unable to write response json: %s", err)
</s> add errortext := fmt.Sprintf("Couldn't write body: %s", err) </s> remove errortext := fmt.Sprintf("Unable to write response json: %s", err)
</s> add errortext := fmt.Sprintf("Couldn't write body: %s", err) | [
"keep",
"keep",
"add",
"keep",
"keep",
"keep",
"keep",
"keep",
"keep"
] | https://github.com/AdguardTeam/AdGuardHome/commit/37f6d38c498740c7c3db59c31da031cd5c305694 | control.go |
w.Header().Set("Content-Length", strconv.Itoa(len(body)))
w.WriteHeader(resp.StatusCode)
_, err = w.Write(body) | <mask> http.Error(w, errortext, 500)
<mask> return
<mask> }
<mask> w.Header().Set("Content-Type", "application/json")
<mask> _, err = w.Write(json)
<mask> if err != nil {
<mask> errortext := fmt.Sprintf("Unable to write response json: %s", err)
<mask> log.Println(errortext)
<mask> http.Error(w, errortext, 500)
<mask> return
</s> Implement online stats calculation in coredns plugin instead of scraping prometheus. | [
"keep",
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep",
"keep",
"keep"
] | https://github.com/AdguardTeam/AdGuardHome/commit/37f6d38c498740c7c3db59c31da031cd5c305694 | control.go |
errortext := fmt.Sprintf("Couldn't write body: %s", err) | <mask> }
<mask> w.Header().Set("Content-Type", "application/json")
<mask> _, err = w.Write(json)
<mask> if err != nil {
<mask> errortext := fmt.Sprintf("Unable to write response json: %s", err)
<mask> log.Println(errortext)
<mask> http.Error(w, errortext, 500)
<mask> return
<mask> }
<mask> }
</s> Implement online stats calculation in coredns plugin instead of scraping prometheus. | [
"keep",
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep",
"keep",
"keep"
] | https://github.com/AdguardTeam/AdGuardHome/commit/37f6d38c498740c7c3db59c31da031cd5c305694 | control.go |
http.Error(w, errortext, http.StatusInternalServerError) | <mask> _, err = w.Write(json)
<mask> if err != nil {
<mask> errortext := fmt.Sprintf("Unable to write response json: %s", err)
<mask> log.Println(errortext)
<mask> http.Error(w, errortext, 500)
<mask> return
<mask> }
<mask> }
<mask>
<mask> func handleQueryLog(w http.ResponseWriter, r *http.Request) {
<mask> isDownload := r.URL.Query().Get("download") != ""
</s> Implement online stats calculation in coredns plugin instead of scraping prometheus. </s> remove http.Error(w, errortext, 500)
return
</s> add http.Error(w, errortext, http.StatusInternalServerError) </s> remove errortext := fmt.Sprintf("Unable to write response json: %s", err)
</s> add errortext := fmt.Sprintf("Couldn't write body: %s", err) </s> remove errortext := fmt.Sprintf("Unable to write response json: %s", err)
</s> add errortext := fmt.Sprintf("Couldn't write body: %s", err) </s> remove _, err = w.Write(json)
</s> add w.Header().Set("Content-Length", strconv.Itoa(len(body)))
w.WriteHeader(resp.StatusCode)
_, err = w.Write(body) </s> remove _, err = w.Write(json)
</s> add w.Header().Set("Content-Length", strconv.Itoa(len(body)))
w.WriteHeader(resp.StatusCode)
_, err = w.Write(body) | [
"keep",
"keep",
"keep",
"keep",
"replace",
"replace",
"keep",
"keep",
"keep",
"keep",
"keep"
] | https://github.com/AdguardTeam/AdGuardHome/commit/37f6d38c498740c7c3db59c31da031cd5c305694 | control.go |
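The control.go rows above converge on one pattern: instead of aggregating prometheus data locally, each stats handler now proxies a pre-computed JSON payload from the coredns plugin's local HTTP endpoint and forwards it, body and status code intact. A consolidated sketch assembled from those add-hunks; the endpoint, error texts, and status codes are taken from the fragments shown, while the package-level `client` and the listen address in `main` are assumptions:

```go
package main

import (
	"fmt"
	"io/ioutil"
	"log"
	"net/http"
	"strconv"
)

// client is assumed to be a shared HTTP client; the hunks reference it by name.
var client = &http.Client{}

// handleStats forwards the stats JSON produced by the coredns plugin
// (listening on 127.0.0.1:8618) to the caller, body and status code intact.
func handleStats(w http.ResponseWriter, r *http.Request) {
	resp, err := client.Get("http://127.0.0.1:8618/stats")
	if err != nil {
		errortext := fmt.Sprintf("Couldn't get stats_top from coredns: %T %s\n", err, err)
		log.Println(errortext)
		http.Error(w, errortext, http.StatusBadGateway)
		return
	}
	if resp != nil && resp.Body != nil {
		defer resp.Body.Close()
	}

	// read the body entirely
	body, err := ioutil.ReadAll(resp.Body)
	if err != nil {
		errortext := fmt.Sprintf("Couldn't read response body: %s", err)
		log.Println(errortext)
		http.Error(w, errortext, http.StatusBadGateway)
		return
	}

	// forward body entirely with status code
	w.Header().Set("Content-Type", "application/json")
	w.Header().Set("Content-Length", strconv.Itoa(len(body)))
	w.WriteHeader(resp.StatusCode)
	if _, err = w.Write(body); err != nil {
		errortext := fmt.Sprintf("Couldn't write body: %s", err)
		log.Println(errortext)
		http.Error(w, errortext, http.StatusInternalServerError)
		return
	}
}

func main() {
	// illustrative wiring only; the listen address is not from the hunks
	http.HandleFunc("/control/stats", handleStats)
	log.Fatal(http.ListenAndServe("127.0.0.1:3000", nil))
}
```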
<mask> ParentalBlockHost: "family.block.dns.adguard.com",
<mask> BlockedTTL: 3600, // in seconds
<mask> }
<mask>
<mask> func newDNSCounter(name string, help string) prometheus.Counter {
<mask> return prometheus.NewCounter(prometheus.CounterOpts{
<mask> Namespace: plugin.Namespace,
<mask> Subsystem: "dnsfilter",
<mask> Name: name,
<mask> Help: help,
<mask> })
<mask> }
<mask>
<mask> var (
<mask> requests = newDNSCounter("requests_total", "Count of requests seen by dnsfilter.")
<mask> filtered = newDNSCounter("filtered_total", "Count of requests filtered by dnsfilter.")
<mask> filteredLists = newDNSCounter("filtered_lists_total", "Count of requests filtered by dnsfilter using lists.")
<mask> filteredSafebrowsing = newDNSCounter("filtered_safebrowsing_total", "Count of requests filtered by dnsfilter using safebrowsing.")
<mask> filteredParental = newDNSCounter("filtered_parental_total", "Count of requests filtered by dnsfilter using parental.")
<mask> filteredInvalid = newDNSCounter("filtered_invalid_total", "Count of requests filtered by dnsfilter because they were invalid.")
<mask> whitelisted = newDNSCounter("whitelisted_total", "Count of requests not filtered by dnsfilter because they are whitelisted.")
<mask> safesearch = newDNSCounter("safesearch_total", "Count of requests replaced by dnsfilter safesearch.")
<mask> errorsTotal = newDNSCounter("errors_total", "Count of requests that dnsfilter couldn't process because of transitive errors.")
<mask> )
<mask>
<mask> //
<mask> // coredns handling functions
<mask> //
<mask> func setupPlugin(c *caddy.Controller) (*plug, error) {
<mask> // create new Plugin and copy default values
</s> Implement online stats calculation in coredns plugin instead of scraping prometheus. </s> remove http.Error(w, errortext, 400)
return
}
// check if start and time times are within supported time range
timeRange := timeUnit * statsHistoryElements
if startTime.Add(timeRange).Before(now) {
http.Error(w, "start_time parameter is outside of supported range", 501)
return
}
if endTime.Add(timeRange).Before(now) {
http.Error(w, "end_time parameter is outside of supported range", 501)
</s> add http.Error(w, errortext, http.StatusBadGateway) </s> remove
// calculate start and end of our array
// basically it's how many hours/minutes/etc have passed since now
start := int(now.Sub(endTime) / timeUnit)
end := int(now.Sub(startTime) / timeUnit)
// swap them around if they're inverted
if start > end {
start, end = end, start
</s> add if resp != nil && resp.Body != nil {
defer resp.Body.Close() </s> remove // handle time unit and prepare our time window size
now := time.Now()
timeUnitString := r.URL.Query().Get("time_unit")
var stats *periodicStats
var timeUnit time.Duration
switch timeUnitString {
case "seconds":
timeUnit = time.Second
stats = &statistics.PerSecond
case "minutes":
timeUnit = time.Minute
stats = &statistics.PerMinute
case "hours":
timeUnit = time.Hour
stats = &statistics.PerHour
case "days":
timeUnit = time.Hour * 24
stats = &statistics.PerDay
default:
http.Error(w, "Must specify valid time_unit parameter", 400)
return
}
// parse start and end time
startTime, err := time.Parse(time.RFC3339, r.URL.Query().Get("start_time"))
if err != nil {
errortext := fmt.Sprintf("Must specify valid start_time parameter: %s", err)
log.Println(errortext)
http.Error(w, errortext, 400)
return
}
endTime, err := time.Parse(time.RFC3339, r.URL.Query().Get("end_time"))
</s> add resp, err := client.Get("http://127.0.0.1:8618/stats_history?" + r.URL.RawQuery) </s> remove values := logBuffer
</s> add values = logBuffer </s> remove http.Error(w, errortext, 500)
return
</s> add http.Error(w, errortext, http.StatusInternalServerError) | [
"keep",
"keep",
"keep",
"keep",
"replace",
"replace",
"replace",
"replace",
"replace",
"replace",
"replace",
"replace",
"replace",
"replace",
"replace",
"replace",
"replace",
"replace",
"replace",
"replace",
"replace",
"replace",
"replace",
"replace",
"replace",
"keep",
"keep",
"keep",
"keep",
"keep"
] | https://github.com/AdguardTeam/AdGuardHome/commit/37f6d38c498740c7c3db59c31da031cd5c305694 | coredns_plugin/coredns_plugin.go |
|
log.Printf("Loading stats from querylog")
err := fillStatsFromQueryLog() | <mask> return nil, err
<mask> }
<mask> }
<mask>
<mask> log.Printf("Loading top from querylog")
<mask> err := loadTopFromFiles()
<mask> if err != nil {
<mask> log.Printf("Failed to load top from querylog: %s", err)
<mask> return nil, err
<mask> }
<mask>
</s> Implement online stats calculation in coredns plugin instead of scraping prometheus. </s> remove log.Printf("Failed to load top from querylog: %s", err)
</s> add log.Printf("Failed to load stats from querylog: %s", err) </s> remove err = runningTop.addEntry(&entry, now)
</s> add err = runningTop.addEntry(&entry, question, now) </s> remove errortext := fmt.Sprintf("Must specify valid end_time parameter: %s", err)
</s> add errortext := fmt.Sprintf("Couldn't get stats_top from coredns: %T %s\n", err, err) </s> remove err := runningTop.addEntry(entry, now)
</s> add if len(entry.Question) == 0 {
log.Printf("entry question is absent, skipping")
return nil
}
if entry.Time.After(now) {
log.Printf("t %v vs %v is in the future, ignoring", entry.Time, now)
return nil
}
q := new(dns.Msg)
if err := q.Unpack(entry.Question); err != nil {
log.Printf("failed to unpack dns message question: %s", err)
return err
}
if len(q.Question) != 1 {
log.Printf("malformed dns message, has no questions, skipping")
return nil
}
err := runningTop.addEntry(entry, q, now) </s> remove errortext := fmt.Sprintf("Unable to write response json: %s", err)
</s> add errortext := fmt.Sprintf("Couldn't write body: %s", err) | [
"keep",
"keep",
"keep",
"keep",
"replace",
"replace",
"keep",
"keep",
"keep",
"keep",
"keep"
] | https://github.com/AdguardTeam/AdGuardHome/commit/37f6d38c498740c7c3db59c31da031cd5c305694 | coredns_plugin/coredns_plugin.go |
log.Printf("Failed to load stats from querylog: %s", err) | <mask>
<mask> log.Printf("Loading top from querylog")
<mask> err := loadTopFromFiles()
<mask> if err != nil {
<mask> log.Printf("Failed to load top from querylog: %s", err)
<mask> return nil, err
<mask> }
<mask>
<mask> if p.settings.QueryLogEnabled {
<mask> onceQueryLog.Do(func() {
</s> Implement online stats calculation in coredns plugin instead of scraping prometheus. </s> remove log.Printf("Loading top from querylog")
err := loadTopFromFiles()
</s> add log.Printf("Loading stats from querylog")
err := fillStatsFromQueryLog() </s> remove err = runningTop.addEntry(&entry, now)
</s> add err = runningTop.addEntry(&entry, question, now) </s> remove err := runningTop.addEntry(entry, now)
</s> add if len(entry.Question) == 0 {
log.Printf("entry question is absent, skipping")
return nil
}
if entry.Time.After(now) {
log.Printf("t %v vs %v is in the future, ignoring", entry.Time, now)
return nil
}
q := new(dns.Msg)
if err := q.Unpack(entry.Question); err != nil {
log.Printf("failed to unpack dns message question: %s", err)
return err
}
if len(q.Question) != 1 {
log.Printf("malformed dns message, has no questions, skipping")
return nil
}
err := runningTop.addEntry(entry, q, now) </s> remove errortext := fmt.Sprintf("Must specify valid end_time parameter: %s", err)
</s> add errortext := fmt.Sprintf("Couldn't get stats_top from coredns: %T %s\n", err, err) </s> remove histrical := generateMapFromStats(&statistics.PerHour, 0, 24)
// sum them up
summed := map[string]interface{}{}
for key, values := range histrical {
summedValue := 0.0
floats, ok := values.([]float64)
if !ok {
continue
}
for _, v := range floats {
summedValue += v
}
summed[key] = summedValue
</s> add resp, err := client.Get("http://127.0.0.1:8618/stats")
if err != nil {
errortext := fmt.Sprintf("Couldn't get stats_top from coredns: %T %s\n", err, err)
log.Println(errortext)
http.Error(w, errortext, http.StatusBadGateway)
return
}
if resp != nil && resp.Body != nil {
defer resp.Body.Close() | [
"keep",
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep",
"keep",
"keep"
] | https://github.com/AdguardTeam/AdGuardHome/commit/37f6d38c498740c7c3db59c31da031cd5c305694 | coredns_plugin/coredns_plugin.go |
x.MustRegister(elapsedTime) | <mask> x.MustRegister(whitelisted)
<mask> x.MustRegister(safesearch)
<mask> x.MustRegister(errorsTotal)
<mask> x.MustRegister(p)
<mask> }
<mask> return nil
<mask> })
<mask> c.OnShutdown(p.onShutdown)
</s> Implement online stats calculation in coredns plugin instead of scraping prometheus. </s> remove err := runningTop.addEntry(entry, now)
</s> add if len(entry.Question) == 0 {
log.Printf("entry question is absent, skipping")
return nil
}
if entry.Time.After(now) {
log.Printf("t %v vs %v is in the future, ignoring", entry.Time, now)
return nil
}
q := new(dns.Msg)
if err := q.Unpack(entry.Question); err != nil {
log.Printf("failed to unpack dns message question: %s", err)
return err
}
if len(q.Question) != 1 {
log.Printf("malformed dns message, has no questions, skipping")
return nil
}
err := runningTop.addEntry(entry, q, now) </s> remove log.Printf("file \"%s\": read %d entries", file, i)
</s> add elapsed := time.Since(now)
log.Printf("file \"%s\": read %d entries in %v, %v/entry", file, i, elapsed, elapsed/time.Duration(i)) </s> remove func loadTopFromFiles() error {
</s> add func fillStatsFromQueryLog() error { </s> remove q := new(dns.Msg)
if err := q.Unpack(entry.Question); err != nil {
log.Printf("failed to unpack dns message question: %s", err)
return err
}
if len(q.Question) != 1 {
log.Printf("malformed dns message, has no questions, skipping")
return nil
}
</s> add </s> remove log.Printf("Loading top from querylog")
err := loadTopFromFiles()
</s> add log.Printf("Loading stats from querylog")
err := fillStatsFromQueryLog() | [
"keep",
"keep",
"add",
"keep",
"keep",
"keep",
"keep",
"keep"
] | https://github.com/AdguardTeam/AdGuardHome/commit/37f6d38c498740c7c3db59c31da031cd5c305694 | coredns_plugin/coredns_plugin.go |
elapsed := time.Since(start)
elapsedTime.Observe(elapsed.Seconds()) | <mask>
<mask> // log
<mask> if p.settings.QueryLogEnabled {
<mask> logRequest(r, rrw.Msg, result, time.Since(start), ip)
<mask> }
<mask> return rcode, err
<mask> }
</s> Implement online stats calculation in coredns plugin instead of scraping prometheus. </s> remove log.Printf("Failed to load top from querylog: %s", err)
</s> add log.Printf("Failed to load stats from querylog: %s", err) </s> remove err = runningTop.addEntry(&entry, now)
</s> add err = runningTop.addEntry(&entry, question, now) </s> remove err := runningTop.addEntry(entry, now)
</s> add if len(entry.Question) == 0 {
log.Printf("entry question is absent, skipping")
return nil
}
if entry.Time.After(now) {
log.Printf("t %v vs %v is in the future, ignoring", entry.Time, now)
return nil
}
q := new(dns.Msg)
if err := q.Unpack(entry.Question); err != nil {
log.Printf("failed to unpack dns message question: %s", err)
return err
}
if len(q.Question) != 1 {
log.Printf("malformed dns message, has no questions, skipping")
return nil
}
err := runningTop.addEntry(entry, q, now) </s> remove log.Printf("Loading top from querylog")
err := loadTopFromFiles()
</s> add log.Printf("Loading stats from querylog")
err := fillStatsFromQueryLog() </s> remove q := new(dns.Msg)
if err := q.Unpack(entry.Question); err != nil {
log.Printf("failed to unpack dns message question: %s", err)
return err
}
if len(q.Question) != 1 {
log.Printf("malformed dns message, has no questions, skipping")
return nil
}
</s> add | [
"keep",
"add",
"keep",
"keep",
"keep",
"keep",
"keep"
] | https://github.com/AdguardTeam/AdGuardHome/commit/37f6d38c498740c7c3db59c31da031cd5c305694 | coredns_plugin/coredns_plugin.go |
err = runningTop.addEntry(&entry, question, now) | <mask> }
<mask> logBufferLock.Unlock()
<mask>
<mask> // add it to running top
<mask> err = runningTop.addEntry(&entry, now)
<mask> if err != nil {
<mask> log.Printf("Failed to add entry to running top: %s", err)
<mask> // don't do failure, just log
<mask> }
<mask>
</s> Implement online stats calculation in coredns plugin instead of scraping prometheus. </s> remove err := runningTop.addEntry(entry, now)
</s> add if len(entry.Question) == 0 {
log.Printf("entry question is absent, skipping")
return nil
}
if entry.Time.After(now) {
log.Printf("t %v vs %v is in the future, ignoring", entry.Time, now)
return nil
}
q := new(dns.Msg)
if err := q.Unpack(entry.Question); err != nil {
log.Printf("failed to unpack dns message question: %s", err)
return err
}
if len(q.Question) != 1 {
log.Printf("malformed dns message, has no questions, skipping")
return nil
}
err := runningTop.addEntry(entry, q, now) </s> remove log.Printf("Failed to load top from querylog: %s", err)
</s> add log.Printf("Failed to load stats from querylog: %s", err) </s> remove log.Printf("Loading top from querylog")
err := loadTopFromFiles()
</s> add log.Printf("Loading stats from querylog")
err := fillStatsFromQueryLog() </s> remove errortext := fmt.Sprintf("Unable to write response json: %s", err)
</s> add errortext := fmt.Sprintf("Couldn't write body: %s", err) </s> remove errortext := fmt.Sprintf("Unable to write response json: %s", err)
</s> add errortext := fmt.Sprintf("Couldn't write body: %s", err) | [
"keep",
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep",
"keep",
"keep"
] | https://github.com/AdguardTeam/AdGuardHome/commit/37f6d38c498740c7c3db59c31da031cd5c305694 | coredns_plugin/querylog.go |
<mask> // write to file
<mask> // do it in separate goroutine -- we are stalling DNS response this whole time
<mask> go flushToFile(flushBuffer)
<mask> }
<mask> return
<mask> }
<mask>
<mask> func handleQueryLog(w http.ResponseWriter, r *http.Request) {
<mask> now := time.Now()
<mask>
</s> Implement online stats calculation in coredns plugin instead of scraping prometheus. </s> remove http.Error(w, errortext, 500)
return
</s> add http.Error(w, errortext, http.StatusInternalServerError) </s> remove http.Error(w, errortext, 500)
return
</s> add http.Error(w, errortext, http.StatusInternalServerError) </s> remove // handle time unit and prepare our time window size
now := time.Now()
timeUnitString := r.URL.Query().Get("time_unit")
var stats *periodicStats
var timeUnit time.Duration
switch timeUnitString {
case "seconds":
timeUnit = time.Second
stats = &statistics.PerSecond
case "minutes":
timeUnit = time.Minute
stats = &statistics.PerMinute
case "hours":
timeUnit = time.Hour
stats = &statistics.PerHour
case "days":
timeUnit = time.Hour * 24
stats = &statistics.PerDay
default:
http.Error(w, "Must specify valid time_unit parameter", 400)
return
}
// parse start and end time
startTime, err := time.Parse(time.RFC3339, r.URL.Query().Get("start_time"))
if err != nil {
errortext := fmt.Sprintf("Must specify valid start_time parameter: %s", err)
log.Println(errortext)
http.Error(w, errortext, 400)
return
}
endTime, err := time.Parse(time.RFC3339, r.URL.Query().Get("end_time"))
</s> add resp, err := client.Get("http://127.0.0.1:8618/stats_history?" + r.URL.RawQuery) </s> remove func loadTopFromFiles() error {
</s> add func fillStatsFromQueryLog() error { </s> remove func (r *dayTop) addEntry(entry *logEntry, now time.Time) error {
if len(entry.Question) == 0 {
log.Printf("entry question is absent, skipping")
return nil
}
if entry.Time.After(now) {
log.Printf("t %v vs %v is in the future, ignoring", entry.Time, now)
return nil
}
</s> add func (r *dayTop) addEntry(entry *logEntry, q *dns.Msg, now time.Time) error { | [
"keep",
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep",
"keep",
"keep"
] | https://github.com/AdguardTeam/AdGuardHome/commit/37f6d38c498740c7c3db59c31da031cd5c305694 | coredns_plugin/querylog.go |
|
values = logBuffer | <mask>
<mask> if needRefresh {
<mask> // need to get fresh data
<mask> logBufferLock.RLock()
<mask> values := logBuffer
<mask> logBufferLock.RUnlock()
<mask>
<mask> if len(values) < queryLogCacheSize {
<mask> values = appendFromLogFile(values, queryLogCacheSize, queryLogTimeLimit)
<mask> }
</s> Implement online stats calculation in coredns plugin instead of scraping prometheus. </s> remove histrical := generateMapFromStats(&statistics.PerHour, 0, 24)
// sum them up
summed := map[string]interface{}{}
for key, values := range histrical {
summedValue := 0.0
floats, ok := values.([]float64)
if !ok {
continue
}
for _, v := range floats {
summedValue += v
}
summed[key] = summedValue
</s> add resp, err := client.Get("http://127.0.0.1:8618/stats")
if err != nil {
errortext := fmt.Sprintf("Couldn't get stats_top from coredns: %T %s\n", err, err)
log.Println(errortext)
http.Error(w, errortext, http.StatusBadGateway)
return
}
if resp != nil && resp.Body != nil {
defer resp.Body.Close() </s> remove data := generateMapFromStats(stats, start, end)
json, err := json.Marshal(data)
</s> add // read the body entirely
body, err := ioutil.ReadAll(resp.Body) </s> remove
// calculate start and end of our array
// basically it's how many hours/minutes/etc have passed since now
start := int(now.Sub(endTime) / timeUnit)
end := int(now.Sub(startTime) / timeUnit)
// swap them around if they're inverted
if start > end {
start, end = end, start
</s> add if resp != nil && resp.Body != nil {
defer resp.Body.Close() </s> remove q := new(dns.Msg)
if err := q.Unpack(entry.Question); err != nil {
log.Printf("failed to unpack dns message question: %s", err)
return err
}
if len(q.Question) != 1 {
log.Printf("malformed dns message, has no questions, skipping")
return nil
}
</s> add </s> remove errortext := fmt.Sprintf("Unable to marshal status json: %s", err)
</s> add errortext := fmt.Sprintf("Couldn't read response body: %s", err) | [
"keep",
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep",
"keep",
"keep"
] | https://github.com/AdguardTeam/AdGuardHome/commit/37f6d38c498740c7c3db59c31da031cd5c305694 | coredns_plugin/querylog.go |
go statsRotator() | <mask> listenAddr := net.JoinHostPort("127.0.0.1", queryLogAPIPort)
<mask>
<mask> go periodicQueryLogRotate()
<mask> go periodicHourlyTopRotate()
<mask>
<mask> http.HandleFunc("/querylog", handleQueryLog)
<mask> http.HandleFunc("/stats", handleStats)
<mask> http.HandleFunc("/stats_top", handleStatsTop)
<mask> http.HandleFunc("/stats_history", handleStatsHistory)
</s> Implement online stats calculation in coredns plugin instead of scraping prometheus. </s> remove return
</s> add </s> remove errortext := fmt.Sprintf("Unable to marshal status json: %s", err)
</s> add errortext := fmt.Sprintf("Couldn't read response body: %s", err) </s> remove histrical := generateMapFromStats(&statistics.PerHour, 0, 24)
// sum them up
summed := map[string]interface{}{}
for key, values := range histrical {
summedValue := 0.0
floats, ok := values.([]float64)
if !ok {
continue
}
for _, v := range floats {
summedValue += v
}
summed[key] = summedValue
</s> add resp, err := client.Get("http://127.0.0.1:8618/stats")
if err != nil {
errortext := fmt.Sprintf("Couldn't get stats_top from coredns: %T %s\n", err, err)
log.Println(errortext)
http.Error(w, errortext, http.StatusBadGateway)
return
}
if resp != nil && resp.Body != nil {
defer resp.Body.Close() | [
"keep",
"keep",
"keep",
"add",
"keep",
"keep",
"keep",
"keep",
"keep"
] | https://github.com/AdguardTeam/AdGuardHome/commit/37f6d38c498740c7c3db59c31da031cd5c305694 | coredns_plugin/querylog.go |
http.HandleFunc("/stats", handleStats) | <mask> go periodicHourlyTopRotate()
<mask> go statsRotator()
<mask>
<mask> http.HandleFunc("/querylog", handleQueryLog)
<mask> http.HandleFunc("/stats_top", handleStatsTop)
<mask> http.HandleFunc("/stats_history", handleStatsHistory)
<mask> if err := http.ListenAndServe(listenAddr, nil); err != nil {
<mask> log.Fatalf("error in ListenAndServe: %s", err)
<mask> }
<mask> }
</s> Implement online stats calculation in coredns plugin instead of scraping prometheus. </s> remove err := runningTop.addEntry(entry, now)
</s> add if len(entry.Question) == 0 {
log.Printf("entry question is absent, skipping")
return nil
}
if entry.Time.After(now) {
log.Printf("t %v vs %v is in the future, ignoring", entry.Time, now)
return nil
}
q := new(dns.Msg)
if err := q.Unpack(entry.Question); err != nil {
log.Printf("failed to unpack dns message question: %s", err)
return err
}
if len(q.Question) != 1 {
log.Printf("malformed dns message, has no questions, skipping")
return nil
}
err := runningTop.addEntry(entry, q, now) </s> remove errortext := fmt.Sprintf("Unable to write response json: %s", err)
</s> add errortext := fmt.Sprintf("Couldn't write body: %s", err) </s> remove errortext := fmt.Sprintf("Unable to write response json: %s", err)
</s> add errortext := fmt.Sprintf("Couldn't write body: %s", err) | [
"keep",
"keep",
"keep",
"add",
"keep",
"keep",
"keep",
"keep",
"keep",
"keep"
] | https://github.com/AdguardTeam/AdGuardHome/commit/37f6d38c498740c7c3db59c31da031cd5c305694 | coredns_plugin/querylog.go |
http.HandleFunc("/stats_history", handleStatsHistory) | <mask> http.HandleFunc("/querylog", handleQueryLog)
<mask> http.HandleFunc("/stats", handleStats)
<mask> http.HandleFunc("/stats_top", handleStatsTop)
<mask> if err := http.ListenAndServe(listenAddr, nil); err != nil {
<mask> log.Fatalf("error in ListenAndServe: %s", err)
<mask> }
<mask> }
<mask>
<mask> func trace(format string, args ...interface{}) {
</s> Implement online stats calculation in coredns plugin instead of scraping prometheus. </s> remove err := runningTop.addEntry(entry, now)
</s> add if len(entry.Question) == 0 {
log.Printf("entry question is absent, skipping")
return nil
}
if entry.Time.After(now) {
log.Printf("t %v vs %v is in the future, ignoring", entry.Time, now)
return nil
}
q := new(dns.Msg)
if err := q.Unpack(entry.Question); err != nil {
log.Printf("failed to unpack dns message question: %s", err)
return err
}
if len(q.Question) != 1 {
log.Printf("malformed dns message, has no questions, skipping")
return nil
}
err := runningTop.addEntry(entry, q, now) </s> remove http.Error(w, errortext, 500)
return
</s> add http.Error(w, errortext, http.StatusInternalServerError) </s> remove log.Printf("file \"%s\": read %d entries", file, i)
</s> add elapsed := time.Since(now)
log.Printf("file \"%s\": read %d entries in %v, %v/entry", file, i, elapsed, elapsed/time.Duration(i)) | [
"keep",
"keep",
"add",
"keep",
"keep",
"keep",
"keep",
"keep",
"keep"
] | https://github.com/AdguardTeam/AdGuardHome/commit/37f6d38c498740c7c3db59c31da031cd5c305694 | coredns_plugin/querylog.go |
elapsed := time.Since(now)
log.Printf("file \"%s\": read %d entries in %v, %v/entry", file, i, elapsed, elapsed/time.Duration(i)) | <mask> if err != nil {
<mask> return err
<mask> }
<mask> }
<mask> log.Printf("file \"%s\": read %d entries", file, i)
<mask> }
<mask> return nil
<mask> }
<mask>
<mask> func appendFromLogFile(values []logEntry, maxLen int, timeWindow time.Duration) []logEntry {
</s> Implement online stats calculation in coredns plugin instead of scraping prometheus. </s> remove err := runningTop.addEntry(entry, now)
</s> add if len(entry.Question) == 0 {
log.Printf("entry question is absent, skipping")
return nil
}
if entry.Time.After(now) {
log.Printf("t %v vs %v is in the future, ignoring", entry.Time, now)
return nil
}
q := new(dns.Msg)
if err := q.Unpack(entry.Question); err != nil {
log.Printf("failed to unpack dns message question: %s", err)
return err
}
if len(q.Question) != 1 {
log.Printf("malformed dns message, has no questions, skipping")
return nil
}
err := runningTop.addEntry(entry, q, now) </s> remove log.Printf("Loading top from querylog")
err := loadTopFromFiles()
</s> add log.Printf("Loading stats from querylog")
err := fillStatsFromQueryLog() </s> remove q := new(dns.Msg)
if err := q.Unpack(entry.Question); err != nil {
log.Printf("failed to unpack dns message question: %s", err)
return err
}
if len(q.Question) != 1 {
log.Printf("malformed dns message, has no questions, skipping")
return nil
}
</s> add </s> remove http.Error(w, errortext, 500)
return
</s> add http.Error(w, errortext, http.StatusInternalServerError) </s> remove histrical := generateMapFromStats(&statistics.PerHour, 0, 24)
// sum them up
summed := map[string]interface{}{}
for key, values := range histrical {
summedValue := 0.0
floats, ok := values.([]float64)
if !ok {
continue
}
for _, v := range floats {
summedValue += v
}
summed[key] = summedValue
</s> add resp, err := client.Get("http://127.0.0.1:8618/stats")
if err != nil {
errortext := fmt.Sprintf("Couldn't get stats_top from coredns: %T %s\n", err, err)
log.Println(errortext)
http.Error(w, errortext, http.StatusBadGateway)
return
}
if resp != nil && resp.Body != nil {
defer resp.Body.Close() | [
"keep",
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep",
"keep",
"keep"
] | https://github.com/AdguardTeam/AdGuardHome/commit/37f6d38c498740c7c3db59c31da031cd5c305694 | coredns_plugin/querylog_file.go |
"github.com/AdguardTeam/AdguardDNS/dnsfilter" | <mask> "sync"
<mask> "time"
<mask>
<mask> "github.com/bluele/gcache"
<mask> "github.com/miekg/dns"
<mask> )
<mask>
</s> Implement online stats calculation in coredns plugin instead of scraping prometheus. </s> remove func newDNSCounter(name string, help string) prometheus.Counter {
return prometheus.NewCounter(prometheus.CounterOpts{
Namespace: plugin.Namespace,
Subsystem: "dnsfilter",
Name: name,
Help: help,
})
}
var (
requests = newDNSCounter("requests_total", "Count of requests seen by dnsfilter.")
filtered = newDNSCounter("filtered_total", "Count of requests filtered by dnsfilter.")
filteredLists = newDNSCounter("filtered_lists_total", "Count of requests filtered by dnsfilter using lists.")
filteredSafebrowsing = newDNSCounter("filtered_safebrowsing_total", "Count of requests filtered by dnsfilter using safebrowsing.")
filteredParental = newDNSCounter("filtered_parental_total", "Count of requests filtered by dnsfilter using parental.")
filteredInvalid = newDNSCounter("filtered_invalid_total", "Count of requests filtered by dnsfilter because they were invalid.")
whitelisted = newDNSCounter("whitelisted_total", "Count of requests not filtered by dnsfilter because they are whitelisted.")
safesearch = newDNSCounter("safesearch_total", "Count of requests replaced by dnsfilter safesearch.")
errorsTotal = newDNSCounter("errors_total", "Count of requests that dnsfilter couldn't process because of transitive errors.")
)
</s> add </s> remove err := runningTop.addEntry(entry, now)
</s> add if len(entry.Question) == 0 {
log.Printf("entry question is absent, skipping")
return nil
}
if entry.Time.After(now) {
log.Printf("t %v vs %v is in the future, ignoring", entry.Time, now)
return nil
}
q := new(dns.Msg)
if err := q.Unpack(entry.Question); err != nil {
log.Printf("failed to unpack dns message question: %s", err)
return err
}
if len(q.Question) != 1 {
log.Printf("malformed dns message, has no questions, skipping")
return nil
}
err := runningTop.addEntry(entry, q, now) </s> remove http.Error(w, errortext, 500)
return
</s> add http.Error(w, errortext, http.StatusInternalServerError) </s> remove errortext := fmt.Sprintf("Unable to marshal status json: %s", err)
</s> add errortext := fmt.Sprintf("Couldn't read response body: %s", err) </s> remove data := generateMapFromStats(stats, start, end)
json, err := json.Marshal(data)
</s> add // read the body entirely
body, err := ioutil.ReadAll(resp.Body) | [
"keep",
"keep",
"add",
"keep",
"keep",
"keep",
"keep"
] | https://github.com/AdguardTeam/AdGuardHome/commit/37f6d38c498740c7c3db59c31da031cd5c305694 | coredns_plugin/querylog_top.go |
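Editor's note: the row above shows addEntry being handed an already-unpacked *dns.Msg, with the validation (empty question, future timestamp, malformed message) hoisted into the caller. Below is a hedged, self-contained sketch of that validation flow; the logEntry struct and addToTop helper are invented for the example and are not the plugin's real types.

// Hedged sketch of the entry-validation pattern from the diff above: skip
// entries with no question or a future timestamp, and unpack the stored
// DNS question before counting it.
package main

import (
	"fmt"
	"log"
	"strings"
	"time"

	"github.com/miekg/dns"
)

type logEntry struct {
	Question []byte
	Time     time.Time
}

func addToTop(entry *logEntry, now time.Time) error {
	if len(entry.Question) == 0 {
		log.Printf("entry question is absent, skipping")
		return nil
	}
	if entry.Time.After(now) {
		log.Printf("t %v vs %v is in the future, ignoring", entry.Time, now)
		return nil
	}
	q := new(dns.Msg)
	if err := q.Unpack(entry.Question); err != nil {
		return fmt.Errorf("failed to unpack dns message question: %w", err)
	}
	if len(q.Question) != 1 {
		log.Printf("malformed dns message, has no questions, skipping")
		return nil
	}
	hostname := strings.ToLower(strings.TrimSuffix(q.Question[0].Name, "."))
	log.Printf("would count hostname %q in the running top", hostname)
	return nil
}

func main() {
	// build a tiny query so the example runs end to end
	m := new(dns.Msg)
	m.SetQuestion("example.org.", dns.TypeA)
	packed, _ := m.Pack()
	_ = addToTop(&logEntry{Question: packed, Time: time.Now()}, time.Now())
}

In the real plugin the extracted hostname feeds the hourly counters; here it is only logged to keep the sketch short.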
func (r *dayTop) addEntry(entry *logEntry, q *dns.Msg, now time.Time) error { | <mask> func (top *hourTop) lockedGetClients(key string) (int, error) {
<mask> return top.lockedGetValue(key, top.clients)
<mask> }
<mask>
<mask> func (r *dayTop) addEntry(entry *logEntry, now time.Time) error {
<mask> if len(entry.Question) == 0 {
<mask> log.Printf("entry question is absent, skipping")
<mask> return nil
<mask> }
<mask>
<mask> if entry.Time.After(now) {
<mask> log.Printf("t %v vs %v is in the future, ignoring", entry.Time, now)
<mask> return nil
<mask> }
<mask> // figure out which hour bucket it belongs to
<mask> hour := int(now.Sub(entry.Time).Hours())
<mask> if hour >= 24 {
<mask> log.Printf("t %v is >24 hours ago, ignoring", entry.Time)
<mask> return nil
</s> Implement online stats calculation in coredns plugin instead of scraping prometheus. </s> remove err := runningTop.addEntry(entry, now)
</s> add if len(entry.Question) == 0 {
log.Printf("entry question is absent, skipping")
return nil
}
if entry.Time.After(now) {
log.Printf("t %v vs %v is in the future, ignoring", entry.Time, now)
return nil
}
q := new(dns.Msg)
if err := q.Unpack(entry.Question); err != nil {
log.Printf("failed to unpack dns message question: %s", err)
return err
}
if len(q.Question) != 1 {
log.Printf("malformed dns message, has no questions, skipping")
return nil
}
err := runningTop.addEntry(entry, q, now) </s> remove q := new(dns.Msg)
if err := q.Unpack(entry.Question); err != nil {
log.Printf("failed to unpack dns message question: %s", err)
return err
}
if len(q.Question) != 1 {
log.Printf("malformed dns message, has no questions, skipping")
return nil
}
</s> add </s> remove func loadTopFromFiles() error {
</s> add func fillStatsFromQueryLog() error { </s> remove http.Error(w, errortext, 400)
return
}
// check if start and time times are within supported time range
timeRange := timeUnit * statsHistoryElements
if startTime.Add(timeRange).Before(now) {
http.Error(w, "start_time parameter is outside of supported range", 501)
return
}
if endTime.Add(timeRange).Before(now) {
http.Error(w, "end_time parameter is outside of supported range", 501)
</s> add http.Error(w, errortext, http.StatusBadGateway) </s> remove
// calculate start and end of our array
// basically it's how many hours/minutes/etc have passed since now
start := int(now.Sub(endTime) / timeUnit)
end := int(now.Sub(startTime) / timeUnit)
// swap them around if they're inverted
if start > end {
start, end = end, start
</s> add if resp != nil && resp.Body != nil {
defer resp.Body.Close() | [
"keep",
"keep",
"keep",
"keep",
"replace",
"replace",
"replace",
"replace",
"replace",
"replace",
"replace",
"replace",
"replace",
"replace",
"keep",
"keep",
"keep",
"keep",
"keep"
] | https://github.com/AdguardTeam/AdGuardHome/commit/37f6d38c498740c7c3db59c31da031cd5c305694 | coredns_plugin/querylog_top.go |
<mask> log.Printf("t %v is >24 hours ago, ignoring", entry.Time)
<mask> return nil
<mask> }
<mask>
<mask> q := new(dns.Msg)
<mask> if err := q.Unpack(entry.Question); err != nil {
<mask> log.Printf("failed to unpack dns message question: %s", err)
<mask> return err
<mask> }
<mask>
<mask> if len(q.Question) != 1 {
<mask> log.Printf("malformed dns message, has no questions, skipping")
<mask> return nil
<mask> }
<mask>
<mask> hostname := strings.ToLower(strings.TrimSuffix(q.Question[0].Name, "."))
<mask>
<mask> // get value, if not set, crate one
<mask> runningTop.hoursReadLock()
<mask> defer runningTop.hoursReadUnlock()
</s> Implement online stats calculation in coredns plugin instead of scraping prometheus. </s> remove err := runningTop.addEntry(entry, now)
</s> add if len(entry.Question) == 0 {
log.Printf("entry question is absent, skipping")
return nil
}
if entry.Time.After(now) {
log.Printf("t %v vs %v is in the future, ignoring", entry.Time, now)
return nil
}
q := new(dns.Msg)
if err := q.Unpack(entry.Question); err != nil {
log.Printf("failed to unpack dns message question: %s", err)
return err
}
if len(q.Question) != 1 {
log.Printf("malformed dns message, has no questions, skipping")
return nil
}
err := runningTop.addEntry(entry, q, now) </s> remove func (r *dayTop) addEntry(entry *logEntry, now time.Time) error {
if len(entry.Question) == 0 {
log.Printf("entry question is absent, skipping")
return nil
}
if entry.Time.After(now) {
log.Printf("t %v vs %v is in the future, ignoring", entry.Time, now)
return nil
}
</s> add func (r *dayTop) addEntry(entry *logEntry, q *dns.Msg, now time.Time) error { </s> remove histrical := generateMapFromStats(&statistics.PerHour, 0, 24)
// sum them up
summed := map[string]interface{}{}
for key, values := range histrical {
summedValue := 0.0
floats, ok := values.([]float64)
if !ok {
continue
}
for _, v := range floats {
summedValue += v
}
summed[key] = summedValue
</s> add resp, err := client.Get("http://127.0.0.1:8618/stats")
if err != nil {
errortext := fmt.Sprintf("Couldn't get stats_top from coredns: %T %s\n", err, err)
log.Println(errortext)
http.Error(w, errortext, http.StatusBadGateway)
return
}
if resp != nil && resp.Body != nil {
defer resp.Body.Close() </s> remove errortext := fmt.Sprintf("Must specify valid end_time parameter: %s", err)
</s> add errortext := fmt.Sprintf("Couldn't get stats_top from coredns: %T %s\n", err, err) </s> remove http.Error(w, errortext, 500)
</s> add http.Error(w, errortext, http.StatusBadGateway) | [
"keep",
"keep",
"keep",
"keep",
"replace",
"replace",
"replace",
"replace",
"replace",
"replace",
"replace",
"replace",
"replace",
"replace",
"replace",
"keep",
"keep",
"keep",
"keep",
"keep"
] | https://github.com/AdguardTeam/AdGuardHome/commit/37f6d38c498740c7c3db59c31da031cd5c305694 | coredns_plugin/querylog_top.go |
|
func fillStatsFromQueryLog() error { | <mask>
<mask> return nil
<mask> }
<mask>
<mask> func loadTopFromFiles() error {
<mask> now := time.Now()
<mask> runningTop.loadedWriteLock()
<mask> defer runningTop.loadedWriteUnlock()
<mask> if runningTop.loaded {
<mask> return nil
</s> Implement online stats calculation in coredns plugin instead of scraping prometheus. </s> remove func (r *dayTop) addEntry(entry *logEntry, now time.Time) error {
if len(entry.Question) == 0 {
log.Printf("entry question is absent, skipping")
return nil
}
if entry.Time.After(now) {
log.Printf("t %v vs %v is in the future, ignoring", entry.Time, now)
return nil
}
</s> add func (r *dayTop) addEntry(entry *logEntry, q *dns.Msg, now time.Time) error { </s> remove err := runningTop.addEntry(entry, now)
</s> add if len(entry.Question) == 0 {
log.Printf("entry question is absent, skipping")
return nil
}
if entry.Time.After(now) {
log.Printf("t %v vs %v is in the future, ignoring", entry.Time, now)
return nil
}
q := new(dns.Msg)
if err := q.Unpack(entry.Question); err != nil {
log.Printf("failed to unpack dns message question: %s", err)
return err
}
if len(q.Question) != 1 {
log.Printf("malformed dns message, has no questions, skipping")
return nil
}
err := runningTop.addEntry(entry, q, now) </s> remove q := new(dns.Msg)
if err := q.Unpack(entry.Question); err != nil {
log.Printf("failed to unpack dns message question: %s", err)
return err
}
if len(q.Question) != 1 {
log.Printf("malformed dns message, has no questions, skipping")
return nil
}
</s> add </s> remove log.Printf("Loading top from querylog")
err := loadTopFromFiles()
</s> add log.Printf("Loading stats from querylog")
err := fillStatsFromQueryLog() </s> remove
// calculate start and end of our array
// basically it's how many hours/minutes/etc have passed since now
start := int(now.Sub(endTime) / timeUnit)
end := int(now.Sub(startTime) / timeUnit)
// swap them around if they're inverted
if start > end {
start, end = end, start
</s> add if resp != nil && resp.Body != nil {
defer resp.Body.Close() | [
"keep",
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep",
"keep",
"keep"
] | https://github.com/AdguardTeam/AdGuardHome/commit/37f6d38c498740c7c3db59c31da031cd5c305694 | coredns_plugin/querylog_top.go |
if len(entry.Question) == 0 {
log.Printf("entry question is absent, skipping")
return nil
}
if entry.Time.After(now) {
log.Printf("t %v vs %v is in the future, ignoring", entry.Time, now)
return nil
}
q := new(dns.Msg)
if err := q.Unpack(entry.Question); err != nil {
log.Printf("failed to unpack dns message question: %s", err)
return err
}
if len(q.Question) != 1 {
log.Printf("malformed dns message, has no questions, skipping")
return nil
}
err := runningTop.addEntry(entry, q, now) | <mask> if runningTop.loaded {
<mask> return nil
<mask> }
<mask> onEntry := func(entry *logEntry) error {
<mask> err := runningTop.addEntry(entry, now)
<mask> if err != nil {
<mask> log.Printf("Failed to add entry to running top: %s", err)
<mask> return err
<mask> }
<mask> return nil
</s> Implement online stats calculation in coredns plugin instead of scraping prometheus. </s> remove err = runningTop.addEntry(&entry, now)
</s> add err = runningTop.addEntry(&entry, question, now) </s> remove log.Printf("Failed to load top from querylog: %s", err)
</s> add log.Printf("Failed to load stats from querylog: %s", err) </s> remove log.Printf("Loading top from querylog")
err := loadTopFromFiles()
</s> add log.Printf("Loading stats from querylog")
err := fillStatsFromQueryLog() </s> remove func loadTopFromFiles() error {
</s> add func fillStatsFromQueryLog() error { </s> remove http.Error(w, errortext, 500)
</s> add http.Error(w, errortext, http.StatusBadGateway) | [
"keep",
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep",
"keep",
"keep"
] | https://github.com/AdguardTeam/AdGuardHome/commit/37f6d38c498740c7c3db59c31da031cd5c305694 | coredns_plugin/querylog_top.go |
github.com/kr/pretty v0.1.0 // indirect | <mask> github.com/gobuffalo/packr v1.19.0
<mask> github.com/joomcode/errorx v1.0.0
<mask> github.com/kardianos/osext v0.0.0-20170510131534-ae77be60afb1 // indirect
<mask> github.com/kardianos/service v0.0.0-20181115005516-4c239ee84e7b
<mask> github.com/krolaw/dhcp4 v0.0.0-20180925202202-7cead472c414
<mask> github.com/miekg/dns v1.1.19
<mask> github.com/sparrc/go-ping v0.0.0-20181106165434-ef3ab45e41b0
<mask> github.com/stretchr/testify v1.4.0
</s> * whois: use whois.arin.net
+ robust redirect mechanism
* decrease timeout 30sec -> 5sec
* faster response parsing
* don't use likexian/whois-go package </s> remove github.com/likexian/whois-go v0.0.0-20190627090909-384b3df3fc49
</s> add </s> remove github.com/likexian/gokit v0.0.0-20190309162924-0a377eecf7aa/go.mod h1:QdfYv6y6qPA9pbBA2qXtoT8BMKha6UyNbxWGWl/9Jfk=
github.com/likexian/gokit v0.0.0-20190418170008-ace88ad0983b/go.mod h1:KKqSnk/VVSW8kEyO2vVCXoanzEutKdlBAPohmGXkxCk=
github.com/likexian/gokit v0.0.0-20190501133040-e77ea8b19cdc/go.mod h1:3kvONayqCaj+UgrRZGpgfXzHdMYCAO0KAt4/8n0L57Y=
github.com/likexian/gokit v0.0.0-20190604165112-68b8a4ba758c h1:KByA4IxKqqYwpqzk/P+w1DBpkPbvy3DArTP/U3LSxTQ=
github.com/likexian/gokit v0.0.0-20190604165112-68b8a4ba758c/go.mod h1:kn+nTv3tqh6yhor9BC4Lfiu58SmH8NmQ2PmEl+uM6nU=
github.com/likexian/simplejson-go v0.0.0-20190409170913-40473a74d76d/go.mod h1:Typ1BfnATYtZ/+/shXfFYLrovhFyuKvzwrdOnIDHlmg=
github.com/likexian/simplejson-go v0.0.0-20190419151922-c1f9f0b4f084/go.mod h1:U4O1vIJvIKwbMZKUJ62lppfdvkCdVd2nfMimHK81eec=
github.com/likexian/simplejson-go v0.0.0-20190502021454-d8787b4bfa0b/go.mod h1:3BWwtmKP9cXWwYCr5bkoVDEfLywacOv0s06OBEDpyt8=
github.com/likexian/whois-go v0.0.0-20190627090909-384b3df3fc49 h1:xGa+flE6p2UnMgxIS8bm7Q9JSt47HRuYVtwneDVnfLk=
github.com/likexian/whois-go v0.0.0-20190627090909-384b3df3fc49/go.mod h1:oR3bJMzrOb55cqTAn14DEzYFLDpSPTXJ3ORe7go9Hc8=
</s> add | [
"keep",
"keep",
"keep",
"add",
"keep",
"keep",
"keep",
"keep"
] | https://github.com/AdguardTeam/AdGuardHome/commit/37fe3c148f6edcb7ad8567ed2490f8c67208c50f | go.mod |
<mask> github.com/joomcode/errorx v1.0.0
<mask> github.com/kardianos/osext v0.0.0-20170510131534-ae77be60afb1 // indirect
<mask> github.com/kardianos/service v0.0.0-20181115005516-4c239ee84e7b
<mask> github.com/krolaw/dhcp4 v0.0.0-20180925202202-7cead472c414
<mask> github.com/likexian/whois-go v0.0.0-20190627090909-384b3df3fc49
<mask> github.com/miekg/dns v1.1.19
<mask> github.com/sparrc/go-ping v0.0.0-20181106165434-ef3ab45e41b0
<mask> github.com/stretchr/testify v1.4.0
<mask> go.etcd.io/bbolt v1.3.3
<mask> golang.org/x/crypto v0.0.0-20191001170739-f9e2070545dc
</s> * whois: use whois.arin.net
+ robust redirect mechanism
* decrease timeout 30sec -> 5sec
* faster response parsing
* don't use likexian/whois-go package </s> remove github.com/likexian/gokit v0.0.0-20190309162924-0a377eecf7aa/go.mod h1:QdfYv6y6qPA9pbBA2qXtoT8BMKha6UyNbxWGWl/9Jfk=
github.com/likexian/gokit v0.0.0-20190418170008-ace88ad0983b/go.mod h1:KKqSnk/VVSW8kEyO2vVCXoanzEutKdlBAPohmGXkxCk=
github.com/likexian/gokit v0.0.0-20190501133040-e77ea8b19cdc/go.mod h1:3kvONayqCaj+UgrRZGpgfXzHdMYCAO0KAt4/8n0L57Y=
github.com/likexian/gokit v0.0.0-20190604165112-68b8a4ba758c h1:KByA4IxKqqYwpqzk/P+w1DBpkPbvy3DArTP/U3LSxTQ=
github.com/likexian/gokit v0.0.0-20190604165112-68b8a4ba758c/go.mod h1:kn+nTv3tqh6yhor9BC4Lfiu58SmH8NmQ2PmEl+uM6nU=
github.com/likexian/simplejson-go v0.0.0-20190409170913-40473a74d76d/go.mod h1:Typ1BfnATYtZ/+/shXfFYLrovhFyuKvzwrdOnIDHlmg=
github.com/likexian/simplejson-go v0.0.0-20190419151922-c1f9f0b4f084/go.mod h1:U4O1vIJvIKwbMZKUJ62lppfdvkCdVd2nfMimHK81eec=
github.com/likexian/simplejson-go v0.0.0-20190502021454-d8787b4bfa0b/go.mod h1:3BWwtmKP9cXWwYCr5bkoVDEfLywacOv0s06OBEDpyt8=
github.com/likexian/whois-go v0.0.0-20190627090909-384b3df3fc49 h1:xGa+flE6p2UnMgxIS8bm7Q9JSt47HRuYVtwneDVnfLk=
github.com/likexian/whois-go v0.0.0-20190627090909-384b3df3fc49/go.mod h1:oR3bJMzrOb55cqTAn14DEzYFLDpSPTXJ3ORe7go9Hc8=
</s> add </s> remove golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5 h1:58fnuSXlxZmFdJyvtTFVmVhcMLU6v5fEb/ok4wyqtNU=
golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
</s> add | [
"keep",
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep",
"keep",
"keep"
] | https://github.com/AdguardTeam/AdGuardHome/commit/37fe3c148f6edcb7ad8567ed2490f8c67208c50f | go.mod |
|
<mask> github.com/AdguardTeam/dnsproxy v0.19.4 h1:rZb40VUr/yN8RG4j3+NuGqODmPvte7acPfSDl0j2wiU=
<mask> github.com/AdguardTeam/dnsproxy v0.19.4/go.mod h1:NaulY9i279jZwN8QBbvbZnn5HkrjBgJi4hbFY5nW+Kc=
<mask> github.com/AdguardTeam/dnsproxy v0.19.5 h1:QAKWa2+rTp7GAeOFLMPqIYPS7eglLVEkVLH4kHRbnCQ=
<mask> github.com/AdguardTeam/dnsproxy v0.19.5/go.mod h1:qEiDndktnVJYYzHiQGKUl8Zm0b7HGpPmYWShAxmqjtw=
<mask> github.com/AdguardTeam/golibs v0.1.3 h1:hmapdTtMtIk3T8eQDwTOLdqZLGDKNKk9325uC8z12xg=
<mask> github.com/AdguardTeam/golibs v0.1.3/go.mod h1:b0XkhgIcn2TxwX6C5AQMtpIFAgjPehNgxJErWkwA3ko=
<mask> github.com/AdguardTeam/golibs v0.2.1 h1:jGCnbM5UOUq/GrG+8eLN7Y+OTfEo5F/8L0wq3ur2h4E=
</s> * whois: use whois.arin.net
+ robust redirect mechanism
* decrease timeout 30sec -> 5sec
* faster response parsing
* don't use likexian/whois-go package </s> remove github.com/AdguardTeam/urlfilter v0.5.0 h1:ATzs2Er0BMt7NbZnFJ4UEzt3uIV+rydbQCYqBXNRbJc=
github.com/AdguardTeam/urlfilter v0.5.0/go.mod h1:6YehXZ8e0Hx2MvqeQWLFom6IkPinm04tNhO1CkwAxmg=
</s> add </s> remove assert.True(t, strings.Index(resp, "OrgName: Google LLC") != -1)
assert.True(t, strings.Index(resp, "City: Mountain View") != -1)
assert.True(t, strings.Index(resp, "Country: US") != -1)
</s> add </s> remove resp, err := whois.Whois("8.8.8.8")
</s> add w := Whois{timeoutMsec: 5000}
resp, err := w.queryAll("8.8.8.8") </s> remove github.com/likexian/whois-go v0.0.0-20190627090909-384b3df3fc49
</s> add | [
"replace",
"replace",
"keep",
"keep",
"keep",
"keep",
"keep"
] | https://github.com/AdguardTeam/AdGuardHome/commit/37fe3c148f6edcb7ad8567ed2490f8c67208c50f | go.sum |
|
<mask> github.com/AdguardTeam/golibs v0.1.3 h1:hmapdTtMtIk3T8eQDwTOLdqZLGDKNKk9325uC8z12xg=
<mask> github.com/AdguardTeam/golibs v0.1.3/go.mod h1:b0XkhgIcn2TxwX6C5AQMtpIFAgjPehNgxJErWkwA3ko=
<mask> github.com/AdguardTeam/golibs v0.2.1 h1:jGCnbM5UOUq/GrG+8eLN7Y+OTfEo5F/8L0wq3ur2h4E=
<mask> github.com/AdguardTeam/golibs v0.2.1/go.mod h1:caAJ5knSHbR6vV6qfRDgAfXVia4hHgLqeztAY4UX0fw=
<mask> github.com/AdguardTeam/urlfilter v0.5.0 h1:ATzs2Er0BMt7NbZnFJ4UEzt3uIV+rydbQCYqBXNRbJc=
<mask> github.com/AdguardTeam/urlfilter v0.5.0/go.mod h1:6YehXZ8e0Hx2MvqeQWLFom6IkPinm04tNhO1CkwAxmg=
<mask> github.com/AdguardTeam/urlfilter v0.6.0 h1:HVPfAsGcHW47HasmqcLNA/VJ41GaR/SzUufuIj70ouA=
<mask> github.com/AdguardTeam/urlfilter v0.6.0/go.mod h1:y+XdxBdbRG9v7pfjznlvv4Ufi2HTG8D0YMqR22OVy0Y=
<mask> github.com/NYTimes/gziphandler v1.1.1 h1:ZUDjpQae29j0ryrS0u/B8HZfJBtBQHjqw2rQ2cqUQ3I=
<mask> github.com/NYTimes/gziphandler v1.1.1/go.mod h1:n/CVRwUEOgIxrgPvAQhUUr9oeUtvrhMomdKFjzJNB0c=
<mask> github.com/StackExchange/wmi v0.0.0-20181212234831-e0a55b97c705 h1:UUppSQnhf4Yc6xGxSkoQpPhb7RVzuv5Nb1mwJ5VId9s=
</s> * whois: use whois.arin.net
+ robust redirect mechanism
* decrease timeout 30sec -> 5sec
* faster response parsing
* don't use likexian/whois-go package </s> remove github.com/AdguardTeam/dnsproxy v0.19.4 h1:rZb40VUr/yN8RG4j3+NuGqODmPvte7acPfSDl0j2wiU=
github.com/AdguardTeam/dnsproxy v0.19.4/go.mod h1:NaulY9i279jZwN8QBbvbZnn5HkrjBgJi4hbFY5nW+Kc=
</s> add </s> remove assert.True(t, strings.Index(resp, "OrgName: Google LLC") != -1)
assert.True(t, strings.Index(resp, "City: Mountain View") != -1)
assert.True(t, strings.Index(resp, "Country: US") != -1)
</s> add </s> remove resp, err := whois.Whois("8.8.8.8")
</s> add w := Whois{timeoutMsec: 5000}
resp, err := w.queryAll("8.8.8.8") | [
"keep",
"keep",
"keep",
"keep",
"replace",
"replace",
"keep",
"keep",
"keep",
"keep",
"keep"
] | https://github.com/AdguardTeam/AdGuardHome/commit/37fe3c148f6edcb7ad8567ed2490f8c67208c50f | go.sum |
|
github.com/StackExchange/wmi v0.0.0-20190523213315-cbe66965904d h1:G0m3OIz70MZUWq3EgK3CesDbo8upS2Vm9/P3FtgI+Jk= | <mask> github.com/StackExchange/wmi v0.0.0-20181212234831-e0a55b97c705 h1:UUppSQnhf4Yc6xGxSkoQpPhb7RVzuv5Nb1mwJ5VId9s=
<mask> github.com/StackExchange/wmi v0.0.0-20181212234831-e0a55b97c705/go.mod h1:3eOhrUMpNV+6aFIbp5/iudMxNCF27Vw2OZgy4xEx0Fg=
<mask> github.com/StackExchange/wmi v0.0.0-20190523213315-cbe66965904d/go.mod h1:3eOhrUMpNV+6aFIbp5/iudMxNCF27Vw2OZgy4xEx0Fg=
<mask> github.com/aead/chacha20 v0.0.0-20180709150244-8b13a72661da h1:KjTM2ks9d14ZYCvmHS9iAKVt9AyzRSqNU1qabPih5BY=
<mask> github.com/aead/chacha20 v0.0.0-20180709150244-8b13a72661da/go.mod h1:eHEWzANqSiWQsof+nXEI9bUVUyV6F53Fp89EuCh2EAA=
<mask> github.com/aead/poly1305 v0.0.0-20180717145839-3fee0db0b635 h1:52m0LGchQBBVqJRyYYufQuIbVqRawmubW3OFGqK1ekw=
<mask> github.com/aead/poly1305 v0.0.0-20180717145839-3fee0db0b635/go.mod h1:lmLxL+FV291OopO93Bwf9fQLQeLyt33VJRUg5VJ30us=
</s> * whois: use whois.arin.net
+ robust redirect mechanism
* decrease timeout 30sec -> 5sec
* faster response parsing
* don't use likexian/whois-go package </s> remove github.com/AdguardTeam/urlfilter v0.5.0 h1:ATzs2Er0BMt7NbZnFJ4UEzt3uIV+rydbQCYqBXNRbJc=
github.com/AdguardTeam/urlfilter v0.5.0/go.mod h1:6YehXZ8e0Hx2MvqeQWLFom6IkPinm04tNhO1CkwAxmg=
</s> add </s> remove assert.True(t, strings.Index(resp, "OrgName: Google LLC") != -1)
assert.True(t, strings.Index(resp, "City: Mountain View") != -1)
assert.True(t, strings.Index(resp, "Country: US") != -1)
</s> add </s> remove resp, err := whois.Whois("8.8.8.8")
</s> add w := Whois{timeoutMsec: 5000}
resp, err := w.queryAll("8.8.8.8") </s> remove github.com/likexian/whois-go v0.0.0-20190627090909-384b3df3fc49
</s> add </s> remove github.com/AdguardTeam/dnsproxy v0.19.4 h1:rZb40VUr/yN8RG4j3+NuGqODmPvte7acPfSDl0j2wiU=
github.com/AdguardTeam/dnsproxy v0.19.4/go.mod h1:NaulY9i279jZwN8QBbvbZnn5HkrjBgJi4hbFY5nW+Kc=
</s> add | [
"keep",
"add",
"keep",
"keep",
"keep",
"keep",
"keep"
] | https://github.com/AdguardTeam/AdGuardHome/commit/37fe3c148f6edcb7ad8567ed2490f8c67208c50f | go.sum |
github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI=
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE=
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= | <mask> github.com/kardianos/service v0.0.0-20181115005516-4c239ee84e7b h1:vfiqKno48aUndBMjTeWFpCExNnTf2Xnd6d228L4EfTQ=
<mask> github.com/kardianos/service v0.0.0-20181115005516-4c239ee84e7b/go.mod h1:10UU/bEkzh2iEN6aYzbevY7J6p03KO5siTxQWXMEerg=
<mask> github.com/krolaw/dhcp4 v0.0.0-20180925202202-7cead472c414 h1:6wnYc2S/lVM7BvR32BM74ph7bPgqMztWopMYKgVyEho=
<mask> github.com/krolaw/dhcp4 v0.0.0-20180925202202-7cead472c414/go.mod h1:0AqAH3ZogsCrvrtUpvc6EtVKbc3w6xwZhkvGLuqyi3o=
<mask> github.com/markbates/oncer v0.0.0-20181014194634-05fccaae8fc4 h1:Mlji5gkcpzkqTROyE4ZxZ8hN7osunMb2RuGVrbvMvCc=
<mask> github.com/markbates/oncer v0.0.0-20181014194634-05fccaae8fc4/go.mod h1:Ld9puTsIW75CHf65OeIOkyKbteujpZVXDpWK6YGZbxE=
<mask> github.com/miekg/dns v1.1.8 h1:1QYRAKU3lN5cRfLCkPU08hwvLJFhvjP6MqNMmQz6ZVI=
</s> * whois: use whois.arin.net
+ robust redirect mechanism
* decrease timeout 30sec -> 5sec
* faster response parsing
* don't use likexian/whois-go package </s> remove github.com/likexian/gokit v0.0.0-20190309162924-0a377eecf7aa/go.mod h1:QdfYv6y6qPA9pbBA2qXtoT8BMKha6UyNbxWGWl/9Jfk=
github.com/likexian/gokit v0.0.0-20190418170008-ace88ad0983b/go.mod h1:KKqSnk/VVSW8kEyO2vVCXoanzEutKdlBAPohmGXkxCk=
github.com/likexian/gokit v0.0.0-20190501133040-e77ea8b19cdc/go.mod h1:3kvONayqCaj+UgrRZGpgfXzHdMYCAO0KAt4/8n0L57Y=
github.com/likexian/gokit v0.0.0-20190604165112-68b8a4ba758c h1:KByA4IxKqqYwpqzk/P+w1DBpkPbvy3DArTP/U3LSxTQ=
github.com/likexian/gokit v0.0.0-20190604165112-68b8a4ba758c/go.mod h1:kn+nTv3tqh6yhor9BC4Lfiu58SmH8NmQ2PmEl+uM6nU=
github.com/likexian/simplejson-go v0.0.0-20190409170913-40473a74d76d/go.mod h1:Typ1BfnATYtZ/+/shXfFYLrovhFyuKvzwrdOnIDHlmg=
github.com/likexian/simplejson-go v0.0.0-20190419151922-c1f9f0b4f084/go.mod h1:U4O1vIJvIKwbMZKUJ62lppfdvkCdVd2nfMimHK81eec=
github.com/likexian/simplejson-go v0.0.0-20190502021454-d8787b4bfa0b/go.mod h1:3BWwtmKP9cXWwYCr5bkoVDEfLywacOv0s06OBEDpyt8=
github.com/likexian/whois-go v0.0.0-20190627090909-384b3df3fc49 h1:xGa+flE6p2UnMgxIS8bm7Q9JSt47HRuYVtwneDVnfLk=
github.com/likexian/whois-go v0.0.0-20190627090909-384b3df3fc49/go.mod h1:oR3bJMzrOb55cqTAn14DEzYFLDpSPTXJ3ORe7go9Hc8=
</s> add </s> remove github.com/likexian/whois-go v0.0.0-20190627090909-384b3df3fc49
</s> add </s> remove resp, err := whois.Whois("8.8.8.8")
</s> add w := Whois{timeoutMsec: 5000}
resp, err := w.queryAll("8.8.8.8") </s> remove github.com/AdguardTeam/dnsproxy v0.19.4 h1:rZb40VUr/yN8RG4j3+NuGqODmPvte7acPfSDl0j2wiU=
github.com/AdguardTeam/dnsproxy v0.19.4/go.mod h1:NaulY9i279jZwN8QBbvbZnn5HkrjBgJi4hbFY5nW+Kc=
</s> add | [
"keep",
"add",
"keep",
"keep",
"keep",
"keep",
"keep"
] | https://github.com/AdguardTeam/AdGuardHome/commit/37fe3c148f6edcb7ad8567ed2490f8c67208c50f | go.sum |
<mask> github.com/kardianos/service v0.0.0-20181115005516-4c239ee84e7b h1:vfiqKno48aUndBMjTeWFpCExNnTf2Xnd6d228L4EfTQ=
<mask> github.com/kardianos/service v0.0.0-20181115005516-4c239ee84e7b/go.mod h1:10UU/bEkzh2iEN6aYzbevY7J6p03KO5siTxQWXMEerg=
<mask> github.com/krolaw/dhcp4 v0.0.0-20180925202202-7cead472c414 h1:6wnYc2S/lVM7BvR32BM74ph7bPgqMztWopMYKgVyEho=
<mask> github.com/krolaw/dhcp4 v0.0.0-20180925202202-7cead472c414/go.mod h1:0AqAH3ZogsCrvrtUpvc6EtVKbc3w6xwZhkvGLuqyi3o=
<mask> github.com/likexian/gokit v0.0.0-20190309162924-0a377eecf7aa/go.mod h1:QdfYv6y6qPA9pbBA2qXtoT8BMKha6UyNbxWGWl/9Jfk=
<mask> github.com/likexian/gokit v0.0.0-20190418170008-ace88ad0983b/go.mod h1:KKqSnk/VVSW8kEyO2vVCXoanzEutKdlBAPohmGXkxCk=
<mask> github.com/likexian/gokit v0.0.0-20190501133040-e77ea8b19cdc/go.mod h1:3kvONayqCaj+UgrRZGpgfXzHdMYCAO0KAt4/8n0L57Y=
<mask> github.com/likexian/gokit v0.0.0-20190604165112-68b8a4ba758c h1:KByA4IxKqqYwpqzk/P+w1DBpkPbvy3DArTP/U3LSxTQ=
<mask> github.com/likexian/gokit v0.0.0-20190604165112-68b8a4ba758c/go.mod h1:kn+nTv3tqh6yhor9BC4Lfiu58SmH8NmQ2PmEl+uM6nU=
<mask> github.com/likexian/simplejson-go v0.0.0-20190409170913-40473a74d76d/go.mod h1:Typ1BfnATYtZ/+/shXfFYLrovhFyuKvzwrdOnIDHlmg=
<mask> github.com/likexian/simplejson-go v0.0.0-20190419151922-c1f9f0b4f084/go.mod h1:U4O1vIJvIKwbMZKUJ62lppfdvkCdVd2nfMimHK81eec=
<mask> github.com/likexian/simplejson-go v0.0.0-20190502021454-d8787b4bfa0b/go.mod h1:3BWwtmKP9cXWwYCr5bkoVDEfLywacOv0s06OBEDpyt8=
<mask> github.com/likexian/whois-go v0.0.0-20190627090909-384b3df3fc49 h1:xGa+flE6p2UnMgxIS8bm7Q9JSt47HRuYVtwneDVnfLk=
<mask> github.com/likexian/whois-go v0.0.0-20190627090909-384b3df3fc49/go.mod h1:oR3bJMzrOb55cqTAn14DEzYFLDpSPTXJ3ORe7go9Hc8=
<mask> github.com/markbates/oncer v0.0.0-20181014194634-05fccaae8fc4 h1:Mlji5gkcpzkqTROyE4ZxZ8hN7osunMb2RuGVrbvMvCc=
<mask> github.com/markbates/oncer v0.0.0-20181014194634-05fccaae8fc4/go.mod h1:Ld9puTsIW75CHf65OeIOkyKbteujpZVXDpWK6YGZbxE=
<mask> github.com/miekg/dns v1.1.8 h1:1QYRAKU3lN5cRfLCkPU08hwvLJFhvjP6MqNMmQz6ZVI=
<mask> github.com/miekg/dns v1.1.8/go.mod h1:W1PPwlIAgtquWBMBEV9nkV9Cazfe8ScdGz/Lj7v3Nrg=
<mask> github.com/miekg/dns v1.1.19 h1:0ymbfaLG1/utH2+BydNiF+dx1jSEmdr/nylOtkGHZZg=
</s> * whois: use whois.arin.net
+ robust redirect mechanism
* decrease timeout 30sec -> 5sec
* faster response parsing
* don't use likexian/whois-go package </s> remove github.com/likexian/whois-go v0.0.0-20190627090909-384b3df3fc49
</s> add </s> remove resp, err := whois.Whois("8.8.8.8")
</s> add w := Whois{timeoutMsec: 5000}
resp, err := w.queryAll("8.8.8.8") </s> remove github.com/AdguardTeam/dnsproxy v0.19.4 h1:rZb40VUr/yN8RG4j3+NuGqODmPvte7acPfSDl0j2wiU=
github.com/AdguardTeam/dnsproxy v0.19.4/go.mod h1:NaulY9i279jZwN8QBbvbZnn5HkrjBgJi4hbFY5nW+Kc=
</s> add | [
"keep",
"keep",
"keep",
"keep",
"replace",
"replace",
"replace",
"replace",
"replace",
"replace",
"replace",
"replace",
"replace",
"replace",
"keep",
"keep",
"keep",
"keep",
"keep"
] | https://github.com/AdguardTeam/AdGuardHome/commit/37fe3c148f6edcb7ad8567ed2490f8c67208c50f | go.sum |
|
github.com/shirou/gopsutil v2.19.9+incompatible h1:IrPVlK4nfwW10DF7pW+7YJKws9NkgNzWozwwWv9FsgY= | <mask> github.com/rogpeppe/go-charset v0.0.0-20180617210344-2471d30d28b4/go.mod h1:qgYeAmZ5ZIpBWTGllZSQnw97Dj+woV0toclVaRGI8pc=
<mask> github.com/rogpeppe/go-charset v0.0.0-20190617161244-0dc95cdf6f31/go.mod h1:qgYeAmZ5ZIpBWTGllZSQnw97Dj+woV0toclVaRGI8pc=
<mask> github.com/shirou/gopsutil v2.18.12+incompatible h1:1eaJvGomDnH74/5cF4CTmTbLHAriGFsTZppLXDX93OM=
<mask> github.com/shirou/gopsutil v2.18.12+incompatible/go.mod h1:5b4v6he4MtMOwMlS0TUMTu2PcXUg8+E1lC7eC3UO/RA=
<mask> github.com/shirou/gopsutil v2.19.9+incompatible/go.mod h1:5b4v6he4MtMOwMlS0TUMTu2PcXUg8+E1lC7eC3UO/RA=
<mask> github.com/shirou/w32 v0.0.0-20160930032740-bb4de0191aa4 h1:udFKJ0aHUL60LboW/A+DfgoHVedieIzIXE8uylPue0U=
<mask> github.com/shirou/w32 v0.0.0-20160930032740-bb4de0191aa4/go.mod h1:qsXQc7+bwAM3Q1u/4XEfrquwF8Lw7D7y5cD8CuHnfIc=
<mask> github.com/sparrc/go-ping v0.0.0-20181106165434-ef3ab45e41b0 h1:mu7brOsdaH5Dqf93vdch+mr/0To8Sgc+yInt/jE/RJM=
</s> * whois: use whois.arin.net
+ robust redirect mechanism
* decrease timeout 30sec -> 5sec
* faster response parsing
* don't use likexian/whois-go package </s> remove github.com/likexian/whois-go v0.0.0-20190627090909-384b3df3fc49
</s> add </s> remove resp, err := whois.Whois("8.8.8.8")
</s> add w := Whois{timeoutMsec: 5000}
resp, err := w.queryAll("8.8.8.8") </s> remove github.com/AdguardTeam/dnsproxy v0.19.4 h1:rZb40VUr/yN8RG4j3+NuGqODmPvte7acPfSDl0j2wiU=
github.com/AdguardTeam/dnsproxy v0.19.4/go.mod h1:NaulY9i279jZwN8QBbvbZnn5HkrjBgJi4hbFY5nW+Kc=
</s> add </s> remove github.com/AdguardTeam/urlfilter v0.5.0 h1:ATzs2Er0BMt7NbZnFJ4UEzt3uIV+rydbQCYqBXNRbJc=
github.com/AdguardTeam/urlfilter v0.5.0/go.mod h1:6YehXZ8e0Hx2MvqeQWLFom6IkPinm04tNhO1CkwAxmg=
</s> add | [
"keep",
"keep",
"keep",
"add",
"keep",
"keep",
"keep",
"keep"
] | https://github.com/AdguardTeam/AdGuardHome/commit/37fe3c148f6edcb7ad8567ed2490f8c67208c50f | go.sum |
<mask> golang.org/x/crypto v0.0.0-20181203042331-505ab145d0a9 h1:mKdxBk7AujPs8kU4m80U72y/zjbZ3UcXC7dClwKbUI0=
<mask> golang.org/x/crypto v0.0.0-20181203042331-505ab145d0a9/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
<mask> golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2 h1:VklqNMn3ovrHsnt90PveolxSbWFaJdECFbxSq0Mqo2M=
<mask> golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
<mask> golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5 h1:58fnuSXlxZmFdJyvtTFVmVhcMLU6v5fEb/ok4wyqtNU=
<mask> golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
<mask> golang.org/x/crypto v0.0.0-20190923035154-9ee001bba392/go.mod h1:/lpIB1dKB+9EgE3H3cr1v9wB50oz8l4C4h62xy7jSTY=
<mask> golang.org/x/crypto v0.0.0-20191001170739-f9e2070545dc h1:KyTYo8xkh/2WdbFLUyQwBS0Jfn3qfZ9QmuPbok2oENE=
<mask> golang.org/x/crypto v0.0.0-20191001170739-f9e2070545dc/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
<mask> golang.org/x/net v0.0.0-20181102091132-c10e9556a7bc/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
<mask> golang.org/x/net v0.0.0-20190119204137-ed066c81e75e h1:MDa3fSUp6MdYHouVmCCNz/zaH2a6CRcxY3VhT/K3C5Q=
</s> * whois: use whois.arin.net
+ robust redirect mechanism
* decrease timeout 30sec -> 5sec
* faster response parsing
* don't use likexian/whois-go package </s> remove github.com/likexian/whois-go v0.0.0-20190627090909-384b3df3fc49
</s> add </s> remove assert.True(t, strings.Index(resp, "OrgName: Google LLC") != -1)
assert.True(t, strings.Index(resp, "City: Mountain View") != -1)
assert.True(t, strings.Index(resp, "Country: US") != -1)
</s> add </s> remove resp, err := whois.Whois("8.8.8.8")
</s> add w := Whois{timeoutMsec: 5000}
resp, err := w.queryAll("8.8.8.8") </s> remove github.com/AdguardTeam/dnsproxy v0.19.4 h1:rZb40VUr/yN8RG4j3+NuGqODmPvte7acPfSDl0j2wiU=
github.com/AdguardTeam/dnsproxy v0.19.4/go.mod h1:NaulY9i279jZwN8QBbvbZnn5HkrjBgJi4hbFY5nW+Kc=
</s> add </s> remove github.com/AdguardTeam/urlfilter v0.5.0 h1:ATzs2Er0BMt7NbZnFJ4UEzt3uIV+rydbQCYqBXNRbJc=
github.com/AdguardTeam/urlfilter v0.5.0/go.mod h1:6YehXZ8e0Hx2MvqeQWLFom6IkPinm04tNhO1CkwAxmg=
</s> add | [
"keep",
"keep",
"keep",
"keep",
"replace",
"replace",
"keep",
"keep",
"keep",
"keep",
"keep"
] | https://github.com/AdguardTeam/AdGuardHome/commit/37fe3c148f6edcb7ad8567ed2490f8c67208c50f | go.sum |
|
golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e h1:vcxGaoTs7kV8m5Np9uUNQin4BrLOthgV7252N8V+FwY= | <mask> golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
<mask> golang.org/x/sync v0.0.0-20190423024810-112230192c58 h1:8gQV6CLnAEikrhgkHFbMAEhagSSnXWGV915qUMm9mrU=
<mask> golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
<mask> golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
<mask> golang.org/x/sys v0.0.0-20190122071731-054c452bb702 h1:Lk4tbZFnlyPgV+sLgTw5yGfzrlOn9kx4vSombi2FFlY=
<mask> golang.org/x/sys v0.0.0-20190122071731-054c452bb702/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
<mask> golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a h1:1BGLXjeY4akVXGgbC9HugT3Jv3hCI0z56oJR5vAMgBU=
<mask> golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
<mask> golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
</s> * whois: use whois.arin.net
+ robust redirect mechanism
* decrease timeout 30sec -> 5sec
* faster response parsing
* don't use likexian/whois-go package </s> remove golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0 h1:HyfiK1WMnHj5FXFXatD+Qs1A/xC2Run6RzeW1SyHxpc=
golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
</s> add </s> remove assert.True(t, strings.Index(resp, "OrgName: Google LLC") != -1)
assert.True(t, strings.Index(resp, "City: Mountain View") != -1)
assert.True(t, strings.Index(resp, "Country: US") != -1)
</s> add </s> remove resp, err := whois.Whois("8.8.8.8")
</s> add w := Whois{timeoutMsec: 5000}
resp, err := w.queryAll("8.8.8.8") </s> remove github.com/likexian/whois-go v0.0.0-20190627090909-384b3df3fc49
</s> add </s> remove github.com/AdguardTeam/dnsproxy v0.19.4 h1:rZb40VUr/yN8RG4j3+NuGqODmPvte7acPfSDl0j2wiU=
github.com/AdguardTeam/dnsproxy v0.19.4/go.mod h1:NaulY9i279jZwN8QBbvbZnn5HkrjBgJi4hbFY5nW+Kc=
</s> add | [
"keep",
"keep",
"add",
"keep",
"keep",
"keep",
"keep",
"keep",
"keep"
] | https://github.com/AdguardTeam/AdGuardHome/commit/37fe3c148f6edcb7ad8567ed2490f8c67208c50f | go.sum |
<mask> golang.org/x/sys v0.0.0-20190122071731-054c452bb702/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
<mask> golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a h1:1BGLXjeY4akVXGgbC9HugT3Jv3hCI0z56oJR5vAMgBU=
<mask> golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
<mask> golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
<mask> golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0 h1:HyfiK1WMnHj5FXFXatD+Qs1A/xC2Run6RzeW1SyHxpc=
<mask> golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
<mask> golang.org/x/sys v0.0.0-20190922100055-0a153f010e69/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
<mask> golang.org/x/sys v0.0.0-20190924154521-2837fb4f24fe/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
<mask> golang.org/x/sys v0.0.0-20191002091554-b397fe3ad8ed h1:5TJcLJn2a55mJjzYk0yOoqN8X1OdvBDUnaZaKKyQtkY=
<mask> golang.org/x/sys v0.0.0-20191002091554-b397fe3ad8ed/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
<mask> golang.org/x/text v0.3.0 h1:g61tztE5qeGQ89tm6NTjjM9VPIm088od1l6aSorWRWg=
</s> * whois: use whois.arin.net
+ robust redirect mechanism
* decrease timeout 30sec -> 5sec
* faster response parsing
* don't use likexian/whois-go package </s> remove assert.True(t, strings.Index(resp, "OrgName: Google LLC") != -1)
assert.True(t, strings.Index(resp, "City: Mountain View") != -1)
assert.True(t, strings.Index(resp, "Country: US") != -1)
</s> add </s> remove resp, err := whois.Whois("8.8.8.8")
</s> add w := Whois{timeoutMsec: 5000}
resp, err := w.queryAll("8.8.8.8") </s> remove github.com/likexian/whois-go v0.0.0-20190627090909-384b3df3fc49
</s> add </s> remove github.com/AdguardTeam/dnsproxy v0.19.4 h1:rZb40VUr/yN8RG4j3+NuGqODmPvte7acPfSDl0j2wiU=
github.com/AdguardTeam/dnsproxy v0.19.4/go.mod h1:NaulY9i279jZwN8QBbvbZnn5HkrjBgJi4hbFY5nW+Kc=
</s> add | [
"keep",
"keep",
"keep",
"keep",
"replace",
"replace",
"keep",
"keep",
"keep",
"keep",
"keep"
] | https://github.com/AdguardTeam/AdGuardHome/commit/37fe3c148f6edcb7ad8567ed2490f8c67208c50f | go.sum |
|
gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15 h1:YR8cESwS4TdDjEe65xsg0ogRM/Nc3DYOhEAlW+xobZo= | <mask> gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
<mask> gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
<mask> gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
<mask> gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
<mask> gopkg.in/yaml.v2 v2.2.3 h1:fvjTMHxHEw/mxHbtzPi3JCcKXQRAnQTBRo6YCJSVHKI=
<mask> gopkg.in/yaml.v2 v2.2.3/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
<mask> rsc.io/pdf v0.1.1/go.mod h1:n8OzWcQ6Sp37PL01nO98y4iUCRdTGarVfzxY20ICaU4=
</s> * whois: use whois.arin.net
+ robust redirect mechanism
* decrease timeout 30sec -> 5sec
* faster response parsing
* don't use likexian/whois-go package </s> remove assert.True(t, strings.Index(resp, "OrgName: Google LLC") != -1)
assert.True(t, strings.Index(resp, "City: Mountain View") != -1)
assert.True(t, strings.Index(resp, "Country: US") != -1)
</s> add </s> remove resp, err := whois.Whois("8.8.8.8")
</s> add w := Whois{timeoutMsec: 5000}
resp, err := w.queryAll("8.8.8.8") </s> remove github.com/likexian/whois-go v0.0.0-20190627090909-384b3df3fc49
</s> add </s> remove github.com/AdguardTeam/dnsproxy v0.19.4 h1:rZb40VUr/yN8RG4j3+NuGqODmPvte7acPfSDl0j2wiU=
github.com/AdguardTeam/dnsproxy v0.19.4/go.mod h1:NaulY9i279jZwN8QBbvbZnn5HkrjBgJi4hbFY5nW+Kc=
</s> add </s> remove github.com/AdguardTeam/urlfilter v0.5.0 h1:ATzs2Er0BMt7NbZnFJ4UEzt3uIV+rydbQCYqBXNRbJc=
github.com/AdguardTeam/urlfilter v0.5.0/go.mod h1:6YehXZ8e0Hx2MvqeQWLFom6IkPinm04tNhO1CkwAxmg=
</s> add | [
"keep",
"add",
"keep",
"keep",
"keep",
"keep",
"keep"
] | https://github.com/AdguardTeam/AdGuardHome/commit/37fe3c148f6edcb7ad8567ed2490f8c67208c50f | go.sum |
const topClientsNumber = 30 // the number of clients to get
topClients := config.stats.GetTopClientsIP(topClientsNumber) | <mask>
<mask> config.dnsctx.rdns = InitRDNS(&config.clients)
<mask> config.dnsctx.whois = initWhois(&config.clients)
<mask>
<mask> topClients := config.stats.GetTopData(30)
<mask> for _, ip := range topClients {
<mask> ipAddr := net.ParseIP(ip)
<mask> if !ipAddr.IsLoopback() {
<mask> config.dnsctx.rdns.Begin(ip)
<mask> }
</s> * whois: use whois.arin.net
+ robust redirect mechanism
* decrease timeout 30sec -> 5sec
* faster response parsing
* don't use likexian/whois-go package </s> remove info := whoisProcess(ip)
</s> add info := w.process(ip) </s> remove lines := strings.Split(data, "\n")
for _, ln := range lines {
ln = strings.TrimSpace(ln)
if len(ln) == 0 || ln[0] == '#' {
</s> add for len(data) != 0 {
ln := SplitNext(&data, '\n')
if len(ln) == 0 || ln[0] == '#' || ln[0] == '%' { </s> remove resp, err := whois.Whois(ip)
</s> add resp, err := w.queryAll(ip) | [
"keep",
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep",
"keep",
"keep"
] | https://github.com/AdguardTeam/AdGuardHome/commit/37fe3c148f6edcb7ad8567ed2490f8c67208c50f | home/dns.go |
"github.com/stretchr/testify/assert" | <mask> import (
<mask> "testing"
<mask>
<mask> "github.com/AdguardTeam/golibs/log"
<mask> )
<mask>
<mask> func TestGetValidNetInterfacesForWeb(t *testing.T) {
<mask> ifaces, err := getValidNetInterfacesForWeb()
</s> * whois: use whois.arin.net
+ robust redirect mechanism
* decrease timeout 30sec -> 5sec
* faster response parsing
* don't use likexian/whois-go package </s> remove whois "github.com/likexian/whois-go"
</s> add </s> remove "strings"
</s> add </s> remove resp, err := whois.Whois("8.8.8.8")
</s> add w := Whois{timeoutMsec: 5000}
resp, err := w.queryAll("8.8.8.8") | [
"keep",
"keep",
"keep",
"add",
"keep",
"keep",
"keep",
"keep"
] | https://github.com/AdguardTeam/AdGuardHome/commit/37fe3c148f6edcb7ad8567ed2490f8c67208c50f | home/helpers_test.go |
func TestSplitNext(t *testing.T) {
s := " a,b , c "
assert.True(t, SplitNext(&s, ',') == "a")
assert.True(t, SplitNext(&s, ',') == "b")
assert.True(t, SplitNext(&s, ',') == "c" && len(s) == 0)
} | <mask>
<mask> log.Printf("%v", iface)
<mask> }
<mask> }
</s> * whois: use whois.arin.net
+ robust redirect mechanism
* decrease timeout 30sec -> 5sec
* faster response parsing
* don't use likexian/whois-go package </s> remove info := whoisProcess(ip)
</s> add info := w.process(ip) </s> remove resp, err := whois.Whois(ip)
</s> add resp, err := w.queryAll(ip) </s> remove topClients := config.stats.GetTopData(30)
</s> add const topClientsNumber = 30 // the number of clients to get
topClients := config.stats.GetTopClientsIP(topClientsNumber) | [
"keep",
"keep",
"keep",
"add"
] | https://github.com/AdguardTeam/AdGuardHome/commit/37fe3c148f6edcb7ad8567ed2490f8c67208c50f | home/helpers_test.go |
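Editor's note: the commit notes repeated through these rows describe querying whois.arin.net directly over TCP port 43 with a 5-second timeout instead of depending on the likexian/whois-go package. A hedged sketch of such a raw WHOIS query follows; queryWhois is an illustrative name, and the redirect handling via whois:/ReferralServer: fields is deliberately left out to keep it short.

// Hedged sketch of a direct WHOIS lookup: dial the server on port 43, send
// the query terminated by CRLF, and read the whole reply under a deadline.
package main

import (
	"fmt"
	"io/ioutil"
	"net"
	"time"
)

func queryWhois(server, target string, timeout time.Duration) (string, error) {
	conn, err := net.DialTimeout("tcp", net.JoinHostPort(server, "43"), timeout)
	if err != nil {
		return "", fmt.Errorf("dialing %s: %w", server, err)
	}
	defer conn.Close()

	_ = conn.SetDeadline(time.Now().Add(timeout))
	if _, err = conn.Write([]byte(target + "\r\n")); err != nil {
		return "", fmt.Errorf("sending query: %w", err)
	}
	data, err := ioutil.ReadAll(conn)
	if err != nil {
		return "", fmt.Errorf("reading response: %w", err)
	}
	return string(data), nil
}

func main() {
	resp, err := queryWhois("whois.arin.net", "8.8.8.8", 5*time.Second)
	if err != nil {
		fmt.Println("whois error:", err)
		return
	}
	fmt.Println(resp)
}

A robust version would re-issue the query against any server named in a whois: or ReferralServer: line of the reply, capping the number of redirect hops.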
"fmt"
"io/ioutil"
"net" | <mask>
<mask> import (
<mask> "strings"
<mask> "sync"
<mask> "time"
<mask>
<mask> "github.com/AdguardTeam/golibs/log"
</s> * whois: use whois.arin.net
+ robust redirect mechanism
* decrease timeout 30sec -> 5sec
* faster response parsing
* don't use likexian/whois-go package </s> remove whois "github.com/likexian/whois-go"
</s> add </s> remove "strings"
</s> add </s> remove whois "github.com/likexian/whois-go"
</s> add | [
"keep",
"add",
"keep",
"keep",
"keep",
"keep",
"keep"
] | https://github.com/AdguardTeam/AdGuardHome/commit/37fe3c148f6edcb7ad8567ed2490f8c67208c50f | home/whois.go |
"time" | <mask> "io/ioutil"
<mask> "net"
<mask> "strings"
<mask> "sync"
<mask>
<mask> "github.com/AdguardTeam/golibs/log"
<mask> )
<mask>
<mask> const (
</s> * whois: use whois.arin.net
+ robust redirect mechanism
* decrease timeout 30sec -> 5sec
* faster response parsing
* don't use likexian/whois-go package </s> remove whois "github.com/likexian/whois-go"
</s> add </s> remove const maxValueLength = 250
</s> add const (
defaultServer = "whois.arin.net"
defaultPort = "43"
maxValueLength = 250
) </s> remove "strings"
</s> add | [
"keep",
"keep",
"keep",
"add",
"keep",
"keep",
"keep",
"keep",
"keep"
] | https://github.com/AdguardTeam/AdGuardHome/commit/37fe3c148f6edcb7ad8567ed2490f8c67208c50f | home/whois.go |
<mask> "strings"
<mask> "sync"
<mask>
<mask> "github.com/AdguardTeam/golibs/log"
<mask> whois "github.com/likexian/whois-go"
<mask> )
<mask>
<mask> const maxValueLength = 250
<mask>
<mask> // Whois - module context
</s> * whois: use whois.arin.net
+ robust redirect mechanism
* decrease timeout 30sec -> 5sec
* faster response parsing
* don't use likexian/whois-go package </s> remove const maxValueLength = 250
</s> add const (
defaultServer = "whois.arin.net"
defaultPort = "43"
maxValueLength = 250
) </s> remove clients *clientsContainer
ips map[string]bool
lock sync.Mutex
ipChan chan string
</s> add clients *clientsContainer
ips map[string]bool
lock sync.Mutex
ipChan chan string
timeoutMsec uint </s> remove "strings"
</s> add </s> remove whois "github.com/likexian/whois-go"
</s> add | [
"keep",
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep",
"keep",
"keep"
] | https://github.com/AdguardTeam/AdGuardHome/commit/37fe3c148f6edcb7ad8567ed2490f8c67208c50f | home/whois.go |
|
const (
defaultServer = "whois.arin.net"
defaultPort = "43"
maxValueLength = 250
) | <mask> "github.com/AdguardTeam/golibs/log"
<mask> whois "github.com/likexian/whois-go"
<mask> )
<mask>
<mask> const maxValueLength = 250
<mask>
<mask> // Whois - module context
<mask> type Whois struct {
<mask> clients *clientsContainer
<mask> ips map[string]bool
</s> * whois: use whois.arin.net
+ robust redirect mechanism
* decrease timeout 30sec -> 5sec
* faster response parsing
* don't use likexian/whois-go package </s> remove clients *clientsContainer
ips map[string]bool
lock sync.Mutex
ipChan chan string
</s> add clients *clientsContainer
ips map[string]bool
lock sync.Mutex
ipChan chan string
timeoutMsec uint </s> remove whois "github.com/likexian/whois-go"
</s> add </s> remove "strings"
</s> add </s> remove whois "github.com/likexian/whois-go"
</s> add | [
"keep",
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep",
"keep",
"keep"
] | https://github.com/AdguardTeam/AdGuardHome/commit/37fe3c148f6edcb7ad8567ed2490f8c67208c50f | home/whois.go |
clients *clientsContainer
ips map[string]bool
lock sync.Mutex
ipChan chan string
timeoutMsec uint | <mask> const maxValueLength = 250
<mask>
<mask> // Whois - module context
<mask> type Whois struct {
<mask> clients *clientsContainer
<mask> ips map[string]bool
<mask> lock sync.Mutex
<mask> ipChan chan string
<mask> }
<mask>
<mask> // Create module context
<mask> func initWhois(clients *clientsContainer) *Whois {
<mask> w := Whois{}
</s> * whois: use whois.arin.net
+ robust redirect mechanism
* decrease timeout 30sec -> 5sec
* faster response parsing
* don't use likexian/whois-go package </s> remove const maxValueLength = 250
</s> add const (
defaultServer = "whois.arin.net"
defaultPort = "43"
maxValueLength = 250
) </s> remove whois "github.com/likexian/whois-go"
</s> add </s> remove topClients := config.stats.GetTopData(30)
</s> add const topClientsNumber = 30 // the number of clients to get
topClients := config.stats.GetTopClientsIP(topClientsNumber) </s> remove info := whoisProcess(ip)
</s> add info := w.process(ip) | [
"keep",
"keep",
"keep",
"keep",
"replace",
"replace",
"replace",
"replace",
"keep",
"keep",
"keep",
"keep",
"keep"
] | https://github.com/AdguardTeam/AdGuardHome/commit/37fe3c148f6edcb7ad8567ed2490f8c67208c50f | home/whois.go |
w.timeoutMsec = 5000 | <mask> func initWhois(clients *clientsContainer) *Whois {
<mask> w := Whois{}
<mask> w.clients = clients
<mask> w.ips = make(map[string]bool)
<mask> w.ipChan = make(chan string, 255)
<mask> go w.workerLoop()
<mask> return &w
<mask> }
</s> * whois: use whois.arin.net
+ robust redirect mechanism
* decrease timeout 30sec -> 5sec
* faster response parsing
* don't use likexian/whois-go package </s> remove clients *clientsContainer
ips map[string]bool
lock sync.Mutex
ipChan chan string
</s> add clients *clientsContainer
ips map[string]bool
lock sync.Mutex
ipChan chan string
timeoutMsec uint </s> remove topClients := config.stats.GetTopData(30)
</s> add const topClientsNumber = 30 // the number of clients to get
topClients := config.stats.GetTopClientsIP(topClientsNumber) </s> remove const maxValueLength = 250
</s> add const (
defaultServer = "whois.arin.net"
defaultPort = "43"
maxValueLength = 250
) </s> remove func whoisProcess(ip string) [][]string {
</s> add func (w *Whois) process(ip string) [][]string { | [
"keep",
"add",
"keep",
"keep",
"keep",
"keep",
"keep",
"keep"
] | https://github.com/AdguardTeam/AdGuardHome/commit/37fe3c148f6edcb7ad8567ed2490f8c67208c50f | home/whois.go |
for len(data) != 0 {
ln := SplitNext(&data, '\n')
if len(ln) == 0 || ln[0] == '#' || ln[0] == '%' { | <mask> func whoisParse(data string) map[string]string {
<mask> m := map[string]string{}
<mask> descr := ""
<mask> netname := ""
<mask> lines := strings.Split(data, "\n")
<mask> for _, ln := range lines {
<mask> ln = strings.TrimSpace(ln)
<mask>
<mask> if len(ln) == 0 || ln[0] == '#' {
<mask> continue
<mask> }
<mask>
<mask> kv := strings.SplitN(ln, ":", 2)
<mask> if len(kv) != 2 {
</s> * whois: use whois.arin.net
+ robust redirect mechanism
* decrease timeout 30sec -> 5sec
* faster response parsing
* don't use likexian/whois-go package </s> remove info := whoisProcess(ip)
</s> add info := w.process(ip) </s> remove topClients := config.stats.GetTopData(30)
</s> add const topClientsNumber = 30 // the number of clients to get
topClients := config.stats.GetTopClientsIP(topClientsNumber) </s> remove func whoisProcess(ip string) [][]string {
</s> add func (w *Whois) process(ip string) [][]string { </s> remove assert.True(t, strings.Index(resp, "OrgName: Google LLC") != -1)
assert.True(t, strings.Index(resp, "City: Mountain View") != -1)
assert.True(t, strings.Index(resp, "Country: US") != -1)
</s> add | [
"keep",
"keep",
"keep",
"keep",
"replace",
"replace",
"replace",
"replace",
"replace",
"keep",
"keep",
"keep",
"keep",
"keep"
] | https://github.com/AdguardTeam/AdGuardHome/commit/37fe3c148f6edcb7ad8567ed2490f8c67208c50f | home/whois.go |
case "whois": // "whois: whois.arin.net"
m["whois"] = v
case "referralserver": // "ReferralServer: whois://whois.ripe.net"
if strings.HasPrefix(v, "whois://") {
m["whois"] = v[len("whois://"):]
} | <mask> case "descr":
<mask> descr = v
<mask> case "netname":
<mask> netname = v
<mask> }
<mask> }
<mask>
<mask> // descr or netname -> orgname
</s> * whois: use whois.arin.net
+ robust redirect mechanism
* decrease timeout 30sec -> 5sec
* faster response parsing
* don't use likexian/whois-go package </s> remove lines := strings.Split(data, "\n")
for _, ln := range lines {
ln = strings.TrimSpace(ln)
if len(ln) == 0 || ln[0] == '#' {
</s> add for len(data) != 0 {
ln := SplitNext(&data, '\n')
if len(ln) == 0 || ln[0] == '#' || ln[0] == '%' { </s> remove topClients := config.stats.GetTopData(30)
</s> add const topClientsNumber = 30 // the number of clients to get
topClients := config.stats.GetTopClientsIP(topClientsNumber) </s> remove const maxValueLength = 250
</s> add const (
defaultServer = "whois.arin.net"
defaultPort = "43"
maxValueLength = 250
) </s> remove info := whoisProcess(ip)
</s> add info := w.process(ip) | [
"keep",
"keep",
"keep",
"add",
"keep",
"keep",
"keep",
"keep"
] | https://github.com/AdguardTeam/AdGuardHome/commit/37fe3c148f6edcb7ad8567ed2490f8c67208c50f | home/whois.go |
func (w *Whois) process(ip string) [][]string { | <mask> return m
<mask> }
<mask>
<mask> // Request WHOIS information
<mask> func whoisProcess(ip string) [][]string {
<mask> data := [][]string{}
<mask> resp, err := whois.Whois(ip)
<mask> if err != nil {
<mask> log.Debug("Whois: error: %s IP:%s", err, ip)
<mask> return data
</s> * whois: use whois.arin.net
+ robust redirect mechanism
* decrease timeout 30sec -> 5sec
* faster response parsing
* don't use likexian/whois-go package </s> remove resp, err := whois.Whois(ip)
</s> add resp, err := w.queryAll(ip) </s> remove resp, err := whois.Whois("8.8.8.8")
</s> add w := Whois{timeoutMsec: 5000}
resp, err := w.queryAll("8.8.8.8") </s> remove assert.True(t, strings.Index(resp, "OrgName: Google LLC") != -1)
assert.True(t, strings.Index(resp, "City: Mountain View") != -1)
assert.True(t, strings.Index(resp, "Country: US") != -1)
</s> add </s> remove lines := strings.Split(data, "\n")
for _, ln := range lines {
ln = strings.TrimSpace(ln)
if len(ln) == 0 || ln[0] == '#' {
</s> add for len(data) != 0 {
ln := SplitNext(&data, '\n')
if len(ln) == 0 || ln[0] == '#' || ln[0] == '%' { </s> remove whois "github.com/likexian/whois-go"
</s> add | [
"keep",
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep",
"keep",
"keep"
] | https://github.com/AdguardTeam/AdGuardHome/commit/37fe3c148f6edcb7ad8567ed2490f8c67208c50f | home/whois.go |
resp, err := w.queryAll(ip) | <mask>
<mask> // Request WHOIS information
<mask> func whoisProcess(ip string) [][]string {
<mask> data := [][]string{}
<mask> resp, err := whois.Whois(ip)
<mask> if err != nil {
<mask> log.Debug("Whois: error: %s IP:%s", err, ip)
<mask> return data
<mask> }
<mask>
</s> * whois: use whois.arin.net
+ robust redirect mechanism
* decrease timeout 30sec -> 5sec
* faster response parsing
* don't use likexian/whois-go package </s> remove func whoisProcess(ip string) [][]string {
</s> add func (w *Whois) process(ip string) [][]string { </s> remove resp, err := whois.Whois("8.8.8.8")
</s> add w := Whois{timeoutMsec: 5000}
resp, err := w.queryAll("8.8.8.8") </s> remove assert.True(t, strings.Index(resp, "OrgName: Google LLC") != -1)
assert.True(t, strings.Index(resp, "City: Mountain View") != -1)
assert.True(t, strings.Index(resp, "Country: US") != -1)
</s> add </s> remove whois "github.com/likexian/whois-go"
</s> add </s> remove lines := strings.Split(data, "\n")
for _, ln := range lines {
ln = strings.TrimSpace(ln)
if len(ln) == 0 || ln[0] == '#' {
</s> add for len(data) != 0 {
ln := SplitNext(&data, '\n')
if len(ln) == 0 || ln[0] == '#' || ln[0] == '%' { | [
"keep",
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep",
"keep",
"keep"
] | https://github.com/AdguardTeam/AdGuardHome/commit/37fe3c148f6edcb7ad8567ed2490f8c67208c50f | home/whois.go |
info := w.process(ip) | <mask> for {
<mask> var ip string
<mask> ip = <-w.ipChan
<mask>
<mask> info := whoisProcess(ip)
<mask> if len(info) == 0 {
<mask> continue
<mask> }
<mask>
<mask> w.clients.SetWhoisInfo(ip, info)
</s> * whois: use whois.arin.net
+ robust redirect mechanism
* decrease timeout 30sec -> 5sec
* faster response parsing
* don't use likexian/whois-go package </s> remove lines := strings.Split(data, "\n")
for _, ln := range lines {
ln = strings.TrimSpace(ln)
if len(ln) == 0 || ln[0] == '#' {
</s> add for len(data) != 0 {
ln := SplitNext(&data, '\n')
if len(ln) == 0 || ln[0] == '#' || ln[0] == '%' { </s> remove topClients := config.stats.GetTopData(30)
</s> add const topClientsNumber = 30 // the number of clients to get
topClients := config.stats.GetTopClientsIP(topClientsNumber) </s> remove clients *clientsContainer
ips map[string]bool
lock sync.Mutex
ipChan chan string
</s> add clients *clientsContainer
ips map[string]bool
lock sync.Mutex
ipChan chan string
timeoutMsec uint | [
"keep",
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep",
"keep",
"keep"
] | https://github.com/AdguardTeam/AdGuardHome/commit/37fe3c148f6edcb7ad8567ed2490f8c67208c50f | home/whois.go |
<mask> package home
<mask>
<mask> import (
<mask> "strings"
<mask> "testing"
<mask>
<mask> whois "github.com/likexian/whois-go"
<mask> "github.com/stretchr/testify/assert"
<mask> )
</s> * whois: use whois.arin.net
+ robust redirect mechanism
* decrease timeout 30sec -> 5sec
* faster response parsing
* don't use likexian/whois-go package </s> remove whois "github.com/likexian/whois-go"
</s> add </s> remove whois "github.com/likexian/whois-go"
</s> add </s> remove const maxValueLength = 250
</s> add const (
defaultServer = "whois.arin.net"
defaultPort = "43"
maxValueLength = 250
) | [
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep",
"keep",
"keep"
] | https://github.com/AdguardTeam/AdGuardHome/commit/37fe3c148f6edcb7ad8567ed2490f8c67208c50f | home/whois_test.go |
|
<mask> import (
<mask> "strings"
<mask> "testing"
<mask>
<mask> whois "github.com/likexian/whois-go"
<mask> "github.com/stretchr/testify/assert"
<mask> )
<mask>
<mask> func TestWhois(t *testing.T) {
<mask> resp, err := whois.Whois("8.8.8.8")
</s> * whois: use whois.arin.net
+ robust redirect mechanism
* decrease timeout 30sec -> 5sec
* faster response parsing
* don't use likexian/whois-go package </s> remove "strings"
</s> add </s> remove resp, err := whois.Whois("8.8.8.8")
</s> add w := Whois{timeoutMsec: 5000}
resp, err := w.queryAll("8.8.8.8") </s> remove assert.True(t, strings.Index(resp, "OrgName: Google LLC") != -1)
assert.True(t, strings.Index(resp, "City: Mountain View") != -1)
assert.True(t, strings.Index(resp, "Country: US") != -1)
</s> add </s> remove whois "github.com/likexian/whois-go"
</s> add | [
"keep",
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep",
"keep",
"keep"
] | https://github.com/AdguardTeam/AdGuardHome/commit/37fe3c148f6edcb7ad8567ed2490f8c67208c50f | home/whois_test.go |
|
w := Whois{timeoutMsec: 5000}
resp, err := w.queryAll("8.8.8.8") | <mask> "github.com/stretchr/testify/assert"
<mask> )
<mask>
<mask> func TestWhois(t *testing.T) {
<mask> resp, err := whois.Whois("8.8.8.8")
<mask> assert.True(t, err == nil)
<mask> assert.True(t, strings.Index(resp, "OrgName: Google LLC") != -1)
<mask> assert.True(t, strings.Index(resp, "City: Mountain View") != -1)
<mask> assert.True(t, strings.Index(resp, "Country: US") != -1)
<mask> m := whoisParse(resp)
</s> * whois: use whois.arin.net
+ robust redirect mechanism
* decrease timeout 30sec -> 5sec
* faster response parsing
* don't use likexian/whois-go package </s> remove assert.True(t, strings.Index(resp, "OrgName: Google LLC") != -1)
assert.True(t, strings.Index(resp, "City: Mountain View") != -1)
assert.True(t, strings.Index(resp, "Country: US") != -1)
</s> add </s> remove whois "github.com/likexian/whois-go"
</s> add </s> remove resp, err := whois.Whois(ip)
</s> add resp, err := w.queryAll(ip) </s> remove func whoisProcess(ip string) [][]string {
</s> add func (w *Whois) process(ip string) [][]string { | [
"keep",
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep",
"keep",
"keep"
] | https://github.com/AdguardTeam/AdGuardHome/commit/37fe3c148f6edcb7ad8567ed2490f8c67208c50f | home/whois_test.go |
<mask>
<mask> func TestWhois(t *testing.T) {
<mask> resp, err := whois.Whois("8.8.8.8")
<mask> assert.True(t, err == nil)
<mask> assert.True(t, strings.Index(resp, "OrgName: Google LLC") != -1)
<mask> assert.True(t, strings.Index(resp, "City: Mountain View") != -1)
<mask> assert.True(t, strings.Index(resp, "Country: US") != -1)
<mask> m := whoisParse(resp)
<mask> assert.True(t, m["orgname"] == "Google LLC")
<mask> assert.True(t, m["country"] == "US")
<mask> assert.True(t, m["city"] == "Mountain View")
<mask> }
</s> * whois: use whois.arin.net
+ robust redirect mechanism
* decrease timeout 30sec -> 5sec
* faster response parsing
* don't use likexian/whois-go package </s> remove resp, err := whois.Whois("8.8.8.8")
</s> add w := Whois{timeoutMsec: 5000}
resp, err := w.queryAll("8.8.8.8") </s> remove lines := strings.Split(data, "\n")
for _, ln := range lines {
ln = strings.TrimSpace(ln)
if len(ln) == 0 || ln[0] == '#' {
</s> add for len(data) != 0 {
ln := SplitNext(&data, '\n')
if len(ln) == 0 || ln[0] == '#' || ln[0] == '%' { </s> remove whois "github.com/likexian/whois-go"
</s> add </s> remove resp, err := whois.Whois(ip)
</s> add resp, err := w.queryAll(ip) | [
"keep",
"keep",
"keep",
"keep",
"replace",
"replace",
"replace",
"keep",
"keep",
"keep",
"keep",
"keep"
] | https://github.com/AdguardTeam/AdGuardHome/commit/37fe3c148f6edcb7ad8567ed2490f8c67208c50f | home/whois_test.go |
|
"github.com/AdguardTeam/AdGuardHome/internal/filtering/hashprefix" | <mask> "github.com/AdguardTeam/AdGuardHome/internal/aghtest"
<mask> "github.com/AdguardTeam/AdGuardHome/internal/dhcpd"
<mask> "github.com/AdguardTeam/AdGuardHome/internal/filtering"
<mask> "github.com/AdguardTeam/AdGuardHome/internal/filtering/safesearch"
<mask> "github.com/AdguardTeam/dnsproxy/proxy"
<mask> "github.com/AdguardTeam/dnsproxy/upstream"
<mask> "github.com/AdguardTeam/golibs/netutil"
</s> Pull request 1837: AG-21462-imp-safebrowsing-parental
Merge in DNS/adguard-home from AG-21462-imp-safebrowsing-parental to master
Squashed commit of the following:
commit 85016d4f1105e21a407efade0bd45b8362808061
Merge: 0e61edade 620b51e3e
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 16:36:30 2023 +0300
Merge branch 'master' into AG-21462-imp-safebrowsing-parental
commit 0e61edadeff34f6305e941c1db94575c82f238d9
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 14:51:37 2023 +0300
filtering: imp tests
commit 994255514cc0f67dfe33d5a0892432e8924d1e36
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 11:13:19 2023 +0300
filtering: fix typo
commit 96d1069573171538333330d6af94ef0f4208a9c4
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 11:00:18 2023 +0300
filtering: imp code more
commit c2a5620b04c4a529eea69983f1520cd2bc82ea9b
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 26 19:13:26 2023 +0300
all: add todo
commit e5dcc2e9701f8bccfde6ef8c01a4a2e7eb31599e
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 26 14:36:08 2023 +0300
all: imp code more
commit b6e734ccbeda82669023f6578481260b7c1f7161
Author: Stanislav Chzhen <[email protected]>
Date: Tue Apr 25 15:01:56 2023 +0300
filtering: imp code
commit 530648dadf836c1a4bd9917e0d3b47256fa8ff52
Author: Stanislav Chzhen <[email protected]>
Date: Mon Apr 24 20:06:36 2023 +0300
all: imp code
commit 49fa6e587052a40bb431fea457701ee860493527
Author: Stanislav Chzhen <[email protected]>
Date: Mon Apr 24 14:57:19 2023 +0300
all: rm safe browsing ctx
commit bbcb66cb03e18fa875e3c33cf16295892739e507
Author: Stanislav Chzhen <[email protected]>
Date: Fri Apr 21 17:54:18 2023 +0300
filtering: add cache item
commit cb7c9fffe8c4ff5e7a21ca912c223c799f61385f
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 20 18:43:02 2023 +0300
filtering: fix hashes
commit 153fec46270212af03f3631bfb42c5d680c4e142
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 20 16:15:15 2023 +0300
filtering: add test cases
commit 09372f92bbb1fc082f1b1283594ee589100209c5
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 20 15:38:05 2023 +0300
filtering: imp code
commit 466bc26d524ea6d1c3efb33692a7785d39e491ca
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 19 18:38:40 2023 +0300
filtering: add tests
commit 24365ecf8c60512fdac65833ee603c80864ae018
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 19 11:38:57 2023 +0300
filtering: add hashprefix </s> remove "github.com/AdguardTeam/golibs/cache"
</s> add "github.com/AdguardTeam/AdGuardHome/internal/filtering/hashprefix" </s> remove "time"
</s> add </s> remove "strings"
</s> add | [
"keep",
"keep",
"add",
"keep",
"keep",
"keep",
"keep"
] | https://github.com/AdguardTeam/AdGuardHome/commit/381f2f651d1a94d29bf534185e531b05a755f16c | internal/dnsforward/dnsforward_test.go |
const (
hostname = "wmconvirus.narod.ru"
cacheTime = 10 * time.Minute
cacheSize = 10000
)
sbChecker := hashprefix.New(&hashprefix.Config{
CacheTime: cacheTime,
CacheSize: cacheSize,
Upstream: aghtest.NewBlockUpstream(hostname, true),
}) | <mask> assert.Equalf(t, net.IP{127, 0, 0, 1}, a.A, "dns server %s returned wrong answer instead of 8.8.8.8: %v", addr, a.A)
<mask> }
<mask>
<mask> func TestBlockedBySafeBrowsing(t *testing.T) {
<mask> const hostname = "wmconvirus.narod.ru"
<mask>
<mask> sbUps := aghtest.NewBlockUpstream(hostname, true)
<mask> ans4, _ := (&aghtest.TestResolver{}).HostToIPs(hostname)
<mask>
<mask> filterConf := &filtering.Config{
</s> Pull request 1837: AG-21462-imp-safebrowsing-parental
Merge in DNS/adguard-home from AG-21462-imp-safebrowsing-parental to master
Squashed commit of the following:
commit 85016d4f1105e21a407efade0bd45b8362808061
Merge: 0e61edade 620b51e3e
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 16:36:30 2023 +0300
Merge branch 'master' into AG-21462-imp-safebrowsing-parental
commit 0e61edadeff34f6305e941c1db94575c82f238d9
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 14:51:37 2023 +0300
filtering: imp tests
commit 994255514cc0f67dfe33d5a0892432e8924d1e36
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 11:13:19 2023 +0300
filtering: fix typo
commit 96d1069573171538333330d6af94ef0f4208a9c4
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 11:00:18 2023 +0300
filtering: imp code more
commit c2a5620b04c4a529eea69983f1520cd2bc82ea9b
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 26 19:13:26 2023 +0300
all: add todo
commit e5dcc2e9701f8bccfde6ef8c01a4a2e7eb31599e
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 26 14:36:08 2023 +0300
all: imp code more
commit b6e734ccbeda82669023f6578481260b7c1f7161
Author: Stanislav Chzhen <[email protected]>
Date: Tue Apr 25 15:01:56 2023 +0300
filtering: imp code
commit 530648dadf836c1a4bd9917e0d3b47256fa8ff52
Author: Stanislav Chzhen <[email protected]>
Date: Mon Apr 24 20:06:36 2023 +0300
all: imp code
commit 49fa6e587052a40bb431fea457701ee860493527
Author: Stanislav Chzhen <[email protected]>
Date: Mon Apr 24 14:57:19 2023 +0300
all: rm safe browsing ctx
commit bbcb66cb03e18fa875e3c33cf16295892739e507
Author: Stanislav Chzhen <[email protected]>
Date: Fri Apr 21 17:54:18 2023 +0300
filtering: add cache item
commit cb7c9fffe8c4ff5e7a21ca912c223c799f61385f
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 20 18:43:02 2023 +0300
filtering: fix hashes
commit 153fec46270212af03f3631bfb42c5d680c4e142
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 20 16:15:15 2023 +0300
filtering: add test cases
commit 09372f92bbb1fc082f1b1283594ee589100209c5
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 20 15:38:05 2023 +0300
filtering: imp code
commit 466bc26d524ea6d1c3efb33692a7785d39e491ca
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 19 18:38:40 2023 +0300
filtering: add tests
commit 24365ecf8c60512fdac65833ee603c80864ae018
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 19 11:38:57 2023 +0300
filtering: add hashprefix </s> remove sbUps := aghtest.NewBlockUpstream(hostname, true)
</s> add </s> remove ParentalEnabled: true,
SafeBrowsingEnabled: false,
</s> add ParentalEnabled: true,
SafeBrowsingEnabled: false,
SafeBrowsingChecker: newChecker(sbBlocked),
ParentalControlChecker: newChecker(pcBlocked), </s> remove d, setts := newForTest(t, &Config{SafeBrowsingEnabled: true}, nil)
</s> add d, setts := newForTest(t, &Config{
SafeBrowsingEnabled: true,
SafeBrowsingChecker: newChecker(sbBlocked),
}, nil) </s> remove // TODO(a.garipov): Make configurable.
const (
dnsTimeout = 3 * time.Second
defaultSafebrowsingServer = `https://family.adguard-dns.com/dns-query`
defaultParentalServer = `https://family.adguard-dns.com/dns-query`
sbTXTSuffix = `sb.dns.adguard.com.`
pcTXTSuffix = `pc.dns.adguard.com.`
)
// SetParentalUpstream sets the parental upstream for *DNSFilter.
//
// TODO(e.burkov): Remove this in v1 API to forbid the direct access.
func (d *DNSFilter) SetParentalUpstream(u upstream.Upstream) {
d.parentalUpstream = u
}
// SetSafeBrowsingUpstream sets the safe browsing upstream for *DNSFilter.
//
// TODO(e.burkov): Remove this in v1 API to forbid the direct access.
func (d *DNSFilter) SetSafeBrowsingUpstream(u upstream.Upstream) {
d.safeBrowsingUpstream = u
}
func (d *DNSFilter) initSecurityServices() error {
var err error
d.safeBrowsingServer = defaultSafebrowsingServer
d.parentalServer = defaultParentalServer
opts := &upstream.Options{
Timeout: dnsTimeout,
ServerIPAddrs: []net.IP{
{94, 140, 14, 15},
{94, 140, 15, 16},
net.ParseIP("2a10:50c0::bad1:ff"),
net.ParseIP("2a10:50c0::bad2:ff"),
},
}
parUps, err := upstream.AddressToUpstream(d.parentalServer, opts)
if err != nil {
return fmt.Errorf("converting parental server: %w", err)
}
d.SetParentalUpstream(parUps)
sbUps, err := upstream.AddressToUpstream(d.safeBrowsingServer, opts)
if err != nil {
return fmt.Errorf("converting safe browsing server: %w", err)
}
d.SetSafeBrowsingUpstream(sbUps)
return nil
}
/*
expire byte[4]
hash byte[32]
...
*/
func (c *sbCtx) setCache(prefix, hashes []byte) {
d := make([]byte, 4+len(hashes))
expire := uint(time.Now().Unix()) + c.cacheTime*60
binary.BigEndian.PutUint32(d[:4], uint32(expire))
copy(d[4:], hashes)
c.cache.Set(prefix, d)
log.Debug("%s: stored in cache: %v", c.svc, prefix)
}
// findInHash returns 32-byte hash if it's found in hashToHost.
func (c *sbCtx) findInHash(val []byte) (hash32 [32]byte, found bool) {
for i := 4; i < len(val); i += 32 {
hash := val[i : i+32]
copy(hash32[:], hash[0:32])
_, found = c.hashToHost[hash32]
if found {
return hash32, found
}
}
return [32]byte{}, false
}
func (c *sbCtx) getCached() int {
now := time.Now().Unix()
hashesToRequest := map[[32]byte]string{}
for k, v := range c.hashToHost {
// nolint:looppointer // The subsilce is used for a safe cache lookup.
val := c.cache.Get(k[0:2])
if val == nil || now >= int64(binary.BigEndian.Uint32(val)) {
hashesToRequest[k] = v
continue
}
if hash32, found := c.findInHash(val); found {
log.Debug("%s: found in cache: %s: blocked by %v", c.svc, c.host, hash32)
return 1
}
}
if len(hashesToRequest) == 0 {
log.Debug("%s: found in cache: %s: not blocked", c.svc, c.host)
return -1
}
c.hashToHost = hashesToRequest
return 0
}
type sbCtx struct {
host string
svc string
hashToHost map[[32]byte]string
cache cache.Cache
cacheTime uint
}
func hostnameToHashes(host string) map[[32]byte]string {
hashes := map[[32]byte]string{}
tld, icann := publicsuffix.PublicSuffix(host)
if !icann {
// private suffixes like cloudfront.net
tld = ""
}
curhost := host
nDots := 0
for i := len(curhost) - 1; i >= 0; i-- {
if curhost[i] == '.' {
nDots++
if nDots == 4 {
curhost = curhost[i+1:] // "xxx.a.b.c.d" -> "a.b.c.d"
break
}
}
}
for {
if curhost == "" {
// we've reached end of string
break
}
if tld != "" && curhost == tld {
// we've reached the TLD, don't hash it
break
}
sum := sha256.Sum256([]byte(curhost))
hashes[sum] = curhost
pos := strings.IndexByte(curhost, byte('.'))
if pos < 0 {
break
}
curhost = curhost[pos+1:]
}
return hashes
}
// convert hash array to string
func (c *sbCtx) getQuestion() string {
b := &strings.Builder{}
for hash := range c.hashToHost {
// nolint:looppointer // The subsilce is used for safe hex encoding.
stringutil.WriteToBuilder(b, hex.EncodeToString(hash[0:2]), ".")
}
if c.svc == "SafeBrowsing" {
stringutil.WriteToBuilder(b, sbTXTSuffix)
return b.String()
}
stringutil.WriteToBuilder(b, pcTXTSuffix)
return b.String()
}
// Find the target hash in TXT response
func (c *sbCtx) processTXT(resp *dns.Msg) (bool, [][]byte) {
matched := false
hashes := [][]byte{}
for _, a := range resp.Answer {
txt, ok := a.(*dns.TXT)
if !ok {
continue
}
log.Debug("%s: received hashes for %s: %v", c.svc, c.host, txt.Txt)
for _, t := range txt.Txt {
if len(t) != 32*2 {
continue
}
hash, err := hex.DecodeString(t)
if err != nil {
continue
}
hashes = append(hashes, hash)
if !matched {
var hash32 [32]byte
copy(hash32[:], hash)
var hashHost string
hashHost, ok = c.hashToHost[hash32]
if ok {
log.Debug("%s: matched %s by %s/%s", c.svc, c.host, hashHost, t)
matched = true
}
}
}
}
return matched, hashes
}
func (c *sbCtx) storeCache(hashes [][]byte) {
slices.SortFunc(hashes, func(a, b []byte) (sortsBefore bool) {
return bytes.Compare(a, b) == -1
})
var curData []byte
var prevPrefix []byte
for i, hash := range hashes {
// nolint:looppointer // The subsilce is used for a safe comparison.
if !bytes.Equal(hash[0:2], prevPrefix) {
if i != 0 {
c.setCache(prevPrefix, curData)
curData = nil
}
prevPrefix = hashes[i][0:2]
}
curData = append(curData, hash...)
}
if len(prevPrefix) != 0 {
c.setCache(prevPrefix, curData)
}
for hash := range c.hashToHost {
// nolint:looppointer // The subsilce is used for a safe cache lookup.
prefix := hash[0:2]
val := c.cache.Get(prefix)
if val == nil {
c.setCache(prefix, nil)
}
}
}
func check(c *sbCtx, r Result, u upstream.Upstream) (Result, error) {
c.hashToHost = hostnameToHashes(c.host)
switch c.getCached() {
case -1:
return Result{}, nil
case 1:
return r, nil
}
question := c.getQuestion()
log.Tracef("%s: checking %s: %s", c.svc, c.host, question)
req := (&dns.Msg{}).SetQuestion(question, dns.TypeTXT)
resp, err := u.Exchange(req)
if err != nil {
return Result{}, err
}
matched, receivedHashes := c.processTXT(resp)
c.storeCache(receivedHashes)
if matched {
return r, nil
}
return Result{}, nil
}
</s> add | [
"keep",
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep",
"keep",
"keep"
] | https://github.com/AdguardTeam/AdGuardHome/commit/381f2f651d1a94d29bf534185e531b05a755f16c | internal/dnsforward/dnsforward_test.go |
<mask>
<mask> func TestBlockedBySafeBrowsing(t *testing.T) {
<mask> const hostname = "wmconvirus.narod.ru"
<mask>
<mask> sbUps := aghtest.NewBlockUpstream(hostname, true)
<mask> ans4, _ := (&aghtest.TestResolver{}).HostToIPs(hostname)
<mask>
<mask> filterConf := &filtering.Config{
<mask> SafeBrowsingEnabled: true,
<mask> }
</s> Pull request 1837: AG-21462-imp-safebrowsing-parental
Merge in DNS/adguard-home from AG-21462-imp-safebrowsing-parental to master
Squashed commit of the following:
commit 85016d4f1105e21a407efade0bd45b8362808061
Merge: 0e61edade 620b51e3e
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 16:36:30 2023 +0300
Merge branch 'master' into AG-21462-imp-safebrowsing-parental
commit 0e61edadeff34f6305e941c1db94575c82f238d9
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 14:51:37 2023 +0300
filtering: imp tests
commit 994255514cc0f67dfe33d5a0892432e8924d1e36
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 11:13:19 2023 +0300
filtering: fix typo
commit 96d1069573171538333330d6af94ef0f4208a9c4
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 11:00:18 2023 +0300
filtering: imp code more
commit c2a5620b04c4a529eea69983f1520cd2bc82ea9b
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 26 19:13:26 2023 +0300
all: add todo
commit e5dcc2e9701f8bccfde6ef8c01a4a2e7eb31599e
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 26 14:36:08 2023 +0300
all: imp code more
commit b6e734ccbeda82669023f6578481260b7c1f7161
Author: Stanislav Chzhen <[email protected]>
Date: Tue Apr 25 15:01:56 2023 +0300
filtering: imp code
commit 530648dadf836c1a4bd9917e0d3b47256fa8ff52
Author: Stanislav Chzhen <[email protected]>
Date: Mon Apr 24 20:06:36 2023 +0300
all: imp code
commit 49fa6e587052a40bb431fea457701ee860493527
Author: Stanislav Chzhen <[email protected]>
Date: Mon Apr 24 14:57:19 2023 +0300
all: rm safe browsing ctx
commit bbcb66cb03e18fa875e3c33cf16295892739e507
Author: Stanislav Chzhen <[email protected]>
Date: Fri Apr 21 17:54:18 2023 +0300
filtering: add cache item
commit cb7c9fffe8c4ff5e7a21ca912c223c799f61385f
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 20 18:43:02 2023 +0300
filtering: fix hashes
commit 153fec46270212af03f3631bfb42c5d680c4e142
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 20 16:15:15 2023 +0300
filtering: add test cases
commit 09372f92bbb1fc082f1b1283594ee589100209c5
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 20 15:38:05 2023 +0300
filtering: imp code
commit 466bc26d524ea6d1c3efb33692a7785d39e491ca
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 19 18:38:40 2023 +0300
filtering: add tests
commit 24365ecf8c60512fdac65833ee603c80864ae018
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 19 11:38:57 2023 +0300
filtering: add hashprefix </s> remove const hostname = "wmconvirus.narod.ru"
</s> add const (
hostname = "wmconvirus.narod.ru"
cacheTime = 10 * time.Minute
cacheSize = 10000
)
sbChecker := hashprefix.New(&hashprefix.Config{
CacheTime: cacheTime,
CacheSize: cacheSize,
Upstream: aghtest.NewBlockUpstream(hostname, true),
}) </s> remove d, setts := newForTest(t, &Config{SafeBrowsingEnabled: true}, nil)
</s> add d, setts := newForTest(t, &Config{
SafeBrowsingEnabled: true,
SafeBrowsingChecker: newChecker(sbBlocked),
}, nil) </s> remove ParentalEnabled: true,
SafeBrowsingEnabled: false,
</s> add ParentalEnabled: true,
SafeBrowsingEnabled: false,
SafeBrowsingChecker: newChecker(sbBlocked),
ParentalControlChecker: newChecker(pcBlocked), </s> remove d, setts := newForTest(b, &Config{SafeBrowsingEnabled: true}, nil)
</s> add d, setts := newForTest(b, &Config{
SafeBrowsingEnabled: true,
SafeBrowsingChecker: newChecker(sbBlocked),
}, nil) | [
"keep",
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep",
"keep",
"keep"
] | https://github.com/AdguardTeam/AdGuardHome/commit/381f2f651d1a94d29bf534185e531b05a755f16c | internal/dnsforward/dnsforward_test.go |
|
SafeBrowsingChecker: sbChecker, | <mask>
<mask> filterConf := &filtering.Config{
<mask> SafeBrowsingEnabled: true,
<mask> }
<mask> forwardConf := ServerConfig{
<mask> UDPListenAddrs: []*net.UDPAddr{{}},
<mask> TCPListenAddrs: []*net.TCPAddr{{}},
<mask> FilteringConfig: FilteringConfig{
</s> Pull request 1837: AG-21462-imp-safebrowsing-parental
Merge in DNS/adguard-home from AG-21462-imp-safebrowsing-parental to master
Squashed commit of the following:
commit 85016d4f1105e21a407efade0bd45b8362808061
Merge: 0e61edade 620b51e3e
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 16:36:30 2023 +0300
Merge branch 'master' into AG-21462-imp-safebrowsing-parental
commit 0e61edadeff34f6305e941c1db94575c82f238d9
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 14:51:37 2023 +0300
filtering: imp tests
commit 994255514cc0f67dfe33d5a0892432e8924d1e36
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 11:13:19 2023 +0300
filtering: fix typo
commit 96d1069573171538333330d6af94ef0f4208a9c4
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 11:00:18 2023 +0300
filtering: imp code more
commit c2a5620b04c4a529eea69983f1520cd2bc82ea9b
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 26 19:13:26 2023 +0300
all: add todo
commit e5dcc2e9701f8bccfde6ef8c01a4a2e7eb31599e
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 26 14:36:08 2023 +0300
all: imp code more
commit b6e734ccbeda82669023f6578481260b7c1f7161
Author: Stanislav Chzhen <[email protected]>
Date: Tue Apr 25 15:01:56 2023 +0300
filtering: imp code
commit 530648dadf836c1a4bd9917e0d3b47256fa8ff52
Author: Stanislav Chzhen <[email protected]>
Date: Mon Apr 24 20:06:36 2023 +0300
all: imp code
commit 49fa6e587052a40bb431fea457701ee860493527
Author: Stanislav Chzhen <[email protected]>
Date: Mon Apr 24 14:57:19 2023 +0300
all: rm safe browsing ctx
commit bbcb66cb03e18fa875e3c33cf16295892739e507
Author: Stanislav Chzhen <[email protected]>
Date: Fri Apr 21 17:54:18 2023 +0300
filtering: add cache item
commit cb7c9fffe8c4ff5e7a21ca912c223c799f61385f
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 20 18:43:02 2023 +0300
filtering: fix hashes
commit 153fec46270212af03f3631bfb42c5d680c4e142
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 20 16:15:15 2023 +0300
filtering: add test cases
commit 09372f92bbb1fc082f1b1283594ee589100209c5
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 20 15:38:05 2023 +0300
filtering: imp code
commit 466bc26d524ea6d1c3efb33692a7785d39e491ca
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 19 18:38:40 2023 +0300
filtering: add tests
commit 24365ecf8c60512fdac65833ee603c80864ae018
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 19 11:38:57 2023 +0300
filtering: add hashprefix </s> remove sbUps := aghtest.NewBlockUpstream(hostname, true)
</s> add </s> remove const hostname = "wmconvirus.narod.ru"
</s> add const (
hostname = "wmconvirus.narod.ru"
cacheTime = 10 * time.Minute
cacheSize = 10000
)
sbChecker := hashprefix.New(&hashprefix.Config{
CacheTime: cacheTime,
CacheSize: cacheSize,
Upstream: aghtest.NewBlockUpstream(hostname, true),
}) </s> remove d, setts := newForTest(b, &Config{SafeBrowsingEnabled: true}, nil)
</s> add d, setts := newForTest(b, &Config{
SafeBrowsingEnabled: true,
SafeBrowsingChecker: newChecker(sbBlocked),
}, nil) </s> remove d, setts := newForTest(t, &Config{SafeBrowsingEnabled: true}, nil)
</s> add d, setts := newForTest(t, &Config{
SafeBrowsingEnabled: true,
SafeBrowsingChecker: newChecker(sbBlocked),
}, nil) </s> remove ParentalEnabled: true,
SafeBrowsingEnabled: false,
</s> add ParentalEnabled: true,
SafeBrowsingEnabled: false,
SafeBrowsingChecker: newChecker(sbBlocked),
ParentalControlChecker: newChecker(pcBlocked), | [
"keep",
"keep",
"add",
"keep",
"keep",
"keep",
"keep",
"keep"
] | https://github.com/AdguardTeam/AdGuardHome/commit/381f2f651d1a94d29bf534185e531b05a755f16c | internal/dnsforward/dnsforward_test.go |
<mask> },
<mask> },
<mask> }
<mask> s := createTestServer(t, filterConf, forwardConf, nil)
<mask> s.dnsFilter.SetSafeBrowsingUpstream(sbUps)
<mask> startDeferStop(t, s)
<mask> addr := s.dnsProxy.Addr(proxy.ProtoUDP)
<mask>
<mask> // SafeBrowsing blocking.
<mask> req := createTestMessage(hostname + ".")
</s> Pull request 1837: AG-21462-imp-safebrowsing-parental
Merge in DNS/adguard-home from AG-21462-imp-safebrowsing-parental to master
Squashed commit of the following:
commit 85016d4f1105e21a407efade0bd45b8362808061
Merge: 0e61edade 620b51e3e
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 16:36:30 2023 +0300
Merge branch 'master' into AG-21462-imp-safebrowsing-parental
commit 0e61edadeff34f6305e941c1db94575c82f238d9
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 14:51:37 2023 +0300
filtering: imp tests
commit 994255514cc0f67dfe33d5a0892432e8924d1e36
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 11:13:19 2023 +0300
filtering: fix typo
commit 96d1069573171538333330d6af94ef0f4208a9c4
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 11:00:18 2023 +0300
filtering: imp code more
commit c2a5620b04c4a529eea69983f1520cd2bc82ea9b
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 26 19:13:26 2023 +0300
all: add todo
commit e5dcc2e9701f8bccfde6ef8c01a4a2e7eb31599e
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 26 14:36:08 2023 +0300
all: imp code more
commit b6e734ccbeda82669023f6578481260b7c1f7161
Author: Stanislav Chzhen <[email protected]>
Date: Tue Apr 25 15:01:56 2023 +0300
filtering: imp code
commit 530648dadf836c1a4bd9917e0d3b47256fa8ff52
Author: Stanislav Chzhen <[email protected]>
Date: Mon Apr 24 20:06:36 2023 +0300
all: imp code
commit 49fa6e587052a40bb431fea457701ee860493527
Author: Stanislav Chzhen <[email protected]>
Date: Mon Apr 24 14:57:19 2023 +0300
all: rm safe browsing ctx
commit bbcb66cb03e18fa875e3c33cf16295892739e507
Author: Stanislav Chzhen <[email protected]>
Date: Fri Apr 21 17:54:18 2023 +0300
filtering: add cache item
commit cb7c9fffe8c4ff5e7a21ca912c223c799f61385f
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 20 18:43:02 2023 +0300
filtering: fix hashes
commit 153fec46270212af03f3631bfb42c5d680c4e142
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 20 16:15:15 2023 +0300
filtering: add test cases
commit 09372f92bbb1fc082f1b1283594ee589100209c5
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 20 15:38:05 2023 +0300
filtering: imp code
commit 466bc26d524ea6d1c3efb33692a7785d39e491ca
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 19 18:38:40 2023 +0300
filtering: add tests
commit 24365ecf8c60512fdac65833ee603c80864ae018
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 19 11:38:57 2023 +0300
filtering: add hashprefix </s> remove d, setts := newForTest(b, &Config{SafeBrowsingEnabled: true}, nil)
</s> add d, setts := newForTest(b, &Config{
SafeBrowsingEnabled: true,
SafeBrowsingChecker: newChecker(sbBlocked),
}, nil) </s> remove d, setts := newForTest(b, &Config{SafeBrowsingEnabled: true}, nil)
</s> add d, setts := newForTest(b, &Config{
SafeBrowsingEnabled: true,
SafeBrowsingChecker: newChecker(sbBlocked),
}, nil) </s> remove d, setts := newForTest(t, &Config{SafeBrowsingEnabled: true}, nil)
</s> add d, setts := newForTest(t, &Config{
SafeBrowsingEnabled: true,
SafeBrowsingChecker: newChecker(sbBlocked),
}, nil) </s> remove d, setts := newForTest(t, &Config{SafeBrowsingEnabled: true}, nil)
</s> add sbChecker := newChecker(sbBlocked)
d, setts := newForTest(t, &Config{
SafeBrowsingEnabled: true,
SafeBrowsingChecker: sbChecker,
}, nil) </s> remove d, setts := newForTest(t, &Config{ParentalEnabled: true}, nil)
</s> add d, setts := newForTest(t, &Config{
ParentalEnabled: true,
ParentalControlChecker: newChecker(pcBlocked),
}, nil) | [
"keep",
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep",
"keep",
"keep"
] | https://github.com/AdguardTeam/AdGuardHome/commit/381f2f651d1a94d29bf534185e531b05a755f16c | internal/dnsforward/dnsforward_test.go |
|
<mask> "sync/atomic"
<mask>
<mask> "github.com/AdguardTeam/AdGuardHome/internal/aghhttp"
<mask> "github.com/AdguardTeam/AdGuardHome/internal/aghnet"
<mask> "github.com/AdguardTeam/dnsproxy/upstream"
<mask> "github.com/AdguardTeam/golibs/cache"
<mask> "github.com/AdguardTeam/golibs/errors"
<mask> "github.com/AdguardTeam/golibs/log"
<mask> "github.com/AdguardTeam/golibs/mathutil"
<mask> "github.com/AdguardTeam/golibs/stringutil"
<mask> "github.com/AdguardTeam/urlfilter"
</s> Pull request 1837: AG-21462-imp-safebrowsing-parental
Merge in DNS/adguard-home from AG-21462-imp-safebrowsing-parental to master
Squashed commit of the following:
commit 85016d4f1105e21a407efade0bd45b8362808061
Merge: 0e61edade 620b51e3e
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 16:36:30 2023 +0300
Merge branch 'master' into AG-21462-imp-safebrowsing-parental
commit 0e61edadeff34f6305e941c1db94575c82f238d9
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 14:51:37 2023 +0300
filtering: imp tests
commit 994255514cc0f67dfe33d5a0892432e8924d1e36
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 11:13:19 2023 +0300
filtering: fix typo
commit 96d1069573171538333330d6af94ef0f4208a9c4
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 11:00:18 2023 +0300
filtering: imp code more
commit c2a5620b04c4a529eea69983f1520cd2bc82ea9b
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 26 19:13:26 2023 +0300
all: add todo
commit e5dcc2e9701f8bccfde6ef8c01a4a2e7eb31599e
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 26 14:36:08 2023 +0300
all: imp code more
commit b6e734ccbeda82669023f6578481260b7c1f7161
Author: Stanislav Chzhen <[email protected]>
Date: Tue Apr 25 15:01:56 2023 +0300
filtering: imp code
commit 530648dadf836c1a4bd9917e0d3b47256fa8ff52
Author: Stanislav Chzhen <[email protected]>
Date: Mon Apr 24 20:06:36 2023 +0300
all: imp code
commit 49fa6e587052a40bb431fea457701ee860493527
Author: Stanislav Chzhen <[email protected]>
Date: Mon Apr 24 14:57:19 2023 +0300
all: rm safe browsing ctx
commit bbcb66cb03e18fa875e3c33cf16295892739e507
Author: Stanislav Chzhen <[email protected]>
Date: Fri Apr 21 17:54:18 2023 +0300
filtering: add cache item
commit cb7c9fffe8c4ff5e7a21ca912c223c799f61385f
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 20 18:43:02 2023 +0300
filtering: fix hashes
commit 153fec46270212af03f3631bfb42c5d680c4e142
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 20 16:15:15 2023 +0300
filtering: add test cases
commit 09372f92bbb1fc082f1b1283594ee589100209c5
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 20 15:38:05 2023 +0300
filtering: imp code
commit 466bc26d524ea6d1c3efb33692a7785d39e491ca
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 19 18:38:40 2023 +0300
filtering: add tests
commit 24365ecf8c60512fdac65833ee603c80864ae018
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 19 11:38:57 2023 +0300
filtering: add hashprefix </s> remove "github.com/AdguardTeam/dnsproxy/upstream"
"github.com/AdguardTeam/golibs/cache"
</s> add </s> remove "github.com/AdguardTeam/golibs/stringutil"
"github.com/miekg/dns"
"golang.org/x/exp/slices"
"golang.org/x/net/publicsuffix"
</s> add </s> remove "time"
</s> add </s> remove "github.com/AdguardTeam/golibs/cache"
</s> add "github.com/AdguardTeam/AdGuardHome/internal/filtering/hashprefix" | [
"keep",
"keep",
"keep",
"keep",
"replace",
"replace",
"keep",
"keep",
"keep",
"keep",
"keep"
] | https://github.com/AdguardTeam/AdGuardHome/commit/381f2f651d1a94d29bf534185e531b05a755f16c | internal/filtering/filtering.go |
|
// SafeBrowsingChecker is the safe browsing hash-prefix checker.
SafeBrowsingChecker Checker `yaml:"-"`
// ParentControl is the parental control hash-prefix checker.
ParentalControlChecker Checker `yaml:"-"`
| <mask> // Config allows you to configure DNS filtering with New() or just change variables directly.
<mask> type Config struct {
<mask> // enabled is used to be returned within Settings.
<mask> //
<mask> // It is of type uint32 to be accessed by atomic.
<mask> //
</s> Pull request 1837: AG-21462-imp-safebrowsing-parental
Merge in DNS/adguard-home from AG-21462-imp-safebrowsing-parental to master
Squashed commit of the following:
commit 85016d4f1105e21a407efade0bd45b8362808061
Merge: 0e61edade 620b51e3e
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 16:36:30 2023 +0300
Merge branch 'master' into AG-21462-imp-safebrowsing-parental
commit 0e61edadeff34f6305e941c1db94575c82f238d9
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 14:51:37 2023 +0300
filtering: imp tests
commit 994255514cc0f67dfe33d5a0892432e8924d1e36
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 11:13:19 2023 +0300
filtering: fix typo
commit 96d1069573171538333330d6af94ef0f4208a9c4
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 11:00:18 2023 +0300
filtering: imp code more
commit c2a5620b04c4a529eea69983f1520cd2bc82ea9b
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 26 19:13:26 2023 +0300
all: add todo
commit e5dcc2e9701f8bccfde6ef8c01a4a2e7eb31599e
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 26 14:36:08 2023 +0300
all: imp code more
commit b6e734ccbeda82669023f6578481260b7c1f7161
Author: Stanislav Chzhen <[email protected]>
Date: Tue Apr 25 15:01:56 2023 +0300
filtering: imp code
commit 530648dadf836c1a4bd9917e0d3b47256fa8ff52
Author: Stanislav Chzhen <[email protected]>
Date: Mon Apr 24 20:06:36 2023 +0300
all: imp code
commit 49fa6e587052a40bb431fea457701ee860493527
Author: Stanislav Chzhen <[email protected]>
Date: Mon Apr 24 14:57:19 2023 +0300
all: rm safe browsing ctx
commit bbcb66cb03e18fa875e3c33cf16295892739e507
Author: Stanislav Chzhen <[email protected]>
Date: Fri Apr 21 17:54:18 2023 +0300
filtering: add cache item
commit cb7c9fffe8c4ff5e7a21ca912c223c799f61385f
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 20 18:43:02 2023 +0300
filtering: fix hashes
commit 153fec46270212af03f3631bfb42c5d680c4e142
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 20 16:15:15 2023 +0300
filtering: add test cases
commit 09372f92bbb1fc082f1b1283594ee589100209c5
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 20 15:38:05 2023 +0300
filtering: imp code
commit 466bc26d524ea6d1c3efb33692a7785d39e491ca
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 19 18:38:40 2023 +0300
filtering: add tests
commit 24365ecf8c60512fdac65833ee603c80864ae018
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 19 11:38:57 2023 +0300
filtering: add hashprefix </s> remove refreshLock: &sync.Mutex{},
filterTitleRegexp: regexp.MustCompile(`^! Title: +(.*)$`),
</s> add refreshLock: &sync.Mutex{},
filterTitleRegexp: regexp.MustCompile(`^! Title: +(.*)$`),
safeBrowsingChecker: c.SafeBrowsingChecker,
parentalControlChecker: c.ParentalControlChecker, </s> remove parentalServer string // access via methods
safeBrowsingServer string // access via methods
parentalUpstream upstream.Upstream
safeBrowsingUpstream upstream.Upstream
safebrowsingCache cache.Cache
parentalCache cache.Cache
</s> add </s> remove safeSearch SafeSearch
</s> add | [
"keep",
"add",
"keep",
"keep",
"keep",
"keep"
] | https://github.com/AdguardTeam/AdGuardHome/commit/381f2f651d1a94d29bf534185e531b05a755f16c | internal/filtering/filtering.go |
// Checker is used for safe browsing or parental control hash-prefix filtering.
type Checker interface {
// Check returns true if request for the host should be blocked.
Check(host string) (block bool, err error)
}
| <mask> check func(host string, qtype uint16, setts *Settings) (res Result, err error)
<mask> name string
<mask> }
<mask>
<mask> // DNSFilter matches hostnames and DNS requests against filtering rules.
<mask> type DNSFilter struct {
<mask> safeSearch SafeSearch
<mask>
<mask> // safeBrowsingChecker is the safe browsing hash-prefix checker.
</s> Pull request 1837: AG-21462-imp-safebrowsing-parental
Merge in DNS/adguard-home from AG-21462-imp-safebrowsing-parental to master
Squashed commit of the following:
commit 85016d4f1105e21a407efade0bd45b8362808061
Merge: 0e61edade 620b51e3e
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 16:36:30 2023 +0300
Merge branch 'master' into AG-21462-imp-safebrowsing-parental
commit 0e61edadeff34f6305e941c1db94575c82f238d9
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 14:51:37 2023 +0300
filtering: imp tests
commit 994255514cc0f67dfe33d5a0892432e8924d1e36
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 11:13:19 2023 +0300
filtering: fix typo
commit 96d1069573171538333330d6af94ef0f4208a9c4
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 11:00:18 2023 +0300
filtering: imp code more
commit c2a5620b04c4a529eea69983f1520cd2bc82ea9b
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 26 19:13:26 2023 +0300
all: add todo
commit e5dcc2e9701f8bccfde6ef8c01a4a2e7eb31599e
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 26 14:36:08 2023 +0300
all: imp code more
commit b6e734ccbeda82669023f6578481260b7c1f7161
Author: Stanislav Chzhen <[email protected]>
Date: Tue Apr 25 15:01:56 2023 +0300
filtering: imp code
commit 530648dadf836c1a4bd9917e0d3b47256fa8ff52
Author: Stanislav Chzhen <[email protected]>
Date: Mon Apr 24 20:06:36 2023 +0300
all: imp code
commit 49fa6e587052a40bb431fea457701ee860493527
Author: Stanislav Chzhen <[email protected]>
Date: Mon Apr 24 14:57:19 2023 +0300
all: rm safe browsing ctx
commit bbcb66cb03e18fa875e3c33cf16295892739e507
Author: Stanislav Chzhen <[email protected]>
Date: Fri Apr 21 17:54:18 2023 +0300
filtering: add cache item
commit cb7c9fffe8c4ff5e7a21ca912c223c799f61385f
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 20 18:43:02 2023 +0300
filtering: fix hashes
commit 153fec46270212af03f3631bfb42c5d680c4e142
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 20 16:15:15 2023 +0300
filtering: add test cases
commit 09372f92bbb1fc082f1b1283594ee589100209c5
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 20 15:38:05 2023 +0300
filtering: imp code
commit 466bc26d524ea6d1c3efb33692a7785d39e491ca
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 19 18:38:40 2023 +0300
filtering: add tests
commit 24365ecf8c60512fdac65833ee603c80864ae018
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 19 11:38:57 2023 +0300
filtering: add hashprefix </s> remove safeSearch SafeSearch
</s> add </s> remove d.SetParentalUpstream(aghtest.NewBlockUpstream(pcBlocked, true))
d.SetSafeBrowsingUpstream(aghtest.NewBlockUpstream(sbBlocked, true))
</s> add </s> remove // TODO(a.garipov): Make configurable.
const (
dnsTimeout = 3 * time.Second
defaultSafebrowsingServer = `https://family.adguard-dns.com/dns-query`
defaultParentalServer = `https://family.adguard-dns.com/dns-query`
sbTXTSuffix = `sb.dns.adguard.com.`
pcTXTSuffix = `pc.dns.adguard.com.`
)
// SetParentalUpstream sets the parental upstream for *DNSFilter.
//
// TODO(e.burkov): Remove this in v1 API to forbid the direct access.
func (d *DNSFilter) SetParentalUpstream(u upstream.Upstream) {
d.parentalUpstream = u
}
// SetSafeBrowsingUpstream sets the safe browsing upstream for *DNSFilter.
//
// TODO(e.burkov): Remove this in v1 API to forbid the direct access.
func (d *DNSFilter) SetSafeBrowsingUpstream(u upstream.Upstream) {
d.safeBrowsingUpstream = u
}
func (d *DNSFilter) initSecurityServices() error {
var err error
d.safeBrowsingServer = defaultSafebrowsingServer
d.parentalServer = defaultParentalServer
opts := &upstream.Options{
Timeout: dnsTimeout,
ServerIPAddrs: []net.IP{
{94, 140, 14, 15},
{94, 140, 15, 16},
net.ParseIP("2a10:50c0::bad1:ff"),
net.ParseIP("2a10:50c0::bad2:ff"),
},
}
parUps, err := upstream.AddressToUpstream(d.parentalServer, opts)
if err != nil {
return fmt.Errorf("converting parental server: %w", err)
}
d.SetParentalUpstream(parUps)
sbUps, err := upstream.AddressToUpstream(d.safeBrowsingServer, opts)
if err != nil {
return fmt.Errorf("converting safe browsing server: %w", err)
}
d.SetSafeBrowsingUpstream(sbUps)
return nil
}
/*
expire byte[4]
hash byte[32]
...
*/
func (c *sbCtx) setCache(prefix, hashes []byte) {
d := make([]byte, 4+len(hashes))
expire := uint(time.Now().Unix()) + c.cacheTime*60
binary.BigEndian.PutUint32(d[:4], uint32(expire))
copy(d[4:], hashes)
c.cache.Set(prefix, d)
log.Debug("%s: stored in cache: %v", c.svc, prefix)
}
// findInHash returns 32-byte hash if it's found in hashToHost.
func (c *sbCtx) findInHash(val []byte) (hash32 [32]byte, found bool) {
for i := 4; i < len(val); i += 32 {
hash := val[i : i+32]
copy(hash32[:], hash[0:32])
_, found = c.hashToHost[hash32]
if found {
return hash32, found
}
}
return [32]byte{}, false
}
func (c *sbCtx) getCached() int {
now := time.Now().Unix()
hashesToRequest := map[[32]byte]string{}
for k, v := range c.hashToHost {
// nolint:looppointer // The subsilce is used for a safe cache lookup.
val := c.cache.Get(k[0:2])
if val == nil || now >= int64(binary.BigEndian.Uint32(val)) {
hashesToRequest[k] = v
continue
}
if hash32, found := c.findInHash(val); found {
log.Debug("%s: found in cache: %s: blocked by %v", c.svc, c.host, hash32)
return 1
}
}
if len(hashesToRequest) == 0 {
log.Debug("%s: found in cache: %s: not blocked", c.svc, c.host)
return -1
}
c.hashToHost = hashesToRequest
return 0
}
type sbCtx struct {
host string
svc string
hashToHost map[[32]byte]string
cache cache.Cache
cacheTime uint
}
func hostnameToHashes(host string) map[[32]byte]string {
hashes := map[[32]byte]string{}
tld, icann := publicsuffix.PublicSuffix(host)
if !icann {
// private suffixes like cloudfront.net
tld = ""
}
curhost := host
nDots := 0
for i := len(curhost) - 1; i >= 0; i-- {
if curhost[i] == '.' {
nDots++
if nDots == 4 {
curhost = curhost[i+1:] // "xxx.a.b.c.d" -> "a.b.c.d"
break
}
}
}
for {
if curhost == "" {
// we've reached end of string
break
}
if tld != "" && curhost == tld {
// we've reached the TLD, don't hash it
break
}
sum := sha256.Sum256([]byte(curhost))
hashes[sum] = curhost
pos := strings.IndexByte(curhost, byte('.'))
if pos < 0 {
break
}
curhost = curhost[pos+1:]
}
return hashes
}
// convert hash array to string
func (c *sbCtx) getQuestion() string {
b := &strings.Builder{}
for hash := range c.hashToHost {
// nolint:looppointer // The subsilce is used for safe hex encoding.
stringutil.WriteToBuilder(b, hex.EncodeToString(hash[0:2]), ".")
}
if c.svc == "SafeBrowsing" {
stringutil.WriteToBuilder(b, sbTXTSuffix)
return b.String()
}
stringutil.WriteToBuilder(b, pcTXTSuffix)
return b.String()
}
// Find the target hash in TXT response
func (c *sbCtx) processTXT(resp *dns.Msg) (bool, [][]byte) {
matched := false
hashes := [][]byte{}
for _, a := range resp.Answer {
txt, ok := a.(*dns.TXT)
if !ok {
continue
}
log.Debug("%s: received hashes for %s: %v", c.svc, c.host, txt.Txt)
for _, t := range txt.Txt {
if len(t) != 32*2 {
continue
}
hash, err := hex.DecodeString(t)
if err != nil {
continue
}
hashes = append(hashes, hash)
if !matched {
var hash32 [32]byte
copy(hash32[:], hash)
var hashHost string
hashHost, ok = c.hashToHost[hash32]
if ok {
log.Debug("%s: matched %s by %s/%s", c.svc, c.host, hashHost, t)
matched = true
}
}
}
}
return matched, hashes
}
func (c *sbCtx) storeCache(hashes [][]byte) {
slices.SortFunc(hashes, func(a, b []byte) (sortsBefore bool) {
return bytes.Compare(a, b) == -1
})
var curData []byte
var prevPrefix []byte
for i, hash := range hashes {
// nolint:looppointer // The subsilce is used for a safe comparison.
if !bytes.Equal(hash[0:2], prevPrefix) {
if i != 0 {
c.setCache(prevPrefix, curData)
curData = nil
}
prevPrefix = hashes[i][0:2]
}
curData = append(curData, hash...)
}
if len(prevPrefix) != 0 {
c.setCache(prevPrefix, curData)
}
for hash := range c.hashToHost {
// nolint:looppointer // The subsilce is used for a safe cache lookup.
prefix := hash[0:2]
val := c.cache.Get(prefix)
if val == nil {
c.setCache(prefix, nil)
}
}
}
func check(c *sbCtx, r Result, u upstream.Upstream) (Result, error) {
c.hashToHost = hostnameToHashes(c.host)
switch c.getCached() {
case -1:
return Result{}, nil
case 1:
return r, nil
}
question := c.getQuestion()
log.Tracef("%s: checking %s: %s", c.svc, c.host, question)
req := (&dns.Msg{}).SetQuestion(question, dns.TypeTXT)
resp, err := u.Exchange(req)
if err != nil {
return Result{}, err
}
matched, receivedHashes := c.processTXT(resp)
c.storeCache(receivedHashes)
if matched {
return r, nil
}
return Result{}, nil
}
</s> add | [
"keep",
"keep",
"keep",
"add",
"keep",
"keep",
"keep",
"keep",
"keep"
] | https://github.com/AdguardTeam/AdGuardHome/commit/381f2f651d1a94d29bf534185e531b05a755f16c | internal/filtering/filtering.go |
safeSearch SafeSearch
// safeBrowsingChecker is the safe browsing hash-prefix checker.
safeBrowsingChecker Checker
// parentalControl is the parental control hash-prefix checker.
parentalControlChecker Checker
| <mask> }
<mask>
<mask> // DNSFilter matches hostnames and DNS requests against filtering rules.
<mask> type DNSFilter struct {
<mask> rulesStorage *filterlist.RuleStorage
<mask> filteringEngine *urlfilter.DNSEngine
<mask>
<mask> rulesStorageAllow *filterlist.RuleStorage
<mask> filteringEngineAllow *urlfilter.DNSEngine
<mask>
</s> Pull request 1837: AG-21462-imp-safebrowsing-parental
Merge in DNS/adguard-home from AG-21462-imp-safebrowsing-parental to master
Squashed commit of the following:
commit 85016d4f1105e21a407efade0bd45b8362808061
Merge: 0e61edade 620b51e3e
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 16:36:30 2023 +0300
Merge branch 'master' into AG-21462-imp-safebrowsing-parental
commit 0e61edadeff34f6305e941c1db94575c82f238d9
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 14:51:37 2023 +0300
filtering: imp tests
commit 994255514cc0f67dfe33d5a0892432e8924d1e36
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 11:13:19 2023 +0300
filtering: fix typo
commit 96d1069573171538333330d6af94ef0f4208a9c4
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 11:00:18 2023 +0300
filtering: imp code more
commit c2a5620b04c4a529eea69983f1520cd2bc82ea9b
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 26 19:13:26 2023 +0300
all: add todo
commit e5dcc2e9701f8bccfde6ef8c01a4a2e7eb31599e
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 26 14:36:08 2023 +0300
all: imp code more
commit b6e734ccbeda82669023f6578481260b7c1f7161
Author: Stanislav Chzhen <[email protected]>
Date: Tue Apr 25 15:01:56 2023 +0300
filtering: imp code
commit 530648dadf836c1a4bd9917e0d3b47256fa8ff52
Author: Stanislav Chzhen <[email protected]>
Date: Mon Apr 24 20:06:36 2023 +0300
all: imp code
commit 49fa6e587052a40bb431fea457701ee860493527
Author: Stanislav Chzhen <[email protected]>
Date: Mon Apr 24 14:57:19 2023 +0300
all: rm safe browsing ctx
commit bbcb66cb03e18fa875e3c33cf16295892739e507
Author: Stanislav Chzhen <[email protected]>
Date: Fri Apr 21 17:54:18 2023 +0300
filtering: add cache item
commit cb7c9fffe8c4ff5e7a21ca912c223c799f61385f
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 20 18:43:02 2023 +0300
filtering: fix hashes
commit 153fec46270212af03f3631bfb42c5d680c4e142
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 20 16:15:15 2023 +0300
filtering: add test cases
commit 09372f92bbb1fc082f1b1283594ee589100209c5
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 20 15:38:05 2023 +0300
filtering: imp code
commit 466bc26d524ea6d1c3efb33692a7785d39e491ca
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 19 18:38:40 2023 +0300
filtering: add tests
commit 24365ecf8c60512fdac65833ee603c80864ae018
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 19 11:38:57 2023 +0300
filtering: add hashprefix </s> remove parentalServer string // access via methods
safeBrowsingServer string // access via methods
parentalUpstream upstream.Upstream
safeBrowsingUpstream upstream.Upstream
safebrowsingCache cache.Cache
parentalCache cache.Cache
</s> add </s> remove safeSearch SafeSearch
</s> add </s> remove d.SetParentalUpstream(aghtest.NewBlockUpstream(pcBlocked, true))
d.SetSafeBrowsingUpstream(aghtest.NewBlockUpstream(sbBlocked, true))
</s> add | [
"keep",
"keep",
"keep",
"add",
"keep",
"keep",
"keep",
"keep",
"keep",
"keep"
] | https://github.com/AdguardTeam/AdGuardHome/commit/381f2f651d1a94d29bf534185e531b05a755f16c | internal/filtering/filtering.go |
<mask> filteringEngineAllow *urlfilter.DNSEngine
<mask>
<mask> engineLock sync.RWMutex
<mask>
<mask> parentalServer string // access via methods
<mask> safeBrowsingServer string // access via methods
<mask> parentalUpstream upstream.Upstream
<mask> safeBrowsingUpstream upstream.Upstream
<mask>
<mask> safebrowsingCache cache.Cache
<mask> parentalCache cache.Cache
<mask>
<mask> Config // for direct access by library users, even a = assignment
<mask> // confLock protects Config.
<mask> confLock sync.RWMutex
<mask>
<mask> // Channel for passing data to filters-initializer goroutine
</s> Pull request 1837: AG-21462-imp-safebrowsing-parental
Merge in DNS/adguard-home from AG-21462-imp-safebrowsing-parental to master
Squashed commit of the following:
commit 85016d4f1105e21a407efade0bd45b8362808061
Merge: 0e61edade 620b51e3e
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 16:36:30 2023 +0300
Merge branch 'master' into AG-21462-imp-safebrowsing-parental
commit 0e61edadeff34f6305e941c1db94575c82f238d9
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 14:51:37 2023 +0300
filtering: imp tests
commit 994255514cc0f67dfe33d5a0892432e8924d1e36
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 11:13:19 2023 +0300
filtering: fix typo
commit 96d1069573171538333330d6af94ef0f4208a9c4
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 11:00:18 2023 +0300
filtering: imp code more
commit c2a5620b04c4a529eea69983f1520cd2bc82ea9b
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 26 19:13:26 2023 +0300
all: add todo
commit e5dcc2e9701f8bccfde6ef8c01a4a2e7eb31599e
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 26 14:36:08 2023 +0300
all: imp code more
commit b6e734ccbeda82669023f6578481260b7c1f7161
Author: Stanislav Chzhen <[email protected]>
Date: Tue Apr 25 15:01:56 2023 +0300
filtering: imp code
commit 530648dadf836c1a4bd9917e0d3b47256fa8ff52
Author: Stanislav Chzhen <[email protected]>
Date: Mon Apr 24 20:06:36 2023 +0300
all: imp code
commit 49fa6e587052a40bb431fea457701ee860493527
Author: Stanislav Chzhen <[email protected]>
Date: Mon Apr 24 14:57:19 2023 +0300
all: rm safe browsing ctx
commit bbcb66cb03e18fa875e3c33cf16295892739e507
Author: Stanislav Chzhen <[email protected]>
Date: Fri Apr 21 17:54:18 2023 +0300
filtering: add cache item
commit cb7c9fffe8c4ff5e7a21ca912c223c799f61385f
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 20 18:43:02 2023 +0300
filtering: fix hashes
commit 153fec46270212af03f3631bfb42c5d680c4e142
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 20 16:15:15 2023 +0300
filtering: add test cases
commit 09372f92bbb1fc082f1b1283594ee589100209c5
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 20 15:38:05 2023 +0300
filtering: imp code
commit 466bc26d524ea6d1c3efb33692a7785d39e491ca
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 19 18:38:40 2023 +0300
filtering: add tests
commit 24365ecf8c60512fdac65833ee603c80864ae018
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 19 11:38:57 2023 +0300
filtering: add hashprefix </s> remove // TODO(a.garipov): Make configurable.
const (
dnsTimeout = 3 * time.Second
defaultSafebrowsingServer = `https://family.adguard-dns.com/dns-query`
defaultParentalServer = `https://family.adguard-dns.com/dns-query`
sbTXTSuffix = `sb.dns.adguard.com.`
pcTXTSuffix = `pc.dns.adguard.com.`
)
// SetParentalUpstream sets the parental upstream for *DNSFilter.
//
// TODO(e.burkov): Remove this in v1 API to forbid the direct access.
func (d *DNSFilter) SetParentalUpstream(u upstream.Upstream) {
d.parentalUpstream = u
}
// SetSafeBrowsingUpstream sets the safe browsing upstream for *DNSFilter.
//
// TODO(e.burkov): Remove this in v1 API to forbid the direct access.
func (d *DNSFilter) SetSafeBrowsingUpstream(u upstream.Upstream) {
d.safeBrowsingUpstream = u
}
func (d *DNSFilter) initSecurityServices() error {
var err error
d.safeBrowsingServer = defaultSafebrowsingServer
d.parentalServer = defaultParentalServer
opts := &upstream.Options{
Timeout: dnsTimeout,
ServerIPAddrs: []net.IP{
{94, 140, 14, 15},
{94, 140, 15, 16},
net.ParseIP("2a10:50c0::bad1:ff"),
net.ParseIP("2a10:50c0::bad2:ff"),
},
}
parUps, err := upstream.AddressToUpstream(d.parentalServer, opts)
if err != nil {
return fmt.Errorf("converting parental server: %w", err)
}
d.SetParentalUpstream(parUps)
sbUps, err := upstream.AddressToUpstream(d.safeBrowsingServer, opts)
if err != nil {
return fmt.Errorf("converting safe browsing server: %w", err)
}
d.SetSafeBrowsingUpstream(sbUps)
return nil
}
/*
expire byte[4]
hash byte[32]
...
*/
func (c *sbCtx) setCache(prefix, hashes []byte) {
d := make([]byte, 4+len(hashes))
expire := uint(time.Now().Unix()) + c.cacheTime*60
binary.BigEndian.PutUint32(d[:4], uint32(expire))
copy(d[4:], hashes)
c.cache.Set(prefix, d)
log.Debug("%s: stored in cache: %v", c.svc, prefix)
}
// findInHash returns 32-byte hash if it's found in hashToHost.
func (c *sbCtx) findInHash(val []byte) (hash32 [32]byte, found bool) {
for i := 4; i < len(val); i += 32 {
hash := val[i : i+32]
copy(hash32[:], hash[0:32])
_, found = c.hashToHost[hash32]
if found {
return hash32, found
}
}
return [32]byte{}, false
}
func (c *sbCtx) getCached() int {
now := time.Now().Unix()
hashesToRequest := map[[32]byte]string{}
for k, v := range c.hashToHost {
// nolint:looppointer // The subsilce is used for a safe cache lookup.
val := c.cache.Get(k[0:2])
if val == nil || now >= int64(binary.BigEndian.Uint32(val)) {
hashesToRequest[k] = v
continue
}
if hash32, found := c.findInHash(val); found {
log.Debug("%s: found in cache: %s: blocked by %v", c.svc, c.host, hash32)
return 1
}
}
if len(hashesToRequest) == 0 {
log.Debug("%s: found in cache: %s: not blocked", c.svc, c.host)
return -1
}
c.hashToHost = hashesToRequest
return 0
}
type sbCtx struct {
host string
svc string
hashToHost map[[32]byte]string
cache cache.Cache
cacheTime uint
}
func hostnameToHashes(host string) map[[32]byte]string {
hashes := map[[32]byte]string{}
tld, icann := publicsuffix.PublicSuffix(host)
if !icann {
// private suffixes like cloudfront.net
tld = ""
}
curhost := host
nDots := 0
for i := len(curhost) - 1; i >= 0; i-- {
if curhost[i] == '.' {
nDots++
if nDots == 4 {
curhost = curhost[i+1:] // "xxx.a.b.c.d" -> "a.b.c.d"
break
}
}
}
for {
if curhost == "" {
// we've reached end of string
break
}
if tld != "" && curhost == tld {
// we've reached the TLD, don't hash it
break
}
sum := sha256.Sum256([]byte(curhost))
hashes[sum] = curhost
pos := strings.IndexByte(curhost, byte('.'))
if pos < 0 {
break
}
curhost = curhost[pos+1:]
}
return hashes
}
// convert hash array to string
func (c *sbCtx) getQuestion() string {
b := &strings.Builder{}
for hash := range c.hashToHost {
// nolint:looppointer // The subsilce is used for safe hex encoding.
stringutil.WriteToBuilder(b, hex.EncodeToString(hash[0:2]), ".")
}
if c.svc == "SafeBrowsing" {
stringutil.WriteToBuilder(b, sbTXTSuffix)
return b.String()
}
stringutil.WriteToBuilder(b, pcTXTSuffix)
return b.String()
}
// Find the target hash in TXT response
func (c *sbCtx) processTXT(resp *dns.Msg) (bool, [][]byte) {
matched := false
hashes := [][]byte{}
for _, a := range resp.Answer {
txt, ok := a.(*dns.TXT)
if !ok {
continue
}
log.Debug("%s: received hashes for %s: %v", c.svc, c.host, txt.Txt)
for _, t := range txt.Txt {
if len(t) != 32*2 {
continue
}
hash, err := hex.DecodeString(t)
if err != nil {
continue
}
hashes = append(hashes, hash)
if !matched {
var hash32 [32]byte
copy(hash32[:], hash)
var hashHost string
hashHost, ok = c.hashToHost[hash32]
if ok {
log.Debug("%s: matched %s by %s/%s", c.svc, c.host, hashHost, t)
matched = true
}
}
}
}
return matched, hashes
}
func (c *sbCtx) storeCache(hashes [][]byte) {
slices.SortFunc(hashes, func(a, b []byte) (sortsBefore bool) {
return bytes.Compare(a, b) == -1
})
var curData []byte
var prevPrefix []byte
for i, hash := range hashes {
// nolint:looppointer // The subsilce is used for a safe comparison.
if !bytes.Equal(hash[0:2], prevPrefix) {
if i != 0 {
c.setCache(prevPrefix, curData)
curData = nil
}
prevPrefix = hashes[i][0:2]
}
curData = append(curData, hash...)
}
if len(prevPrefix) != 0 {
c.setCache(prevPrefix, curData)
}
for hash := range c.hashToHost {
// nolint:looppointer // The subsilce is used for a safe cache lookup.
prefix := hash[0:2]
val := c.cache.Get(prefix)
if val == nil {
c.setCache(prefix, nil)
}
}
}
func check(c *sbCtx, r Result, u upstream.Upstream) (Result, error) {
c.hashToHost = hostnameToHashes(c.host)
switch c.getCached() {
case -1:
return Result{}, nil
case 1:
return r, nil
}
question := c.getQuestion()
log.Tracef("%s: checking %s: %s", c.svc, c.host, question)
req := (&dns.Msg{}).SetQuestion(question, dns.TypeTXT)
resp, err := u.Exchange(req)
if err != nil {
return Result{}, err
}
matched, receivedHashes := c.processTXT(resp)
c.storeCache(receivedHashes)
if matched {
return r, nil
}
return Result{}, nil
}
</s> add </s> remove safeSearch SafeSearch
</s> add | [
"keep",
"keep",
"keep",
"keep",
"replace",
"replace",
"replace",
"replace",
"replace",
"replace",
"replace",
"replace",
"keep",
"keep",
"keep",
"keep",
"keep"
] | https://github.com/AdguardTeam/AdGuardHome/commit/381f2f651d1a94d29bf534185e531b05a755f16c | internal/filtering/filtering.go |
|
<mask> //
<mask> // TODO(e.burkov): Don't use regexp for such a simple text processing task.
<mask> filterTitleRegexp *regexp.Regexp
<mask>
<mask> safeSearch SafeSearch
<mask> hostCheckers []hostChecker
<mask> }
<mask>
<mask> // Filter represents a filter list
<mask> type Filter struct {
</s> Pull request 1837: AG-21462-imp-safebrowsing-parental
Merge in DNS/adguard-home from AG-21462-imp-safebrowsing-parental to master
Squashed commit of the following:
commit 85016d4f1105e21a407efade0bd45b8362808061
Merge: 0e61edade 620b51e3e
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 16:36:30 2023 +0300
Merge branch 'master' into AG-21462-imp-safebrowsing-parental
commit 0e61edadeff34f6305e941c1db94575c82f238d9
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 14:51:37 2023 +0300
filtering: imp tests
commit 994255514cc0f67dfe33d5a0892432e8924d1e36
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 11:13:19 2023 +0300
filtering: fix typo
commit 96d1069573171538333330d6af94ef0f4208a9c4
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 11:00:18 2023 +0300
filtering: imp code more
commit c2a5620b04c4a529eea69983f1520cd2bc82ea9b
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 26 19:13:26 2023 +0300
all: add todo
commit e5dcc2e9701f8bccfde6ef8c01a4a2e7eb31599e
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 26 14:36:08 2023 +0300
all: imp code more
commit b6e734ccbeda82669023f6578481260b7c1f7161
Author: Stanislav Chzhen <[email protected]>
Date: Tue Apr 25 15:01:56 2023 +0300
filtering: imp code
commit 530648dadf836c1a4bd9917e0d3b47256fa8ff52
Author: Stanislav Chzhen <[email protected]>
Date: Mon Apr 24 20:06:36 2023 +0300
all: imp code
commit 49fa6e587052a40bb431fea457701ee860493527
Author: Stanislav Chzhen <[email protected]>
Date: Mon Apr 24 14:57:19 2023 +0300
all: rm safe browsing ctx
commit bbcb66cb03e18fa875e3c33cf16295892739e507
Author: Stanislav Chzhen <[email protected]>
Date: Fri Apr 21 17:54:18 2023 +0300
filtering: add cache item
commit cb7c9fffe8c4ff5e7a21ca912c223c799f61385f
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 20 18:43:02 2023 +0300
filtering: fix hashes
commit 153fec46270212af03f3631bfb42c5d680c4e142
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 20 16:15:15 2023 +0300
filtering: add test cases
commit 09372f92bbb1fc082f1b1283594ee589100209c5
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 20 15:38:05 2023 +0300
filtering: imp code
commit 466bc26d524ea6d1c3efb33692a7785d39e491ca
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 19 18:38:40 2023 +0300
filtering: add tests
commit 24365ecf8c60512fdac65833ee603c80864ae018
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 19 11:38:57 2023 +0300
filtering: add hashprefix </s> remove return check(sctx, res, d.parentalUpstream)
</s> add block, err := d.parentalControlChecker.Check(host)
if !block || err != nil {
return Result{}, err
}
return res, nil </s> remove refreshLock: &sync.Mutex{},
filterTitleRegexp: regexp.MustCompile(`^! Title: +(.*)$`),
</s> add refreshLock: &sync.Mutex{},
filterTitleRegexp: regexp.MustCompile(`^! Title: +(.*)$`),
safeBrowsingChecker: c.SafeBrowsingChecker,
parentalControlChecker: c.ParentalControlChecker, </s> remove // TODO(a.garipov): Make configurable.
const (
dnsTimeout = 3 * time.Second
defaultSafebrowsingServer = `https://family.adguard-dns.com/dns-query`
defaultParentalServer = `https://family.adguard-dns.com/dns-query`
sbTXTSuffix = `sb.dns.adguard.com.`
pcTXTSuffix = `pc.dns.adguard.com.`
)
// SetParentalUpstream sets the parental upstream for *DNSFilter.
//
// TODO(e.burkov): Remove this in v1 API to forbid the direct access.
func (d *DNSFilter) SetParentalUpstream(u upstream.Upstream) {
d.parentalUpstream = u
}
// SetSafeBrowsingUpstream sets the safe browsing upstream for *DNSFilter.
//
// TODO(e.burkov): Remove this in v1 API to forbid the direct access.
func (d *DNSFilter) SetSafeBrowsingUpstream(u upstream.Upstream) {
d.safeBrowsingUpstream = u
}
func (d *DNSFilter) initSecurityServices() error {
var err error
d.safeBrowsingServer = defaultSafebrowsingServer
d.parentalServer = defaultParentalServer
opts := &upstream.Options{
Timeout: dnsTimeout,
ServerIPAddrs: []net.IP{
{94, 140, 14, 15},
{94, 140, 15, 16},
net.ParseIP("2a10:50c0::bad1:ff"),
net.ParseIP("2a10:50c0::bad2:ff"),
},
}
parUps, err := upstream.AddressToUpstream(d.parentalServer, opts)
if err != nil {
return fmt.Errorf("converting parental server: %w", err)
}
d.SetParentalUpstream(parUps)
sbUps, err := upstream.AddressToUpstream(d.safeBrowsingServer, opts)
if err != nil {
return fmt.Errorf("converting safe browsing server: %w", err)
}
d.SetSafeBrowsingUpstream(sbUps)
return nil
}
/*
expire byte[4]
hash byte[32]
...
*/
func (c *sbCtx) setCache(prefix, hashes []byte) {
d := make([]byte, 4+len(hashes))
expire := uint(time.Now().Unix()) + c.cacheTime*60
binary.BigEndian.PutUint32(d[:4], uint32(expire))
copy(d[4:], hashes)
c.cache.Set(prefix, d)
log.Debug("%s: stored in cache: %v", c.svc, prefix)
}
// findInHash returns 32-byte hash if it's found in hashToHost.
func (c *sbCtx) findInHash(val []byte) (hash32 [32]byte, found bool) {
for i := 4; i < len(val); i += 32 {
hash := val[i : i+32]
copy(hash32[:], hash[0:32])
_, found = c.hashToHost[hash32]
if found {
return hash32, found
}
}
return [32]byte{}, false
}
func (c *sbCtx) getCached() int {
now := time.Now().Unix()
hashesToRequest := map[[32]byte]string{}
for k, v := range c.hashToHost {
// nolint:looppointer // The subsilce is used for a safe cache lookup.
val := c.cache.Get(k[0:2])
if val == nil || now >= int64(binary.BigEndian.Uint32(val)) {
hashesToRequest[k] = v
continue
}
if hash32, found := c.findInHash(val); found {
log.Debug("%s: found in cache: %s: blocked by %v", c.svc, c.host, hash32)
return 1
}
}
if len(hashesToRequest) == 0 {
log.Debug("%s: found in cache: %s: not blocked", c.svc, c.host)
return -1
}
c.hashToHost = hashesToRequest
return 0
}
type sbCtx struct {
host string
svc string
hashToHost map[[32]byte]string
cache cache.Cache
cacheTime uint
}
func hostnameToHashes(host string) map[[32]byte]string {
hashes := map[[32]byte]string{}
tld, icann := publicsuffix.PublicSuffix(host)
if !icann {
// private suffixes like cloudfront.net
tld = ""
}
curhost := host
nDots := 0
for i := len(curhost) - 1; i >= 0; i-- {
if curhost[i] == '.' {
nDots++
if nDots == 4 {
curhost = curhost[i+1:] // "xxx.a.b.c.d" -> "a.b.c.d"
break
}
}
}
for {
if curhost == "" {
// we've reached end of string
break
}
if tld != "" && curhost == tld {
// we've reached the TLD, don't hash it
break
}
sum := sha256.Sum256([]byte(curhost))
hashes[sum] = curhost
pos := strings.IndexByte(curhost, byte('.'))
if pos < 0 {
break
}
curhost = curhost[pos+1:]
}
return hashes
}
// convert hash array to string
func (c *sbCtx) getQuestion() string {
b := &strings.Builder{}
for hash := range c.hashToHost {
// nolint:looppointer // The subsilce is used for safe hex encoding.
stringutil.WriteToBuilder(b, hex.EncodeToString(hash[0:2]), ".")
}
if c.svc == "SafeBrowsing" {
stringutil.WriteToBuilder(b, sbTXTSuffix)
return b.String()
}
stringutil.WriteToBuilder(b, pcTXTSuffix)
return b.String()
}
// Find the target hash in TXT response
func (c *sbCtx) processTXT(resp *dns.Msg) (bool, [][]byte) {
matched := false
hashes := [][]byte{}
for _, a := range resp.Answer {
txt, ok := a.(*dns.TXT)
if !ok {
continue
}
log.Debug("%s: received hashes for %s: %v", c.svc, c.host, txt.Txt)
for _, t := range txt.Txt {
if len(t) != 32*2 {
continue
}
hash, err := hex.DecodeString(t)
if err != nil {
continue
}
hashes = append(hashes, hash)
if !matched {
var hash32 [32]byte
copy(hash32[:], hash)
var hashHost string
hashHost, ok = c.hashToHost[hash32]
if ok {
log.Debug("%s: matched %s by %s/%s", c.svc, c.host, hashHost, t)
matched = true
}
}
}
}
return matched, hashes
}
func (c *sbCtx) storeCache(hashes [][]byte) {
slices.SortFunc(hashes, func(a, b []byte) (sortsBefore bool) {
return bytes.Compare(a, b) == -1
})
var curData []byte
var prevPrefix []byte
for i, hash := range hashes {
// nolint:looppointer // The subsilce is used for a safe comparison.
if !bytes.Equal(hash[0:2], prevPrefix) {
if i != 0 {
c.setCache(prevPrefix, curData)
curData = nil
}
prevPrefix = hashes[i][0:2]
}
curData = append(curData, hash...)
}
if len(prevPrefix) != 0 {
c.setCache(prevPrefix, curData)
}
for hash := range c.hashToHost {
// nolint:looppointer // The subsilce is used for a safe cache lookup.
prefix := hash[0:2]
val := c.cache.Get(prefix)
if val == nil {
c.setCache(prefix, nil)
}
}
}
func check(c *sbCtx, r Result, u upstream.Upstream) (Result, error) {
c.hashToHost = hostnameToHashes(c.host)
switch c.getCached() {
case -1:
return Result{}, nil
case 1:
return r, nil
}
question := c.getQuestion()
log.Tracef("%s: checking %s: %s", c.svc, c.host, question)
req := (&dns.Msg{}).SetQuestion(question, dns.TypeTXT)
resp, err := u.Exchange(req)
if err != nil {
return Result{}, err
}
matched, receivedHashes := c.processTXT(resp)
c.storeCache(receivedHashes)
if matched {
return r, nil
}
return Result{}, nil
}
</s> add | [
"keep",
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep",
"keep",
"keep"
] | https://github.com/AdguardTeam/AdGuardHome/commit/381f2f651d1a94d29bf534185e531b05a755f16c | internal/filtering/filtering.go |
|
refreshLock: &sync.Mutex{},
filterTitleRegexp: regexp.MustCompile(`^! Title: +(.*)$`),
safeBrowsingChecker: c.SafeBrowsingChecker,
parentalControlChecker: c.ParentalControlChecker, | <mask> // New creates properly initialized DNS Filter that is ready to be used. c must
<mask> // be non-nil.
<mask> func New(c *Config, blockFilters []Filter) (d *DNSFilter, err error) {
<mask> d = &DNSFilter{
<mask> refreshLock: &sync.Mutex{},
<mask> filterTitleRegexp: regexp.MustCompile(`^! Title: +(.*)$`),
<mask> }
<mask>
<mask> d.safebrowsingCache = cache.New(cache.Config{
<mask> EnableLRU: true,
<mask> MaxSize: c.SafeBrowsingCacheSize,
</s> Pull request 1837: AG-21462-imp-safebrowsing-parental
Merge in DNS/adguard-home from AG-21462-imp-safebrowsing-parental to master
Squashed commit of the following:
commit 85016d4f1105e21a407efade0bd45b8362808061
Merge: 0e61edade 620b51e3e
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 16:36:30 2023 +0300
Merge branch 'master' into AG-21462-imp-safebrowsing-parental
commit 0e61edadeff34f6305e941c1db94575c82f238d9
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 14:51:37 2023 +0300
filtering: imp tests
commit 994255514cc0f67dfe33d5a0892432e8924d1e36
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 11:13:19 2023 +0300
filtering: fix typo
commit 96d1069573171538333330d6af94ef0f4208a9c4
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 11:00:18 2023 +0300
filtering: imp code more
commit c2a5620b04c4a529eea69983f1520cd2bc82ea9b
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 26 19:13:26 2023 +0300
all: add todo
commit e5dcc2e9701f8bccfde6ef8c01a4a2e7eb31599e
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 26 14:36:08 2023 +0300
all: imp code more
commit b6e734ccbeda82669023f6578481260b7c1f7161
Author: Stanislav Chzhen <[email protected]>
Date: Tue Apr 25 15:01:56 2023 +0300
filtering: imp code
commit 530648dadf836c1a4bd9917e0d3b47256fa8ff52
Author: Stanislav Chzhen <[email protected]>
Date: Mon Apr 24 20:06:36 2023 +0300
all: imp code
commit 49fa6e587052a40bb431fea457701ee860493527
Author: Stanislav Chzhen <[email protected]>
Date: Mon Apr 24 14:57:19 2023 +0300
all: rm safe browsing ctx
commit bbcb66cb03e18fa875e3c33cf16295892739e507
Author: Stanislav Chzhen <[email protected]>
Date: Fri Apr 21 17:54:18 2023 +0300
filtering: add cache item
commit cb7c9fffe8c4ff5e7a21ca912c223c799f61385f
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 20 18:43:02 2023 +0300
filtering: fix hashes
commit 153fec46270212af03f3631bfb42c5d680c4e142
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 20 16:15:15 2023 +0300
filtering: add test cases
commit 09372f92bbb1fc082f1b1283594ee589100209c5
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 20 15:38:05 2023 +0300
filtering: imp code
commit 466bc26d524ea6d1c3efb33692a7785d39e491ca
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 19 18:38:40 2023 +0300
filtering: add tests
commit 24365ecf8c60512fdac65833ee603c80864ae018
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 19 11:38:57 2023 +0300
filtering: add hashprefix </s> remove d.safebrowsingCache = cache.New(cache.Config{
EnableLRU: true,
MaxSize: c.SafeBrowsingCacheSize,
})
d.parentalCache = cache.New(cache.Config{
EnableLRU: true,
MaxSize: c.ParentalCacheSize,
})
</s> add </s> remove func purgeCaches(d *DNSFilter) {
for _, c := range []cache.Cache{
d.safebrowsingCache,
d.parentalCache,
} {
if c != nil {
c.Clear()
}
}
}
</s> add </s> remove // TODO(a.garipov): Make configurable.
const (
dnsTimeout = 3 * time.Second
defaultSafebrowsingServer = `https://family.adguard-dns.com/dns-query`
defaultParentalServer = `https://family.adguard-dns.com/dns-query`
sbTXTSuffix = `sb.dns.adguard.com.`
pcTXTSuffix = `pc.dns.adguard.com.`
)
// SetParentalUpstream sets the parental upstream for *DNSFilter.
//
// TODO(e.burkov): Remove this in v1 API to forbid the direct access.
func (d *DNSFilter) SetParentalUpstream(u upstream.Upstream) {
d.parentalUpstream = u
}
// SetSafeBrowsingUpstream sets the safe browsing upstream for *DNSFilter.
//
// TODO(e.burkov): Remove this in v1 API to forbid the direct access.
func (d *DNSFilter) SetSafeBrowsingUpstream(u upstream.Upstream) {
d.safeBrowsingUpstream = u
}
func (d *DNSFilter) initSecurityServices() error {
var err error
d.safeBrowsingServer = defaultSafebrowsingServer
d.parentalServer = defaultParentalServer
opts := &upstream.Options{
Timeout: dnsTimeout,
ServerIPAddrs: []net.IP{
{94, 140, 14, 15},
{94, 140, 15, 16},
net.ParseIP("2a10:50c0::bad1:ff"),
net.ParseIP("2a10:50c0::bad2:ff"),
},
}
parUps, err := upstream.AddressToUpstream(d.parentalServer, opts)
if err != nil {
return fmt.Errorf("converting parental server: %w", err)
}
d.SetParentalUpstream(parUps)
sbUps, err := upstream.AddressToUpstream(d.safeBrowsingServer, opts)
if err != nil {
return fmt.Errorf("converting safe browsing server: %w", err)
}
d.SetSafeBrowsingUpstream(sbUps)
return nil
}
/*
expire byte[4]
hash byte[32]
...
*/
func (c *sbCtx) setCache(prefix, hashes []byte) {
d := make([]byte, 4+len(hashes))
expire := uint(time.Now().Unix()) + c.cacheTime*60
binary.BigEndian.PutUint32(d[:4], uint32(expire))
copy(d[4:], hashes)
c.cache.Set(prefix, d)
log.Debug("%s: stored in cache: %v", c.svc, prefix)
}
// findInHash returns 32-byte hash if it's found in hashToHost.
func (c *sbCtx) findInHash(val []byte) (hash32 [32]byte, found bool) {
for i := 4; i < len(val); i += 32 {
hash := val[i : i+32]
copy(hash32[:], hash[0:32])
_, found = c.hashToHost[hash32]
if found {
return hash32, found
}
}
return [32]byte{}, false
}
func (c *sbCtx) getCached() int {
now := time.Now().Unix()
hashesToRequest := map[[32]byte]string{}
for k, v := range c.hashToHost {
// nolint:looppointer // The subsilce is used for a safe cache lookup.
val := c.cache.Get(k[0:2])
if val == nil || now >= int64(binary.BigEndian.Uint32(val)) {
hashesToRequest[k] = v
continue
}
if hash32, found := c.findInHash(val); found {
log.Debug("%s: found in cache: %s: blocked by %v", c.svc, c.host, hash32)
return 1
}
}
if len(hashesToRequest) == 0 {
log.Debug("%s: found in cache: %s: not blocked", c.svc, c.host)
return -1
}
c.hashToHost = hashesToRequest
return 0
}
type sbCtx struct {
host string
svc string
hashToHost map[[32]byte]string
cache cache.Cache
cacheTime uint
}
func hostnameToHashes(host string) map[[32]byte]string {
hashes := map[[32]byte]string{}
tld, icann := publicsuffix.PublicSuffix(host)
if !icann {
// private suffixes like cloudfront.net
tld = ""
}
curhost := host
nDots := 0
for i := len(curhost) - 1; i >= 0; i-- {
if curhost[i] == '.' {
nDots++
if nDots == 4 {
curhost = curhost[i+1:] // "xxx.a.b.c.d" -> "a.b.c.d"
break
}
}
}
for {
if curhost == "" {
// we've reached end of string
break
}
if tld != "" && curhost == tld {
// we've reached the TLD, don't hash it
break
}
sum := sha256.Sum256([]byte(curhost))
hashes[sum] = curhost
pos := strings.IndexByte(curhost, byte('.'))
if pos < 0 {
break
}
curhost = curhost[pos+1:]
}
return hashes
}
// convert hash array to string
func (c *sbCtx) getQuestion() string {
b := &strings.Builder{}
for hash := range c.hashToHost {
// nolint:looppointer // The subsilce is used for safe hex encoding.
stringutil.WriteToBuilder(b, hex.EncodeToString(hash[0:2]), ".")
}
if c.svc == "SafeBrowsing" {
stringutil.WriteToBuilder(b, sbTXTSuffix)
return b.String()
}
stringutil.WriteToBuilder(b, pcTXTSuffix)
return b.String()
}
// Find the target hash in TXT response
func (c *sbCtx) processTXT(resp *dns.Msg) (bool, [][]byte) {
matched := false
hashes := [][]byte{}
for _, a := range resp.Answer {
txt, ok := a.(*dns.TXT)
if !ok {
continue
}
log.Debug("%s: received hashes for %s: %v", c.svc, c.host, txt.Txt)
for _, t := range txt.Txt {
if len(t) != 32*2 {
continue
}
hash, err := hex.DecodeString(t)
if err != nil {
continue
}
hashes = append(hashes, hash)
if !matched {
var hash32 [32]byte
copy(hash32[:], hash)
var hashHost string
hashHost, ok = c.hashToHost[hash32]
if ok {
log.Debug("%s: matched %s by %s/%s", c.svc, c.host, hashHost, t)
matched = true
}
}
}
}
return matched, hashes
}
func (c *sbCtx) storeCache(hashes [][]byte) {
slices.SortFunc(hashes, func(a, b []byte) (sortsBefore bool) {
return bytes.Compare(a, b) == -1
})
var curData []byte
var prevPrefix []byte
for i, hash := range hashes {
// nolint:looppointer // The subsilce is used for a safe comparison.
if !bytes.Equal(hash[0:2], prevPrefix) {
if i != 0 {
c.setCache(prevPrefix, curData)
curData = nil
}
prevPrefix = hashes[i][0:2]
}
curData = append(curData, hash...)
}
if len(prevPrefix) != 0 {
c.setCache(prevPrefix, curData)
}
for hash := range c.hashToHost {
// nolint:looppointer // The subsilce is used for a safe cache lookup.
prefix := hash[0:2]
val := c.cache.Get(prefix)
if val == nil {
c.setCache(prefix, nil)
}
}
}
func check(c *sbCtx, r Result, u upstream.Upstream) (Result, error) {
c.hashToHost = hostnameToHashes(c.host)
switch c.getCached() {
case -1:
return Result{}, nil
case 1:
return r, nil
}
question := c.getQuestion()
log.Tracef("%s: checking %s: %s", c.svc, c.host, question)
req := (&dns.Msg{}).SetQuestion(question, dns.TypeTXT)
resp, err := u.Exchange(req)
if err != nil {
return Result{}, err
}
matched, receivedHashes := c.processTXT(resp)
c.storeCache(receivedHashes)
if matched {
return r, nil
}
return Result{}, nil
}
</s> add | [
"keep",
"keep",
"keep",
"keep",
"replace",
"replace",
"keep",
"keep",
"keep",
"keep",
"keep"
] | https://github.com/AdguardTeam/AdGuardHome/commit/381f2f651d1a94d29bf534185e531b05a755f16c | internal/filtering/filtering.go |
<mask> refreshLock: &sync.Mutex{},
<mask> filterTitleRegexp: regexp.MustCompile(`^! Title: +(.*)$`),
<mask> }
<mask>
<mask> d.safebrowsingCache = cache.New(cache.Config{
<mask> EnableLRU: true,
<mask> MaxSize: c.SafeBrowsingCacheSize,
<mask> })
<mask> d.parentalCache = cache.New(cache.Config{
<mask> EnableLRU: true,
<mask> MaxSize: c.ParentalCacheSize,
<mask> })
<mask>
<mask> d.safeSearch = c.SafeSearch
<mask>
<mask> d.hostCheckers = []hostChecker{{
<mask> check: d.matchSysHosts,
<mask> name: "hosts container",
</s> Pull request 1837: AG-21462-imp-safebrowsing-parental
Merge in DNS/adguard-home from AG-21462-imp-safebrowsing-parental to master
Squashed commit of the following:
commit 85016d4f1105e21a407efade0bd45b8362808061
Merge: 0e61edade 620b51e3e
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 16:36:30 2023 +0300
Merge branch 'master' into AG-21462-imp-safebrowsing-parental
commit 0e61edadeff34f6305e941c1db94575c82f238d9
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 14:51:37 2023 +0300
filtering: imp tests
commit 994255514cc0f67dfe33d5a0892432e8924d1e36
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 11:13:19 2023 +0300
filtering: fix typo
commit 96d1069573171538333330d6af94ef0f4208a9c4
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 11:00:18 2023 +0300
filtering: imp code more
commit c2a5620b04c4a529eea69983f1520cd2bc82ea9b
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 26 19:13:26 2023 +0300
all: add todo
commit e5dcc2e9701f8bccfde6ef8c01a4a2e7eb31599e
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 26 14:36:08 2023 +0300
all: imp code more
commit b6e734ccbeda82669023f6578481260b7c1f7161
Author: Stanislav Chzhen <[email protected]>
Date: Tue Apr 25 15:01:56 2023 +0300
filtering: imp code
commit 530648dadf836c1a4bd9917e0d3b47256fa8ff52
Author: Stanislav Chzhen <[email protected]>
Date: Mon Apr 24 20:06:36 2023 +0300
all: imp code
commit 49fa6e587052a40bb431fea457701ee860493527
Author: Stanislav Chzhen <[email protected]>
Date: Mon Apr 24 14:57:19 2023 +0300
all: rm safe browsing ctx
commit bbcb66cb03e18fa875e3c33cf16295892739e507
Author: Stanislav Chzhen <[email protected]>
Date: Fri Apr 21 17:54:18 2023 +0300
filtering: add cache item
commit cb7c9fffe8c4ff5e7a21ca912c223c799f61385f
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 20 18:43:02 2023 +0300
filtering: fix hashes
commit 153fec46270212af03f3631bfb42c5d680c4e142
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 20 16:15:15 2023 +0300
filtering: add test cases
commit 09372f92bbb1fc082f1b1283594ee589100209c5
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 20 15:38:05 2023 +0300
filtering: imp code
commit 466bc26d524ea6d1c3efb33692a7785d39e491ca
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 19 18:38:40 2023 +0300
filtering: add tests
commit 24365ecf8c60512fdac65833ee603c80864ae018
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 19 11:38:57 2023 +0300
filtering: add hashprefix </s> remove refreshLock: &sync.Mutex{},
filterTitleRegexp: regexp.MustCompile(`^! Title: +(.*)$`),
</s> add refreshLock: &sync.Mutex{},
filterTitleRegexp: regexp.MustCompile(`^! Title: +(.*)$`),
safeBrowsingChecker: c.SafeBrowsingChecker,
parentalControlChecker: c.ParentalControlChecker, </s> remove const hostname = "wmconvirus.narod.ru"
</s> add const (
hostname = "wmconvirus.narod.ru"
cacheTime = 10 * time.Minute
cacheSize = 10000
)
sbChecker := hashprefix.New(&hashprefix.Config{
CacheTime: cacheTime,
CacheSize: cacheSize,
Upstream: aghtest.NewBlockUpstream(hostname, true),
}) </s> remove sbUps := aghtest.NewBlockUpstream(hostname, true)
</s> add </s> remove func purgeCaches(d *DNSFilter) {
for _, c := range []cache.Cache{
d.safebrowsingCache,
d.parentalCache,
} {
if c != nil {
c.Clear()
}
}
}
</s> add </s> remove err = d.initSecurityServices()
if err != nil {
return nil, fmt.Errorf("initializing services: %s", err)
}
</s> add | [
"keep",
"keep",
"keep",
"keep",
"replace",
"replace",
"replace",
"replace",
"replace",
"replace",
"replace",
"replace",
"replace",
"keep",
"keep",
"keep",
"keep",
"keep"
] | https://github.com/AdguardTeam/AdGuardHome/commit/381f2f651d1a94d29bf534185e531b05a755f16c | internal/filtering/filtering.go |
|
<mask> }}
<mask>
<mask> defer func() { err = errors.Annotate(err, "filtering: %w") }()
<mask>
<mask> err = d.initSecurityServices()
<mask> if err != nil {
<mask> return nil, fmt.Errorf("initializing services: %s", err)
<mask> }
<mask>
<mask> d.Config = *c
<mask> d.filtersMu = &sync.RWMutex{}
<mask>
<mask> err = d.prepareRewrites()
<mask> if err != nil {
</s> Pull request 1837: AG-21462-imp-safebrowsing-parental
Merge in DNS/adguard-home from AG-21462-imp-safebrowsing-parental to master
Squashed commit of the following:
commit 85016d4f1105e21a407efade0bd45b8362808061
Merge: 0e61edade 620b51e3e
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 16:36:30 2023 +0300
Merge branch 'master' into AG-21462-imp-safebrowsing-parental
commit 0e61edadeff34f6305e941c1db94575c82f238d9
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 14:51:37 2023 +0300
filtering: imp tests
commit 994255514cc0f67dfe33d5a0892432e8924d1e36
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 11:13:19 2023 +0300
filtering: fix typo
commit 96d1069573171538333330d6af94ef0f4208a9c4
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 11:00:18 2023 +0300
filtering: imp code more
commit c2a5620b04c4a529eea69983f1520cd2bc82ea9b
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 26 19:13:26 2023 +0300
all: add todo
commit e5dcc2e9701f8bccfde6ef8c01a4a2e7eb31599e
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 26 14:36:08 2023 +0300
all: imp code more
commit b6e734ccbeda82669023f6578481260b7c1f7161
Author: Stanislav Chzhen <[email protected]>
Date: Tue Apr 25 15:01:56 2023 +0300
filtering: imp code
commit 530648dadf836c1a4bd9917e0d3b47256fa8ff52
Author: Stanislav Chzhen <[email protected]>
Date: Mon Apr 24 20:06:36 2023 +0300
all: imp code
commit 49fa6e587052a40bb431fea457701ee860493527
Author: Stanislav Chzhen <[email protected]>
Date: Mon Apr 24 14:57:19 2023 +0300
all: rm safe browsing ctx
commit bbcb66cb03e18fa875e3c33cf16295892739e507
Author: Stanislav Chzhen <[email protected]>
Date: Fri Apr 21 17:54:18 2023 +0300
filtering: add cache item
commit cb7c9fffe8c4ff5e7a21ca912c223c799f61385f
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 20 18:43:02 2023 +0300
filtering: fix hashes
commit 153fec46270212af03f3631bfb42c5d680c4e142
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 20 16:15:15 2023 +0300
filtering: add test cases
commit 09372f92bbb1fc082f1b1283594ee589100209c5
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 20 15:38:05 2023 +0300
filtering: imp code
commit 466bc26d524ea6d1c3efb33692a7785d39e491ca
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 19 18:38:40 2023 +0300
filtering: add tests
commit 24365ecf8c60512fdac65833ee603c80864ae018
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 19 11:38:57 2023 +0300
filtering: add hashprefix </s> remove return check(sctx, res, d.safeBrowsingUpstream)
</s> add block, err := d.safeBrowsingChecker.Check(host)
if !block || err != nil {
return Result{}, err
}
return res, nil </s> remove // TODO(a.garipov): Make configurable.
const (
dnsTimeout = 3 * time.Second
defaultSafebrowsingServer = `https://family.adguard-dns.com/dns-query`
defaultParentalServer = `https://family.adguard-dns.com/dns-query`
sbTXTSuffix = `sb.dns.adguard.com.`
pcTXTSuffix = `pc.dns.adguard.com.`
)
// SetParentalUpstream sets the parental upstream for *DNSFilter.
//
// TODO(e.burkov): Remove this in v1 API to forbid the direct access.
func (d *DNSFilter) SetParentalUpstream(u upstream.Upstream) {
d.parentalUpstream = u
}
// SetSafeBrowsingUpstream sets the safe browsing upstream for *DNSFilter.
//
// TODO(e.burkov): Remove this in v1 API to forbid the direct access.
func (d *DNSFilter) SetSafeBrowsingUpstream(u upstream.Upstream) {
d.safeBrowsingUpstream = u
}
func (d *DNSFilter) initSecurityServices() error {
var err error
d.safeBrowsingServer = defaultSafebrowsingServer
d.parentalServer = defaultParentalServer
opts := &upstream.Options{
Timeout: dnsTimeout,
ServerIPAddrs: []net.IP{
{94, 140, 14, 15},
{94, 140, 15, 16},
net.ParseIP("2a10:50c0::bad1:ff"),
net.ParseIP("2a10:50c0::bad2:ff"),
},
}
parUps, err := upstream.AddressToUpstream(d.parentalServer, opts)
if err != nil {
return fmt.Errorf("converting parental server: %w", err)
}
d.SetParentalUpstream(parUps)
sbUps, err := upstream.AddressToUpstream(d.safeBrowsingServer, opts)
if err != nil {
return fmt.Errorf("converting safe browsing server: %w", err)
}
d.SetSafeBrowsingUpstream(sbUps)
return nil
}
/*
expire byte[4]
hash byte[32]
...
*/
func (c *sbCtx) setCache(prefix, hashes []byte) {
d := make([]byte, 4+len(hashes))
expire := uint(time.Now().Unix()) + c.cacheTime*60
binary.BigEndian.PutUint32(d[:4], uint32(expire))
copy(d[4:], hashes)
c.cache.Set(prefix, d)
log.Debug("%s: stored in cache: %v", c.svc, prefix)
}
// findInHash returns 32-byte hash if it's found in hashToHost.
func (c *sbCtx) findInHash(val []byte) (hash32 [32]byte, found bool) {
for i := 4; i < len(val); i += 32 {
hash := val[i : i+32]
copy(hash32[:], hash[0:32])
_, found = c.hashToHost[hash32]
if found {
return hash32, found
}
}
return [32]byte{}, false
}
func (c *sbCtx) getCached() int {
now := time.Now().Unix()
hashesToRequest := map[[32]byte]string{}
for k, v := range c.hashToHost {
// nolint:looppointer // The subsilce is used for a safe cache lookup.
val := c.cache.Get(k[0:2])
if val == nil || now >= int64(binary.BigEndian.Uint32(val)) {
hashesToRequest[k] = v
continue
}
if hash32, found := c.findInHash(val); found {
log.Debug("%s: found in cache: %s: blocked by %v", c.svc, c.host, hash32)
return 1
}
}
if len(hashesToRequest) == 0 {
log.Debug("%s: found in cache: %s: not blocked", c.svc, c.host)
return -1
}
c.hashToHost = hashesToRequest
return 0
}
type sbCtx struct {
host string
svc string
hashToHost map[[32]byte]string
cache cache.Cache
cacheTime uint
}
func hostnameToHashes(host string) map[[32]byte]string {
hashes := map[[32]byte]string{}
tld, icann := publicsuffix.PublicSuffix(host)
if !icann {
// private suffixes like cloudfront.net
tld = ""
}
curhost := host
nDots := 0
for i := len(curhost) - 1; i >= 0; i-- {
if curhost[i] == '.' {
nDots++
if nDots == 4 {
curhost = curhost[i+1:] // "xxx.a.b.c.d" -> "a.b.c.d"
break
}
}
}
for {
if curhost == "" {
// we've reached end of string
break
}
if tld != "" && curhost == tld {
// we've reached the TLD, don't hash it
break
}
sum := sha256.Sum256([]byte(curhost))
hashes[sum] = curhost
pos := strings.IndexByte(curhost, byte('.'))
if pos < 0 {
break
}
curhost = curhost[pos+1:]
}
return hashes
}
// convert hash array to string
func (c *sbCtx) getQuestion() string {
b := &strings.Builder{}
for hash := range c.hashToHost {
// nolint:looppointer // The subsilce is used for safe hex encoding.
stringutil.WriteToBuilder(b, hex.EncodeToString(hash[0:2]), ".")
}
if c.svc == "SafeBrowsing" {
stringutil.WriteToBuilder(b, sbTXTSuffix)
return b.String()
}
stringutil.WriteToBuilder(b, pcTXTSuffix)
return b.String()
}
// Find the target hash in TXT response
func (c *sbCtx) processTXT(resp *dns.Msg) (bool, [][]byte) {
matched := false
hashes := [][]byte{}
for _, a := range resp.Answer {
txt, ok := a.(*dns.TXT)
if !ok {
continue
}
log.Debug("%s: received hashes for %s: %v", c.svc, c.host, txt.Txt)
for _, t := range txt.Txt {
if len(t) != 32*2 {
continue
}
hash, err := hex.DecodeString(t)
if err != nil {
continue
}
hashes = append(hashes, hash)
if !matched {
var hash32 [32]byte
copy(hash32[:], hash)
var hashHost string
hashHost, ok = c.hashToHost[hash32]
if ok {
log.Debug("%s: matched %s by %s/%s", c.svc, c.host, hashHost, t)
matched = true
}
}
}
}
return matched, hashes
}
func (c *sbCtx) storeCache(hashes [][]byte) {
slices.SortFunc(hashes, func(a, b []byte) (sortsBefore bool) {
return bytes.Compare(a, b) == -1
})
var curData []byte
var prevPrefix []byte
for i, hash := range hashes {
// nolint:looppointer // The subsilce is used for a safe comparison.
if !bytes.Equal(hash[0:2], prevPrefix) {
if i != 0 {
c.setCache(prevPrefix, curData)
curData = nil
}
prevPrefix = hashes[i][0:2]
}
curData = append(curData, hash...)
}
if len(prevPrefix) != 0 {
c.setCache(prevPrefix, curData)
}
for hash := range c.hashToHost {
// nolint:looppointer // The subsilce is used for a safe cache lookup.
prefix := hash[0:2]
val := c.cache.Get(prefix)
if val == nil {
c.setCache(prefix, nil)
}
}
}
func check(c *sbCtx, r Result, u upstream.Upstream) (Result, error) {
c.hashToHost = hostnameToHashes(c.host)
switch c.getCached() {
case -1:
return Result{}, nil
case 1:
return r, nil
}
question := c.getQuestion()
log.Tracef("%s: checking %s: %s", c.svc, c.host, question)
req := (&dns.Msg{}).SetQuestion(question, dns.TypeTXT)
resp, err := u.Exchange(req)
if err != nil {
return Result{}, err
}
matched, receivedHashes := c.processTXT(resp)
c.storeCache(receivedHashes)
if matched {
return r, nil
}
return Result{}, nil
}
</s> add </s> remove return check(sctx, res, d.parentalUpstream)
</s> add block, err := d.parentalControlChecker.Check(host)
if !block || err != nil {
return Result{}, err
}
return res, nil </s> remove func purgeCaches(d *DNSFilter) {
for _, c := range []cache.Cache{
d.safebrowsingCache,
d.parentalCache,
} {
if c != nil {
c.Clear()
}
}
}
</s> add </s> remove purgeCaches(f)
</s> add | [
"keep",
"keep",
"keep",
"keep",
"replace",
"replace",
"replace",
"replace",
"replace",
"keep",
"keep",
"keep",
"keep",
"keep"
] | https://github.com/AdguardTeam/AdGuardHome/commit/381f2f651d1a94d29bf534185e531b05a755f16c | internal/filtering/filtering.go |
|
"github.com/AdguardTeam/AdGuardHome/internal/filtering/hashprefix" | <mask> "net"
<mask> "testing"
<mask>
<mask> "github.com/AdguardTeam/AdGuardHome/internal/aghtest"
<mask> "github.com/AdguardTeam/golibs/cache"
<mask> "github.com/AdguardTeam/golibs/log"
<mask> "github.com/AdguardTeam/golibs/testutil"
<mask> "github.com/AdguardTeam/urlfilter/rules"
<mask> "github.com/miekg/dns"
<mask> "github.com/stretchr/testify/assert"
</s> Pull request 1837: AG-21462-imp-safebrowsing-parental
Merge in DNS/adguard-home from AG-21462-imp-safebrowsing-parental to master
Squashed commit of the following:
commit 85016d4f1105e21a407efade0bd45b8362808061
Merge: 0e61edade 620b51e3e
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 16:36:30 2023 +0300
Merge branch 'master' into AG-21462-imp-safebrowsing-parental
commit 0e61edadeff34f6305e941c1db94575c82f238d9
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 14:51:37 2023 +0300
filtering: imp tests
commit 994255514cc0f67dfe33d5a0892432e8924d1e36
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 11:13:19 2023 +0300
filtering: fix typo
commit 96d1069573171538333330d6af94ef0f4208a9c4
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 11:00:18 2023 +0300
filtering: imp code more
commit c2a5620b04c4a529eea69983f1520cd2bc82ea9b
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 26 19:13:26 2023 +0300
all: add todo
commit e5dcc2e9701f8bccfde6ef8c01a4a2e7eb31599e
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 26 14:36:08 2023 +0300
all: imp code more
commit b6e734ccbeda82669023f6578481260b7c1f7161
Author: Stanislav Chzhen <[email protected]>
Date: Tue Apr 25 15:01:56 2023 +0300
filtering: imp code
commit 530648dadf836c1a4bd9917e0d3b47256fa8ff52
Author: Stanislav Chzhen <[email protected]>
Date: Mon Apr 24 20:06:36 2023 +0300
all: imp code
commit 49fa6e587052a40bb431fea457701ee860493527
Author: Stanislav Chzhen <[email protected]>
Date: Mon Apr 24 14:57:19 2023 +0300
all: rm safe browsing ctx
commit bbcb66cb03e18fa875e3c33cf16295892739e507
Author: Stanislav Chzhen <[email protected]>
Date: Fri Apr 21 17:54:18 2023 +0300
filtering: add cache item
commit cb7c9fffe8c4ff5e7a21ca912c223c799f61385f
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 20 18:43:02 2023 +0300
filtering: fix hashes
commit 153fec46270212af03f3631bfb42c5d680c4e142
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 20 16:15:15 2023 +0300
filtering: add test cases
commit 09372f92bbb1fc082f1b1283594ee589100209c5
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 20 15:38:05 2023 +0300
filtering: imp code
commit 466bc26d524ea6d1c3efb33692a7785d39e491ca
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 19 18:38:40 2023 +0300
filtering: add tests
commit 24365ecf8c60512fdac65833ee603c80864ae018
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 19 11:38:57 2023 +0300
filtering: add hashprefix </s> remove "github.com/AdguardTeam/dnsproxy/upstream"
"github.com/AdguardTeam/golibs/cache"
</s> add </s> remove "time"
</s> add </s> remove "github.com/AdguardTeam/golibs/stringutil"
"github.com/miekg/dns"
"golang.org/x/exp/slices"
"golang.org/x/net/publicsuffix"
</s> add </s> remove "github.com/AdguardTeam/dnsproxy/upstream"
"github.com/AdguardTeam/golibs/cache"
</s> add | [
"keep",
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep",
"keep",
"keep"
] | https://github.com/AdguardTeam/AdGuardHome/commit/381f2f651d1a94d29bf534185e531b05a755f16c | internal/filtering/filtering_test.go |
<mask> )
<mask>
<mask> // Helpers.
<mask>
<mask> func purgeCaches(d *DNSFilter) {
<mask> for _, c := range []cache.Cache{
<mask> d.safebrowsingCache,
<mask> d.parentalCache,
<mask> } {
<mask> if c != nil {
<mask> c.Clear()
<mask> }
<mask> }
<mask> }
<mask>
<mask> func newForTest(t testing.TB, c *Config, filters []Filter) (f *DNSFilter, setts *Settings) {
<mask> setts = &Settings{
<mask> ProtectionEnabled: true,
<mask> FilteringEnabled: true,
<mask> }
</s> Pull request 1837: AG-21462-imp-safebrowsing-parental
Merge in DNS/adguard-home from AG-21462-imp-safebrowsing-parental to master
Squashed commit of the following:
commit 85016d4f1105e21a407efade0bd45b8362808061
Merge: 0e61edade 620b51e3e
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 16:36:30 2023 +0300
Merge branch 'master' into AG-21462-imp-safebrowsing-parental
commit 0e61edadeff34f6305e941c1db94575c82f238d9
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 14:51:37 2023 +0300
filtering: imp tests
commit 994255514cc0f67dfe33d5a0892432e8924d1e36
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 11:13:19 2023 +0300
filtering: fix typo
commit 96d1069573171538333330d6af94ef0f4208a9c4
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 11:00:18 2023 +0300
filtering: imp code more
commit c2a5620b04c4a529eea69983f1520cd2bc82ea9b
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 26 19:13:26 2023 +0300
all: add todo
commit e5dcc2e9701f8bccfde6ef8c01a4a2e7eb31599e
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 26 14:36:08 2023 +0300
all: imp code more
commit b6e734ccbeda82669023f6578481260b7c1f7161
Author: Stanislav Chzhen <[email protected]>
Date: Tue Apr 25 15:01:56 2023 +0300
filtering: imp code
commit 530648dadf836c1a4bd9917e0d3b47256fa8ff52
Author: Stanislav Chzhen <[email protected]>
Date: Mon Apr 24 20:06:36 2023 +0300
all: imp code
commit 49fa6e587052a40bb431fea457701ee860493527
Author: Stanislav Chzhen <[email protected]>
Date: Mon Apr 24 14:57:19 2023 +0300
all: rm safe browsing ctx
commit bbcb66cb03e18fa875e3c33cf16295892739e507
Author: Stanislav Chzhen <[email protected]>
Date: Fri Apr 21 17:54:18 2023 +0300
filtering: add cache item
commit cb7c9fffe8c4ff5e7a21ca912c223c799f61385f
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 20 18:43:02 2023 +0300
filtering: fix hashes
commit 153fec46270212af03f3631bfb42c5d680c4e142
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 20 16:15:15 2023 +0300
filtering: add test cases
commit 09372f92bbb1fc082f1b1283594ee589100209c5
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 20 15:38:05 2023 +0300
filtering: imp code
commit 466bc26d524ea6d1c3efb33692a7785d39e491ca
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 19 18:38:40 2023 +0300
filtering: add tests
commit 24365ecf8c60512fdac65833ee603c80864ae018
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 19 11:38:57 2023 +0300
filtering: add hashprefix </s> remove refreshLock: &sync.Mutex{},
filterTitleRegexp: regexp.MustCompile(`^! Title: +(.*)$`),
</s> add refreshLock: &sync.Mutex{},
filterTitleRegexp: regexp.MustCompile(`^! Title: +(.*)$`),
safeBrowsingChecker: c.SafeBrowsingChecker,
parentalControlChecker: c.ParentalControlChecker, </s> remove // TODO(a.garipov): Make configurable.
const (
dnsTimeout = 3 * time.Second
defaultSafebrowsingServer = `https://family.adguard-dns.com/dns-query`
defaultParentalServer = `https://family.adguard-dns.com/dns-query`
sbTXTSuffix = `sb.dns.adguard.com.`
pcTXTSuffix = `pc.dns.adguard.com.`
)
// SetParentalUpstream sets the parental upstream for *DNSFilter.
//
// TODO(e.burkov): Remove this in v1 API to forbid the direct access.
func (d *DNSFilter) SetParentalUpstream(u upstream.Upstream) {
d.parentalUpstream = u
}
// SetSafeBrowsingUpstream sets the safe browsing upstream for *DNSFilter.
//
// TODO(e.burkov): Remove this in v1 API to forbid the direct access.
func (d *DNSFilter) SetSafeBrowsingUpstream(u upstream.Upstream) {
d.safeBrowsingUpstream = u
}
func (d *DNSFilter) initSecurityServices() error {
var err error
d.safeBrowsingServer = defaultSafebrowsingServer
d.parentalServer = defaultParentalServer
opts := &upstream.Options{
Timeout: dnsTimeout,
ServerIPAddrs: []net.IP{
{94, 140, 14, 15},
{94, 140, 15, 16},
net.ParseIP("2a10:50c0::bad1:ff"),
net.ParseIP("2a10:50c0::bad2:ff"),
},
}
parUps, err := upstream.AddressToUpstream(d.parentalServer, opts)
if err != nil {
return fmt.Errorf("converting parental server: %w", err)
}
d.SetParentalUpstream(parUps)
sbUps, err := upstream.AddressToUpstream(d.safeBrowsingServer, opts)
if err != nil {
return fmt.Errorf("converting safe browsing server: %w", err)
}
d.SetSafeBrowsingUpstream(sbUps)
return nil
}
/*
expire byte[4]
hash byte[32]
...
*/
func (c *sbCtx) setCache(prefix, hashes []byte) {
d := make([]byte, 4+len(hashes))
expire := uint(time.Now().Unix()) + c.cacheTime*60
binary.BigEndian.PutUint32(d[:4], uint32(expire))
copy(d[4:], hashes)
c.cache.Set(prefix, d)
log.Debug("%s: stored in cache: %v", c.svc, prefix)
}
// findInHash returns 32-byte hash if it's found in hashToHost.
func (c *sbCtx) findInHash(val []byte) (hash32 [32]byte, found bool) {
for i := 4; i < len(val); i += 32 {
hash := val[i : i+32]
copy(hash32[:], hash[0:32])
_, found = c.hashToHost[hash32]
if found {
return hash32, found
}
}
return [32]byte{}, false
}
func (c *sbCtx) getCached() int {
now := time.Now().Unix()
hashesToRequest := map[[32]byte]string{}
for k, v := range c.hashToHost {
// nolint:looppointer // The subsilce is used for a safe cache lookup.
val := c.cache.Get(k[0:2])
if val == nil || now >= int64(binary.BigEndian.Uint32(val)) {
hashesToRequest[k] = v
continue
}
if hash32, found := c.findInHash(val); found {
log.Debug("%s: found in cache: %s: blocked by %v", c.svc, c.host, hash32)
return 1
}
}
if len(hashesToRequest) == 0 {
log.Debug("%s: found in cache: %s: not blocked", c.svc, c.host)
return -1
}
c.hashToHost = hashesToRequest
return 0
}
type sbCtx struct {
host string
svc string
hashToHost map[[32]byte]string
cache cache.Cache
cacheTime uint
}
func hostnameToHashes(host string) map[[32]byte]string {
hashes := map[[32]byte]string{}
tld, icann := publicsuffix.PublicSuffix(host)
if !icann {
// private suffixes like cloudfront.net
tld = ""
}
curhost := host
nDots := 0
for i := len(curhost) - 1; i >= 0; i-- {
if curhost[i] == '.' {
nDots++
if nDots == 4 {
curhost = curhost[i+1:] // "xxx.a.b.c.d" -> "a.b.c.d"
break
}
}
}
for {
if curhost == "" {
// we've reached end of string
break
}
if tld != "" && curhost == tld {
// we've reached the TLD, don't hash it
break
}
sum := sha256.Sum256([]byte(curhost))
hashes[sum] = curhost
pos := strings.IndexByte(curhost, byte('.'))
if pos < 0 {
break
}
curhost = curhost[pos+1:]
}
return hashes
}
// convert hash array to string
func (c *sbCtx) getQuestion() string {
b := &strings.Builder{}
for hash := range c.hashToHost {
// nolint:looppointer // The subsilce is used for safe hex encoding.
stringutil.WriteToBuilder(b, hex.EncodeToString(hash[0:2]), ".")
}
if c.svc == "SafeBrowsing" {
stringutil.WriteToBuilder(b, sbTXTSuffix)
return b.String()
}
stringutil.WriteToBuilder(b, pcTXTSuffix)
return b.String()
}
// Find the target hash in TXT response
func (c *sbCtx) processTXT(resp *dns.Msg) (bool, [][]byte) {
matched := false
hashes := [][]byte{}
for _, a := range resp.Answer {
txt, ok := a.(*dns.TXT)
if !ok {
continue
}
log.Debug("%s: received hashes for %s: %v", c.svc, c.host, txt.Txt)
for _, t := range txt.Txt {
if len(t) != 32*2 {
continue
}
hash, err := hex.DecodeString(t)
if err != nil {
continue
}
hashes = append(hashes, hash)
if !matched {
var hash32 [32]byte
copy(hash32[:], hash)
var hashHost string
hashHost, ok = c.hashToHost[hash32]
if ok {
log.Debug("%s: matched %s by %s/%s", c.svc, c.host, hashHost, t)
matched = true
}
}
}
}
return matched, hashes
}
func (c *sbCtx) storeCache(hashes [][]byte) {
slices.SortFunc(hashes, func(a, b []byte) (sortsBefore bool) {
return bytes.Compare(a, b) == -1
})
var curData []byte
var prevPrefix []byte
for i, hash := range hashes {
// nolint:looppointer // The subsilce is used for a safe comparison.
if !bytes.Equal(hash[0:2], prevPrefix) {
if i != 0 {
c.setCache(prevPrefix, curData)
curData = nil
}
prevPrefix = hashes[i][0:2]
}
curData = append(curData, hash...)
}
if len(prevPrefix) != 0 {
c.setCache(prevPrefix, curData)
}
for hash := range c.hashToHost {
// nolint:looppointer // The subsilce is used for a safe cache lookup.
prefix := hash[0:2]
val := c.cache.Get(prefix)
if val == nil {
c.setCache(prefix, nil)
}
}
}
func check(c *sbCtx, r Result, u upstream.Upstream) (Result, error) {
c.hashToHost = hostnameToHashes(c.host)
switch c.getCached() {
case -1:
return Result{}, nil
case 1:
return r, nil
}
question := c.getQuestion()
log.Tracef("%s: checking %s: %s", c.svc, c.host, question)
req := (&dns.Msg{}).SetQuestion(question, dns.TypeTXT)
resp, err := u.Exchange(req)
if err != nil {
return Result{}, err
}
matched, receivedHashes := c.processTXT(resp)
c.storeCache(receivedHashes)
if matched {
return r, nil
}
return Result{}, nil
}
</s> add </s> remove return check(sctx, res, d.safeBrowsingUpstream)
</s> add block, err := d.safeBrowsingChecker.Check(host)
if !block || err != nil {
return Result{}, err
}
return res, nil </s> remove purgeCaches(f)
</s> add </s> remove err = d.initSecurityServices()
if err != nil {
return nil, fmt.Errorf("initializing services: %s", err)
}
</s> add | [
"keep",
"keep",
"keep",
"keep",
"replace",
"replace",
"replace",
"replace",
"replace",
"replace",
"replace",
"replace",
"replace",
"replace",
"replace",
"keep",
"keep",
"keep",
"keep",
"keep"
] | https://github.com/AdguardTeam/AdGuardHome/commit/381f2f651d1a94d29bf534185e531b05a755f16c | internal/filtering/filtering_test.go |
|
<mask> }
<mask> f, err := New(c, filters)
<mask> require.NoError(t, err)
<mask>
<mask> purgeCaches(f)
<mask>
<mask> return f, setts
<mask> }
<mask>
<mask> func (d *DNSFilter) checkMatch(t *testing.T, hostname string, setts *Settings) {
<mask> t.Helper()
</s> Pull request 1837: AG-21462-imp-safebrowsing-parental
Merge in DNS/adguard-home from AG-21462-imp-safebrowsing-parental to master
Squashed commit of the following:
commit 85016d4f1105e21a407efade0bd45b8362808061
Merge: 0e61edade 620b51e3e
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 16:36:30 2023 +0300
Merge branch 'master' into AG-21462-imp-safebrowsing-parental
commit 0e61edadeff34f6305e941c1db94575c82f238d9
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 14:51:37 2023 +0300
filtering: imp tests
commit 994255514cc0f67dfe33d5a0892432e8924d1e36
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 11:13:19 2023 +0300
filtering: fix typo
commit 96d1069573171538333330d6af94ef0f4208a9c4
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 11:00:18 2023 +0300
filtering: imp code more
commit c2a5620b04c4a529eea69983f1520cd2bc82ea9b
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 26 19:13:26 2023 +0300
all: add todo
commit e5dcc2e9701f8bccfde6ef8c01a4a2e7eb31599e
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 26 14:36:08 2023 +0300
all: imp code more
commit b6e734ccbeda82669023f6578481260b7c1f7161
Author: Stanislav Chzhen <[email protected]>
Date: Tue Apr 25 15:01:56 2023 +0300
filtering: imp code
commit 530648dadf836c1a4bd9917e0d3b47256fa8ff52
Author: Stanislav Chzhen <[email protected]>
Date: Mon Apr 24 20:06:36 2023 +0300
all: imp code
commit 49fa6e587052a40bb431fea457701ee860493527
Author: Stanislav Chzhen <[email protected]>
Date: Mon Apr 24 14:57:19 2023 +0300
all: rm safe browsing ctx
commit bbcb66cb03e18fa875e3c33cf16295892739e507
Author: Stanislav Chzhen <[email protected]>
Date: Fri Apr 21 17:54:18 2023 +0300
filtering: add cache item
commit cb7c9fffe8c4ff5e7a21ca912c223c799f61385f
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 20 18:43:02 2023 +0300
filtering: fix hashes
commit 153fec46270212af03f3631bfb42c5d680c4e142
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 20 16:15:15 2023 +0300
filtering: add test cases
commit 09372f92bbb1fc082f1b1283594ee589100209c5
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 20 15:38:05 2023 +0300
filtering: imp code
commit 466bc26d524ea6d1c3efb33692a7785d39e491ca
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 19 18:38:40 2023 +0300
filtering: add tests
commit 24365ecf8c60512fdac65833ee603c80864ae018
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 19 11:38:57 2023 +0300
filtering: add hashprefix </s> remove return check(sctx, res, d.safeBrowsingUpstream)
</s> add block, err := d.safeBrowsingChecker.Check(host)
if !block || err != nil {
return Result{}, err
}
return res, nil </s> remove func purgeCaches(d *DNSFilter) {
for _, c := range []cache.Cache{
d.safebrowsingCache,
d.parentalCache,
} {
if c != nil {
c.Clear()
}
}
}
</s> add </s> remove // TODO(a.garipov): Make configurable.
const (
dnsTimeout = 3 * time.Second
defaultSafebrowsingServer = `https://family.adguard-dns.com/dns-query`
defaultParentalServer = `https://family.adguard-dns.com/dns-query`
sbTXTSuffix = `sb.dns.adguard.com.`
pcTXTSuffix = `pc.dns.adguard.com.`
)
// SetParentalUpstream sets the parental upstream for *DNSFilter.
//
// TODO(e.burkov): Remove this in v1 API to forbid the direct access.
func (d *DNSFilter) SetParentalUpstream(u upstream.Upstream) {
d.parentalUpstream = u
}
// SetSafeBrowsingUpstream sets the safe browsing upstream for *DNSFilter.
//
// TODO(e.burkov): Remove this in v1 API to forbid the direct access.
func (d *DNSFilter) SetSafeBrowsingUpstream(u upstream.Upstream) {
d.safeBrowsingUpstream = u
}
func (d *DNSFilter) initSecurityServices() error {
var err error
d.safeBrowsingServer = defaultSafebrowsingServer
d.parentalServer = defaultParentalServer
opts := &upstream.Options{
Timeout: dnsTimeout,
ServerIPAddrs: []net.IP{
{94, 140, 14, 15},
{94, 140, 15, 16},
net.ParseIP("2a10:50c0::bad1:ff"),
net.ParseIP("2a10:50c0::bad2:ff"),
},
}
parUps, err := upstream.AddressToUpstream(d.parentalServer, opts)
if err != nil {
return fmt.Errorf("converting parental server: %w", err)
}
d.SetParentalUpstream(parUps)
sbUps, err := upstream.AddressToUpstream(d.safeBrowsingServer, opts)
if err != nil {
return fmt.Errorf("converting safe browsing server: %w", err)
}
d.SetSafeBrowsingUpstream(sbUps)
return nil
}
/*
expire byte[4]
hash byte[32]
...
*/
func (c *sbCtx) setCache(prefix, hashes []byte) {
d := make([]byte, 4+len(hashes))
expire := uint(time.Now().Unix()) + c.cacheTime*60
binary.BigEndian.PutUint32(d[:4], uint32(expire))
copy(d[4:], hashes)
c.cache.Set(prefix, d)
log.Debug("%s: stored in cache: %v", c.svc, prefix)
}
// findInHash returns 32-byte hash if it's found in hashToHost.
func (c *sbCtx) findInHash(val []byte) (hash32 [32]byte, found bool) {
for i := 4; i < len(val); i += 32 {
hash := val[i : i+32]
copy(hash32[:], hash[0:32])
_, found = c.hashToHost[hash32]
if found {
return hash32, found
}
}
return [32]byte{}, false
}
func (c *sbCtx) getCached() int {
now := time.Now().Unix()
hashesToRequest := map[[32]byte]string{}
for k, v := range c.hashToHost {
// nolint:looppointer // The subsilce is used for a safe cache lookup.
val := c.cache.Get(k[0:2])
if val == nil || now >= int64(binary.BigEndian.Uint32(val)) {
hashesToRequest[k] = v
continue
}
if hash32, found := c.findInHash(val); found {
log.Debug("%s: found in cache: %s: blocked by %v", c.svc, c.host, hash32)
return 1
}
}
if len(hashesToRequest) == 0 {
log.Debug("%s: found in cache: %s: not blocked", c.svc, c.host)
return -1
}
c.hashToHost = hashesToRequest
return 0
}
type sbCtx struct {
host string
svc string
hashToHost map[[32]byte]string
cache cache.Cache
cacheTime uint
}
func hostnameToHashes(host string) map[[32]byte]string {
hashes := map[[32]byte]string{}
tld, icann := publicsuffix.PublicSuffix(host)
if !icann {
// private suffixes like cloudfront.net
tld = ""
}
curhost := host
nDots := 0
for i := len(curhost) - 1; i >= 0; i-- {
if curhost[i] == '.' {
nDots++
if nDots == 4 {
curhost = curhost[i+1:] // "xxx.a.b.c.d" -> "a.b.c.d"
break
}
}
}
for {
if curhost == "" {
// we've reached end of string
break
}
if tld != "" && curhost == tld {
// we've reached the TLD, don't hash it
break
}
sum := sha256.Sum256([]byte(curhost))
hashes[sum] = curhost
pos := strings.IndexByte(curhost, byte('.'))
if pos < 0 {
break
}
curhost = curhost[pos+1:]
}
return hashes
}
// convert hash array to string
func (c *sbCtx) getQuestion() string {
b := &strings.Builder{}
for hash := range c.hashToHost {
// nolint:looppointer // The subsilce is used for safe hex encoding.
stringutil.WriteToBuilder(b, hex.EncodeToString(hash[0:2]), ".")
}
if c.svc == "SafeBrowsing" {
stringutil.WriteToBuilder(b, sbTXTSuffix)
return b.String()
}
stringutil.WriteToBuilder(b, pcTXTSuffix)
return b.String()
}
// Find the target hash in TXT response
func (c *sbCtx) processTXT(resp *dns.Msg) (bool, [][]byte) {
matched := false
hashes := [][]byte{}
for _, a := range resp.Answer {
txt, ok := a.(*dns.TXT)
if !ok {
continue
}
log.Debug("%s: received hashes for %s: %v", c.svc, c.host, txt.Txt)
for _, t := range txt.Txt {
if len(t) != 32*2 {
continue
}
hash, err := hex.DecodeString(t)
if err != nil {
continue
}
hashes = append(hashes, hash)
if !matched {
var hash32 [32]byte
copy(hash32[:], hash)
var hashHost string
hashHost, ok = c.hashToHost[hash32]
if ok {
log.Debug("%s: matched %s by %s/%s", c.svc, c.host, hashHost, t)
matched = true
}
}
}
}
return matched, hashes
}
func (c *sbCtx) storeCache(hashes [][]byte) {
slices.SortFunc(hashes, func(a, b []byte) (sortsBefore bool) {
return bytes.Compare(a, b) == -1
})
var curData []byte
var prevPrefix []byte
for i, hash := range hashes {
// nolint:looppointer // The subsilce is used for a safe comparison.
if !bytes.Equal(hash[0:2], prevPrefix) {
if i != 0 {
c.setCache(prevPrefix, curData)
curData = nil
}
prevPrefix = hashes[i][0:2]
}
curData = append(curData, hash...)
}
if len(prevPrefix) != 0 {
c.setCache(prevPrefix, curData)
}
for hash := range c.hashToHost {
// nolint:looppointer // The subsilce is used for a safe cache lookup.
prefix := hash[0:2]
val := c.cache.Get(prefix)
if val == nil {
c.setCache(prefix, nil)
}
}
}
func check(c *sbCtx, r Result, u upstream.Upstream) (Result, error) {
c.hashToHost = hostnameToHashes(c.host)
switch c.getCached() {
case -1:
return Result{}, nil
case 1:
return r, nil
}
question := c.getQuestion()
log.Tracef("%s: checking %s: %s", c.svc, c.host, question)
req := (&dns.Msg{}).SetQuestion(question, dns.TypeTXT)
resp, err := u.Exchange(req)
if err != nil {
return Result{}, err
}
matched, receivedHashes := c.processTXT(resp)
c.storeCache(receivedHashes)
if matched {
return r, nil
}
return Result{}, nil
}
</s> add </s> remove d.SetSafeBrowsingUpstream(aghtest.NewBlockUpstream(sbBlocked, true))
</s> add | [
"keep",
"keep",
"keep",
"keep",
"replace",
"replace",
"keep",
"keep",
"keep",
"keep",
"keep"
] | https://github.com/AdguardTeam/AdGuardHome/commit/381f2f651d1a94d29bf534185e531b05a755f16c | internal/filtering/filtering_test.go |
|
func newChecker(host string) Checker {
return hashprefix.New(&hashprefix.Config{
CacheTime: 10,
CacheSize: 100000,
Upstream: aghtest.NewBlockUpstream(host, true),
})
}
| <mask> }
<mask>
<mask> func (d *DNSFilter) checkMatch(t *testing.T, hostname string, setts *Settings) {
<mask> t.Helper()
<mask>
<mask> res, err := d.CheckHost(hostname, dns.TypeA, setts)
</s> Pull request 1837: AG-21462-imp-safebrowsing-parental
Merge in DNS/adguard-home from AG-21462-imp-safebrowsing-parental to master
Squashed commit of the following:
commit 85016d4f1105e21a407efade0bd45b8362808061
Merge: 0e61edade 620b51e3e
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 16:36:30 2023 +0300
Merge branch 'master' into AG-21462-imp-safebrowsing-parental
commit 0e61edadeff34f6305e941c1db94575c82f238d9
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 14:51:37 2023 +0300
filtering: imp tests
commit 994255514cc0f67dfe33d5a0892432e8924d1e36
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 11:13:19 2023 +0300
filtering: fix typo
commit 96d1069573171538333330d6af94ef0f4208a9c4
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 11:00:18 2023 +0300
filtering: imp code more
commit c2a5620b04c4a529eea69983f1520cd2bc82ea9b
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 26 19:13:26 2023 +0300
all: add todo
commit e5dcc2e9701f8bccfde6ef8c01a4a2e7eb31599e
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 26 14:36:08 2023 +0300
all: imp code more
commit b6e734ccbeda82669023f6578481260b7c1f7161
Author: Stanislav Chzhen <[email protected]>
Date: Tue Apr 25 15:01:56 2023 +0300
filtering: imp code
commit 530648dadf836c1a4bd9917e0d3b47256fa8ff52
Author: Stanislav Chzhen <[email protected]>
Date: Mon Apr 24 20:06:36 2023 +0300
all: imp code
commit 49fa6e587052a40bb431fea457701ee860493527
Author: Stanislav Chzhen <[email protected]>
Date: Mon Apr 24 14:57:19 2023 +0300
all: rm safe browsing ctx
commit bbcb66cb03e18fa875e3c33cf16295892739e507
Author: Stanislav Chzhen <[email protected]>
Date: Fri Apr 21 17:54:18 2023 +0300
filtering: add cache item
commit cb7c9fffe8c4ff5e7a21ca912c223c799f61385f
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 20 18:43:02 2023 +0300
filtering: fix hashes
commit 153fec46270212af03f3631bfb42c5d680c4e142
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 20 16:15:15 2023 +0300
filtering: add test cases
commit 09372f92bbb1fc082f1b1283594ee589100209c5
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 20 15:38:05 2023 +0300
filtering: imp code
commit 466bc26d524ea6d1c3efb33692a7785d39e491ca
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 19 18:38:40 2023 +0300
filtering: add tests
commit 24365ecf8c60512fdac65833ee603c80864ae018
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 19 11:38:57 2023 +0300
filtering: add hashprefix </s> remove purgeCaches(f)
</s> add </s> remove return check(sctx, res, d.safeBrowsingUpstream)
</s> add block, err := d.safeBrowsingChecker.Check(host)
if !block || err != nil {
return Result{}, err
}
return res, nil </s> remove d.SetSafeBrowsingUpstream(aghtest.NewBlockUpstream(sbBlocked, true))
</s> add </s> remove d.SetSafeBrowsingUpstream(aghtest.NewBlockUpstream(sbBlocked, true))
</s> add </s> remove func purgeCaches(d *DNSFilter) {
for _, c := range []cache.Cache{
d.safebrowsingCache,
d.parentalCache,
} {
if c != nil {
c.Clear()
}
}
}
</s> add | [
"keep",
"add",
"keep",
"keep",
"keep",
"keep"
] | https://github.com/AdguardTeam/AdGuardHome/commit/381f2f651d1a94d29bf534185e531b05a755f16c | internal/filtering/filtering_test.go |
sbChecker := newChecker(sbBlocked)
d, setts := newForTest(t, &Config{
SafeBrowsingEnabled: true,
SafeBrowsingChecker: sbChecker,
}, nil) | <mask> logOutput := &bytes.Buffer{}
<mask> aghtest.ReplaceLogWriter(t, logOutput)
<mask> aghtest.ReplaceLogLevel(t, log.DEBUG)
<mask>
<mask> d, setts := newForTest(t, &Config{SafeBrowsingEnabled: true}, nil)
<mask> t.Cleanup(d.Close)
<mask>
<mask> d.SetSafeBrowsingUpstream(aghtest.NewBlockUpstream(sbBlocked, true))
<mask> d.checkMatch(t, sbBlocked, setts)
<mask>
</s> Pull request 1837: AG-21462-imp-safebrowsing-parental
Merge in DNS/adguard-home from AG-21462-imp-safebrowsing-parental to master
Squashed commit of the following:
commit 85016d4f1105e21a407efade0bd45b8362808061
Merge: 0e61edade 620b51e3e
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 16:36:30 2023 +0300
Merge branch 'master' into AG-21462-imp-safebrowsing-parental
commit 0e61edadeff34f6305e941c1db94575c82f238d9
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 14:51:37 2023 +0300
filtering: imp tests
commit 994255514cc0f67dfe33d5a0892432e8924d1e36
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 11:13:19 2023 +0300
filtering: fix typo
commit 96d1069573171538333330d6af94ef0f4208a9c4
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 11:00:18 2023 +0300
filtering: imp code more
commit c2a5620b04c4a529eea69983f1520cd2bc82ea9b
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 26 19:13:26 2023 +0300
all: add todo
commit e5dcc2e9701f8bccfde6ef8c01a4a2e7eb31599e
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 26 14:36:08 2023 +0300
all: imp code more
commit b6e734ccbeda82669023f6578481260b7c1f7161
Author: Stanislav Chzhen <[email protected]>
Date: Tue Apr 25 15:01:56 2023 +0300
filtering: imp code
commit 530648dadf836c1a4bd9917e0d3b47256fa8ff52
Author: Stanislav Chzhen <[email protected]>
Date: Mon Apr 24 20:06:36 2023 +0300
all: imp code
commit 49fa6e587052a40bb431fea457701ee860493527
Author: Stanislav Chzhen <[email protected]>
Date: Mon Apr 24 14:57:19 2023 +0300
all: rm safe browsing ctx
commit bbcb66cb03e18fa875e3c33cf16295892739e507
Author: Stanislav Chzhen <[email protected]>
Date: Fri Apr 21 17:54:18 2023 +0300
filtering: add cache item
commit cb7c9fffe8c4ff5e7a21ca912c223c799f61385f
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 20 18:43:02 2023 +0300
filtering: fix hashes
commit 153fec46270212af03f3631bfb42c5d680c4e142
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 20 16:15:15 2023 +0300
filtering: add test cases
commit 09372f92bbb1fc082f1b1283594ee589100209c5
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 20 15:38:05 2023 +0300
filtering: imp code
commit 466bc26d524ea6d1c3efb33692a7785d39e491ca
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 19 18:38:40 2023 +0300
filtering: add tests
commit 24365ecf8c60512fdac65833ee603c80864ae018
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 19 11:38:57 2023 +0300
filtering: add hashprefix </s> remove d, setts := newForTest(t, &Config{ParentalEnabled: true}, nil)
</s> add d, setts := newForTest(t, &Config{
ParentalEnabled: true,
ParentalControlChecker: newChecker(pcBlocked),
}, nil) </s> remove d.SetSafeBrowsingUpstream(aghtest.NewBlockUpstream(sbBlocked, true))
</s> add </s> remove d.SetSafeBrowsingUpstream(aghtest.NewBlockUpstream(sbBlocked, true))
</s> add </s> remove d.safeBrowsingServer = defaultSafebrowsingServer
</s> add </s> remove d.SetParentalUpstream(aghtest.NewBlockUpstream(pcBlocked, true))
</s> add | [
"keep",
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep",
"keep",
"keep"
] | https://github.com/AdguardTeam/AdGuardHome/commit/381f2f651d1a94d29bf534185e531b05a755f16c | internal/filtering/filtering_test.go |
<mask>
<mask> d, setts := newForTest(t, &Config{SafeBrowsingEnabled: true}, nil)
<mask> t.Cleanup(d.Close)
<mask>
<mask> d.SetSafeBrowsingUpstream(aghtest.NewBlockUpstream(sbBlocked, true))
<mask> d.checkMatch(t, sbBlocked, setts)
<mask>
<mask> require.Contains(t, logOutput.String(), fmt.Sprintf("safebrowsing lookup for %q", sbBlocked))
<mask>
<mask> d.checkMatch(t, "test."+sbBlocked, setts)
</s> Pull request 1837: AG-21462-imp-safebrowsing-parental
Merge in DNS/adguard-home from AG-21462-imp-safebrowsing-parental to master
Squashed commit of the following:
commit 85016d4f1105e21a407efade0bd45b8362808061
Merge: 0e61edade 620b51e3e
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 16:36:30 2023 +0300
Merge branch 'master' into AG-21462-imp-safebrowsing-parental
commit 0e61edadeff34f6305e941c1db94575c82f238d9
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 14:51:37 2023 +0300
filtering: imp tests
commit 994255514cc0f67dfe33d5a0892432e8924d1e36
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 11:13:19 2023 +0300
filtering: fix typo
commit 96d1069573171538333330d6af94ef0f4208a9c4
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 11:00:18 2023 +0300
filtering: imp code more
commit c2a5620b04c4a529eea69983f1520cd2bc82ea9b
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 26 19:13:26 2023 +0300
all: add todo
commit e5dcc2e9701f8bccfde6ef8c01a4a2e7eb31599e
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 26 14:36:08 2023 +0300
all: imp code more
commit b6e734ccbeda82669023f6578481260b7c1f7161
Author: Stanislav Chzhen <[email protected]>
Date: Tue Apr 25 15:01:56 2023 +0300
filtering: imp code
commit 530648dadf836c1a4bd9917e0d3b47256fa8ff52
Author: Stanislav Chzhen <[email protected]>
Date: Mon Apr 24 20:06:36 2023 +0300
all: imp code
commit 49fa6e587052a40bb431fea457701ee860493527
Author: Stanislav Chzhen <[email protected]>
Date: Mon Apr 24 14:57:19 2023 +0300
all: rm safe browsing ctx
commit bbcb66cb03e18fa875e3c33cf16295892739e507
Author: Stanislav Chzhen <[email protected]>
Date: Fri Apr 21 17:54:18 2023 +0300
filtering: add cache item
commit cb7c9fffe8c4ff5e7a21ca912c223c799f61385f
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 20 18:43:02 2023 +0300
filtering: fix hashes
commit 153fec46270212af03f3631bfb42c5d680c4e142
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 20 16:15:15 2023 +0300
filtering: add test cases
commit 09372f92bbb1fc082f1b1283594ee589100209c5
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 20 15:38:05 2023 +0300
filtering: imp code
commit 466bc26d524ea6d1c3efb33692a7785d39e491ca
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 19 18:38:40 2023 +0300
filtering: add tests
commit 24365ecf8c60512fdac65833ee603c80864ae018
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 19 11:38:57 2023 +0300
filtering: add hashprefix </s> remove d.SetParentalUpstream(aghtest.NewBlockUpstream(pcBlocked, true))
</s> add </s> remove d, setts := newForTest(t, &Config{ParentalEnabled: true}, nil)
</s> add d, setts := newForTest(t, &Config{
ParentalEnabled: true,
ParentalControlChecker: newChecker(pcBlocked),
}, nil) </s> remove d, setts := newForTest(t, &Config{SafeBrowsingEnabled: true}, nil)
</s> add sbChecker := newChecker(sbBlocked)
d, setts := newForTest(t, &Config{
SafeBrowsingEnabled: true,
SafeBrowsingChecker: sbChecker,
}, nil) </s> remove d.SetSafeBrowsingUpstream(aghtest.NewBlockUpstream(sbBlocked, true))
</s> add </s> remove d.safeBrowsingServer = defaultSafebrowsingServer
</s> add | [
"keep",
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep",
"keep",
"keep"
] | https://github.com/AdguardTeam/AdGuardHome/commit/381f2f651d1a94d29bf534185e531b05a755f16c | internal/filtering/filtering_test.go |
|
<mask> d.checkMatchEmpty(t, "yandex.ru", setts)
<mask> d.checkMatchEmpty(t, pcBlocked, setts)
<mask>
<mask> // Cached result.
<mask> d.safeBrowsingServer = "127.0.0.1"
<mask> d.checkMatch(t, sbBlocked, setts)
<mask> d.checkMatchEmpty(t, pcBlocked, setts)
<mask> d.safeBrowsingServer = defaultSafebrowsingServer
<mask> }
<mask>
</s> Pull request 1837: AG-21462-imp-safebrowsing-parental
Merge in DNS/adguard-home from AG-21462-imp-safebrowsing-parental to master
Squashed commit of the following:
commit 85016d4f1105e21a407efade0bd45b8362808061
Merge: 0e61edade 620b51e3e
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 16:36:30 2023 +0300
Merge branch 'master' into AG-21462-imp-safebrowsing-parental
commit 0e61edadeff34f6305e941c1db94575c82f238d9
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 14:51:37 2023 +0300
filtering: imp tests
commit 994255514cc0f67dfe33d5a0892432e8924d1e36
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 11:13:19 2023 +0300
filtering: fix typo
commit 96d1069573171538333330d6af94ef0f4208a9c4
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 11:00:18 2023 +0300
filtering: imp code more
commit c2a5620b04c4a529eea69983f1520cd2bc82ea9b
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 26 19:13:26 2023 +0300
all: add todo
commit e5dcc2e9701f8bccfde6ef8c01a4a2e7eb31599e
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 26 14:36:08 2023 +0300
all: imp code more
commit b6e734ccbeda82669023f6578481260b7c1f7161
Author: Stanislav Chzhen <[email protected]>
Date: Tue Apr 25 15:01:56 2023 +0300
filtering: imp code
commit 530648dadf836c1a4bd9917e0d3b47256fa8ff52
Author: Stanislav Chzhen <[email protected]>
Date: Mon Apr 24 20:06:36 2023 +0300
all: imp code
commit 49fa6e587052a40bb431fea457701ee860493527
Author: Stanislav Chzhen <[email protected]>
Date: Mon Apr 24 14:57:19 2023 +0300
all: rm safe browsing ctx
commit bbcb66cb03e18fa875e3c33cf16295892739e507
Author: Stanislav Chzhen <[email protected]>
Date: Fri Apr 21 17:54:18 2023 +0300
filtering: add cache item
commit cb7c9fffe8c4ff5e7a21ca912c223c799f61385f
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 20 18:43:02 2023 +0300
filtering: fix hashes
commit 153fec46270212af03f3631bfb42c5d680c4e142
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 20 16:15:15 2023 +0300
filtering: add test cases
commit 09372f92bbb1fc082f1b1283594ee589100209c5
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 20 15:38:05 2023 +0300
filtering: imp code
commit 466bc26d524ea6d1c3efb33692a7785d39e491ca
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 19 18:38:40 2023 +0300
filtering: add tests
commit 24365ecf8c60512fdac65833ee603c80864ae018
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 19 11:38:57 2023 +0300
filtering: add hashprefix </s> remove d.safeBrowsingServer = defaultSafebrowsingServer
</s> add </s> remove d.parentalServer = "127.0.0.1"
</s> add </s> remove d.SetParentalUpstream(aghtest.NewBlockUpstream(pcBlocked, true))
</s> add </s> remove d.SetSafeBrowsingUpstream(aghtest.NewBlockUpstream(sbBlocked, true))
</s> add </s> remove d, setts := newForTest(t, &Config{ParentalEnabled: true}, nil)
</s> add d, setts := newForTest(t, &Config{
ParentalEnabled: true,
ParentalControlChecker: newChecker(pcBlocked),
}, nil) | [
"keep",
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep",
"keep",
"keep"
] | https://github.com/AdguardTeam/AdGuardHome/commit/381f2f651d1a94d29bf534185e531b05a755f16c | internal/filtering/filtering_test.go |
|
<mask> // Cached result.
<mask> d.safeBrowsingServer = "127.0.0.1"
<mask> d.checkMatch(t, sbBlocked, setts)
<mask> d.checkMatchEmpty(t, pcBlocked, setts)
<mask> d.safeBrowsingServer = defaultSafebrowsingServer
<mask> }
<mask>
<mask> func TestParallelSB(t *testing.T) {
<mask> d, setts := newForTest(t, &Config{SafeBrowsingEnabled: true}, nil)
<mask> t.Cleanup(d.Close)
</s> Pull request 1837: AG-21462-imp-safebrowsing-parental
Merge in DNS/adguard-home from AG-21462-imp-safebrowsing-parental to master
Squashed commit of the following:
commit 85016d4f1105e21a407efade0bd45b8362808061
Merge: 0e61edade 620b51e3e
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 16:36:30 2023 +0300
Merge branch 'master' into AG-21462-imp-safebrowsing-parental
commit 0e61edadeff34f6305e941c1db94575c82f238d9
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 14:51:37 2023 +0300
filtering: imp tests
commit 994255514cc0f67dfe33d5a0892432e8924d1e36
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 11:13:19 2023 +0300
filtering: fix typo
commit 96d1069573171538333330d6af94ef0f4208a9c4
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 11:00:18 2023 +0300
filtering: imp code more
commit c2a5620b04c4a529eea69983f1520cd2bc82ea9b
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 26 19:13:26 2023 +0300
all: add todo
commit e5dcc2e9701f8bccfde6ef8c01a4a2e7eb31599e
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 26 14:36:08 2023 +0300
all: imp code more
commit b6e734ccbeda82669023f6578481260b7c1f7161
Author: Stanislav Chzhen <[email protected]>
Date: Tue Apr 25 15:01:56 2023 +0300
filtering: imp code
commit 530648dadf836c1a4bd9917e0d3b47256fa8ff52
Author: Stanislav Chzhen <[email protected]>
Date: Mon Apr 24 20:06:36 2023 +0300
all: imp code
commit 49fa6e587052a40bb431fea457701ee860493527
Author: Stanislav Chzhen <[email protected]>
Date: Mon Apr 24 14:57:19 2023 +0300
all: rm safe browsing ctx
commit bbcb66cb03e18fa875e3c33cf16295892739e507
Author: Stanislav Chzhen <[email protected]>
Date: Fri Apr 21 17:54:18 2023 +0300
filtering: add cache item
commit cb7c9fffe8c4ff5e7a21ca912c223c799f61385f
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 20 18:43:02 2023 +0300
filtering: fix hashes
commit 153fec46270212af03f3631bfb42c5d680c4e142
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 20 16:15:15 2023 +0300
filtering: add test cases
commit 09372f92bbb1fc082f1b1283594ee589100209c5
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 20 15:38:05 2023 +0300
filtering: imp code
commit 466bc26d524ea6d1c3efb33692a7785d39e491ca
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 19 18:38:40 2023 +0300
filtering: add tests
commit 24365ecf8c60512fdac65833ee603c80864ae018
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 19 11:38:57 2023 +0300
filtering: add hashprefix </s> remove d.safeBrowsingServer = "127.0.0.1"
</s> add </s> remove d, setts := newForTest(t, &Config{SafeBrowsingEnabled: true}, nil)
</s> add d, setts := newForTest(t, &Config{
SafeBrowsingEnabled: true,
SafeBrowsingChecker: newChecker(sbBlocked),
}, nil) </s> remove d.SetSafeBrowsingUpstream(aghtest.NewBlockUpstream(sbBlocked, true))
</s> add </s> remove d.parentalServer = "127.0.0.1"
</s> add </s> remove d.SetParentalUpstream(aghtest.NewBlockUpstream(pcBlocked, true))
</s> add | [
"keep",
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep",
"keep",
"keep"
] | https://github.com/AdguardTeam/AdGuardHome/commit/381f2f651d1a94d29bf534185e531b05a755f16c | internal/filtering/filtering_test.go |
|
d, setts := newForTest(t, &Config{
SafeBrowsingEnabled: true,
SafeBrowsingChecker: newChecker(sbBlocked),
}, nil) | <mask> d.safeBrowsingServer = defaultSafebrowsingServer
<mask> }
<mask>
<mask> func TestParallelSB(t *testing.T) {
<mask> d, setts := newForTest(t, &Config{SafeBrowsingEnabled: true}, nil)
<mask> t.Cleanup(d.Close)
<mask>
<mask> d.SetSafeBrowsingUpstream(aghtest.NewBlockUpstream(sbBlocked, true))
<mask>
<mask> t.Run("group", func(t *testing.T) {
</s> Pull request 1837: AG-21462-imp-safebrowsing-parental
Merge in DNS/adguard-home from AG-21462-imp-safebrowsing-parental to master
Squashed commit of the following:
commit 85016d4f1105e21a407efade0bd45b8362808061
Merge: 0e61edade 620b51e3e
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 16:36:30 2023 +0300
Merge branch 'master' into AG-21462-imp-safebrowsing-parental
commit 0e61edadeff34f6305e941c1db94575c82f238d9
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 14:51:37 2023 +0300
filtering: imp tests
commit 994255514cc0f67dfe33d5a0892432e8924d1e36
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 11:13:19 2023 +0300
filtering: fix typo
commit 96d1069573171538333330d6af94ef0f4208a9c4
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 11:00:18 2023 +0300
filtering: imp code more
commit c2a5620b04c4a529eea69983f1520cd2bc82ea9b
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 26 19:13:26 2023 +0300
all: add todo
commit e5dcc2e9701f8bccfde6ef8c01a4a2e7eb31599e
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 26 14:36:08 2023 +0300
all: imp code more
commit b6e734ccbeda82669023f6578481260b7c1f7161
Author: Stanislav Chzhen <[email protected]>
Date: Tue Apr 25 15:01:56 2023 +0300
filtering: imp code
commit 530648dadf836c1a4bd9917e0d3b47256fa8ff52
Author: Stanislav Chzhen <[email protected]>
Date: Mon Apr 24 20:06:36 2023 +0300
all: imp code
commit 49fa6e587052a40bb431fea457701ee860493527
Author: Stanislav Chzhen <[email protected]>
Date: Mon Apr 24 14:57:19 2023 +0300
all: rm safe browsing ctx
commit bbcb66cb03e18fa875e3c33cf16295892739e507
Author: Stanislav Chzhen <[email protected]>
Date: Fri Apr 21 17:54:18 2023 +0300
filtering: add cache item
commit cb7c9fffe8c4ff5e7a21ca912c223c799f61385f
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 20 18:43:02 2023 +0300
filtering: fix hashes
commit 153fec46270212af03f3631bfb42c5d680c4e142
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 20 16:15:15 2023 +0300
filtering: add test cases
commit 09372f92bbb1fc082f1b1283594ee589100209c5
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 20 15:38:05 2023 +0300
filtering: imp code
commit 466bc26d524ea6d1c3efb33692a7785d39e491ca
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 19 18:38:40 2023 +0300
filtering: add tests
commit 24365ecf8c60512fdac65833ee603c80864ae018
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 19 11:38:57 2023 +0300
filtering: add hashprefix </s> remove d.SetSafeBrowsingUpstream(aghtest.NewBlockUpstream(sbBlocked, true))
</s> add </s> remove d.safeBrowsingServer = defaultSafebrowsingServer
</s> add </s> remove d, setts := newForTest(b, &Config{SafeBrowsingEnabled: true}, nil)
</s> add d, setts := newForTest(b, &Config{
SafeBrowsingEnabled: true,
SafeBrowsingChecker: newChecker(sbBlocked),
}, nil) </s> remove d, setts := newForTest(t, &Config{SafeBrowsingEnabled: true}, nil)
</s> add sbChecker := newChecker(sbBlocked)
d, setts := newForTest(t, &Config{
SafeBrowsingEnabled: true,
SafeBrowsingChecker: sbChecker,
}, nil) </s> remove d.SetSafeBrowsingUpstream(aghtest.NewBlockUpstream(sbBlocked, true))
</s> add | [
"keep",
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep",
"keep",
"keep"
] | https://github.com/AdguardTeam/AdGuardHome/commit/381f2f651d1a94d29bf534185e531b05a755f16c | internal/filtering/filtering_test.go |
<mask> func TestParallelSB(t *testing.T) {
<mask> d, setts := newForTest(t, &Config{SafeBrowsingEnabled: true}, nil)
<mask> t.Cleanup(d.Close)
<mask>
<mask> d.SetSafeBrowsingUpstream(aghtest.NewBlockUpstream(sbBlocked, true))
<mask>
<mask> t.Run("group", func(t *testing.T) {
<mask> for i := 0; i < 100; i++ {
<mask> t.Run(fmt.Sprintf("aaa%d", i), func(t *testing.T) {
<mask> t.Parallel()
<mask> d.checkMatch(t, sbBlocked, setts)
</s> Pull request 1837: AG-21462-imp-safebrowsing-parental
Merge in DNS/adguard-home from AG-21462-imp-safebrowsing-parental to master
Squashed commit of the following:
commit 85016d4f1105e21a407efade0bd45b8362808061
Merge: 0e61edade 620b51e3e
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 16:36:30 2023 +0300
Merge branch 'master' into AG-21462-imp-safebrowsing-parental
commit 0e61edadeff34f6305e941c1db94575c82f238d9
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 14:51:37 2023 +0300
filtering: imp tests
commit 994255514cc0f67dfe33d5a0892432e8924d1e36
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 11:13:19 2023 +0300
filtering: fix typo
commit 96d1069573171538333330d6af94ef0f4208a9c4
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 11:00:18 2023 +0300
filtering: imp code more
commit c2a5620b04c4a529eea69983f1520cd2bc82ea9b
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 26 19:13:26 2023 +0300
all: add todo
commit e5dcc2e9701f8bccfde6ef8c01a4a2e7eb31599e
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 26 14:36:08 2023 +0300
all: imp code more
commit b6e734ccbeda82669023f6578481260b7c1f7161
Author: Stanislav Chzhen <[email protected]>
Date: Tue Apr 25 15:01:56 2023 +0300
filtering: imp code
commit 530648dadf836c1a4bd9917e0d3b47256fa8ff52
Author: Stanislav Chzhen <[email protected]>
Date: Mon Apr 24 20:06:36 2023 +0300
all: imp code
commit 49fa6e587052a40bb431fea457701ee860493527
Author: Stanislav Chzhen <[email protected]>
Date: Mon Apr 24 14:57:19 2023 +0300
all: rm safe browsing ctx
commit bbcb66cb03e18fa875e3c33cf16295892739e507
Author: Stanislav Chzhen <[email protected]>
Date: Fri Apr 21 17:54:18 2023 +0300
filtering: add cache item
commit cb7c9fffe8c4ff5e7a21ca912c223c799f61385f
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 20 18:43:02 2023 +0300
filtering: fix hashes
commit 153fec46270212af03f3631bfb42c5d680c4e142
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 20 16:15:15 2023 +0300
filtering: add test cases
commit 09372f92bbb1fc082f1b1283594ee589100209c5
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 20 15:38:05 2023 +0300
filtering: imp code
commit 466bc26d524ea6d1c3efb33692a7785d39e491ca
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 19 18:38:40 2023 +0300
filtering: add tests
commit 24365ecf8c60512fdac65833ee603c80864ae018
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 19 11:38:57 2023 +0300
filtering: add hashprefix </s> remove d, setts := newForTest(t, &Config{SafeBrowsingEnabled: true}, nil)
</s> add d, setts := newForTest(t, &Config{
SafeBrowsingEnabled: true,
SafeBrowsingChecker: newChecker(sbBlocked),
}, nil) </s> remove d.safeBrowsingServer = defaultSafebrowsingServer
</s> add </s> remove d.SetSafeBrowsingUpstream(aghtest.NewBlockUpstream(sbBlocked, true))
</s> add </s> remove d, setts := newForTest(b, &Config{SafeBrowsingEnabled: true}, nil)
</s> add d, setts := newForTest(b, &Config{
SafeBrowsingEnabled: true,
SafeBrowsingChecker: newChecker(sbBlocked),
}, nil) </s> remove d.SetSafeBrowsingUpstream(aghtest.NewBlockUpstream(sbBlocked, true))
</s> add | [
"keep",
"keep",
"keep",
"keep",
"replace",
"replace",
"keep",
"keep",
"keep",
"keep",
"keep"
] | https://github.com/AdguardTeam/AdGuardHome/commit/381f2f651d1a94d29bf534185e531b05a755f16c | internal/filtering/filtering_test.go |
|
d, setts := newForTest(t, &Config{
ParentalEnabled: true,
ParentalControlChecker: newChecker(pcBlocked),
}, nil) | <mask> logOutput := &bytes.Buffer{}
<mask> aghtest.ReplaceLogWriter(t, logOutput)
<mask> aghtest.ReplaceLogLevel(t, log.DEBUG)
<mask>
<mask> d, setts := newForTest(t, &Config{ParentalEnabled: true}, nil)
<mask> t.Cleanup(d.Close)
<mask>
<mask> d.SetParentalUpstream(aghtest.NewBlockUpstream(pcBlocked, true))
<mask> d.checkMatch(t, pcBlocked, setts)
<mask> require.Contains(t, logOutput.String(), fmt.Sprintf("parental lookup for %q", pcBlocked))
</s> Pull request 1837: AG-21462-imp-safebrowsing-parental
Merge in DNS/adguard-home from AG-21462-imp-safebrowsing-parental to master
Squashed commit of the following:
commit 85016d4f1105e21a407efade0bd45b8362808061
Merge: 0e61edade 620b51e3e
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 16:36:30 2023 +0300
Merge branch 'master' into AG-21462-imp-safebrowsing-parental
commit 0e61edadeff34f6305e941c1db94575c82f238d9
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 14:51:37 2023 +0300
filtering: imp tests
commit 994255514cc0f67dfe33d5a0892432e8924d1e36
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 11:13:19 2023 +0300
filtering: fix typo
commit 96d1069573171538333330d6af94ef0f4208a9c4
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 11:00:18 2023 +0300
filtering: imp code more
commit c2a5620b04c4a529eea69983f1520cd2bc82ea9b
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 26 19:13:26 2023 +0300
all: add todo
commit e5dcc2e9701f8bccfde6ef8c01a4a2e7eb31599e
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 26 14:36:08 2023 +0300
all: imp code more
commit b6e734ccbeda82669023f6578481260b7c1f7161
Author: Stanislav Chzhen <[email protected]>
Date: Tue Apr 25 15:01:56 2023 +0300
filtering: imp code
commit 530648dadf836c1a4bd9917e0d3b47256fa8ff52
Author: Stanislav Chzhen <[email protected]>
Date: Mon Apr 24 20:06:36 2023 +0300
all: imp code
commit 49fa6e587052a40bb431fea457701ee860493527
Author: Stanislav Chzhen <[email protected]>
Date: Mon Apr 24 14:57:19 2023 +0300
all: rm safe browsing ctx
commit bbcb66cb03e18fa875e3c33cf16295892739e507
Author: Stanislav Chzhen <[email protected]>
Date: Fri Apr 21 17:54:18 2023 +0300
filtering: add cache item
commit cb7c9fffe8c4ff5e7a21ca912c223c799f61385f
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 20 18:43:02 2023 +0300
filtering: fix hashes
commit 153fec46270212af03f3631bfb42c5d680c4e142
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 20 16:15:15 2023 +0300
filtering: add test cases
commit 09372f92bbb1fc082f1b1283594ee589100209c5
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 20 15:38:05 2023 +0300
filtering: imp code
commit 466bc26d524ea6d1c3efb33692a7785d39e491ca
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 19 18:38:40 2023 +0300
filtering: add tests
commit 24365ecf8c60512fdac65833ee603c80864ae018
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 19 11:38:57 2023 +0300
filtering: add hashprefix </s> remove d.SetParentalUpstream(aghtest.NewBlockUpstream(pcBlocked, true))
</s> add </s> remove d, setts := newForTest(t, &Config{SafeBrowsingEnabled: true}, nil)
</s> add sbChecker := newChecker(sbBlocked)
d, setts := newForTest(t, &Config{
SafeBrowsingEnabled: true,
SafeBrowsingChecker: sbChecker,
}, nil) </s> remove d.SetSafeBrowsingUpstream(aghtest.NewBlockUpstream(sbBlocked, true))
</s> add </s> remove d.safeBrowsingServer = defaultSafebrowsingServer
</s> add </s> remove d.SetSafeBrowsingUpstream(aghtest.NewBlockUpstream(sbBlocked, true))
</s> add | [
"keep",
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep",
"keep",
"keep"
] | https://github.com/AdguardTeam/AdGuardHome/commit/381f2f651d1a94d29bf534185e531b05a755f16c | internal/filtering/filtering_test.go |
<mask>
<mask> d, setts := newForTest(t, &Config{ParentalEnabled: true}, nil)
<mask> t.Cleanup(d.Close)
<mask>
<mask> d.SetParentalUpstream(aghtest.NewBlockUpstream(pcBlocked, true))
<mask> d.checkMatch(t, pcBlocked, setts)
<mask> require.Contains(t, logOutput.String(), fmt.Sprintf("parental lookup for %q", pcBlocked))
<mask>
<mask> d.checkMatch(t, "www."+pcBlocked, setts)
<mask> d.checkMatchEmpty(t, "www.yandex.ru", setts)
</s> Pull request 1837: AG-21462-imp-safebrowsing-parental
Merge in DNS/adguard-home from AG-21462-imp-safebrowsing-parental to master
Squashed commit of the following:
commit 85016d4f1105e21a407efade0bd45b8362808061
Merge: 0e61edade 620b51e3e
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 16:36:30 2023 +0300
Merge branch 'master' into AG-21462-imp-safebrowsing-parental
commit 0e61edadeff34f6305e941c1db94575c82f238d9
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 14:51:37 2023 +0300
filtering: imp tests
commit 994255514cc0f67dfe33d5a0892432e8924d1e36
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 11:13:19 2023 +0300
filtering: fix typo
commit 96d1069573171538333330d6af94ef0f4208a9c4
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 11:00:18 2023 +0300
filtering: imp code more
commit c2a5620b04c4a529eea69983f1520cd2bc82ea9b
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 26 19:13:26 2023 +0300
all: add todo
commit e5dcc2e9701f8bccfde6ef8c01a4a2e7eb31599e
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 26 14:36:08 2023 +0300
all: imp code more
commit b6e734ccbeda82669023f6578481260b7c1f7161
Author: Stanislav Chzhen <[email protected]>
Date: Tue Apr 25 15:01:56 2023 +0300
filtering: imp code
commit 530648dadf836c1a4bd9917e0d3b47256fa8ff52
Author: Stanislav Chzhen <[email protected]>
Date: Mon Apr 24 20:06:36 2023 +0300
all: imp code
commit 49fa6e587052a40bb431fea457701ee860493527
Author: Stanislav Chzhen <[email protected]>
Date: Mon Apr 24 14:57:19 2023 +0300
all: rm safe browsing ctx
commit bbcb66cb03e18fa875e3c33cf16295892739e507
Author: Stanislav Chzhen <[email protected]>
Date: Fri Apr 21 17:54:18 2023 +0300
filtering: add cache item
commit cb7c9fffe8c4ff5e7a21ca912c223c799f61385f
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 20 18:43:02 2023 +0300
filtering: fix hashes
commit 153fec46270212af03f3631bfb42c5d680c4e142
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 20 16:15:15 2023 +0300
filtering: add test cases
commit 09372f92bbb1fc082f1b1283594ee589100209c5
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 20 15:38:05 2023 +0300
filtering: imp code
commit 466bc26d524ea6d1c3efb33692a7785d39e491ca
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 19 18:38:40 2023 +0300
filtering: add tests
commit 24365ecf8c60512fdac65833ee603c80864ae018
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 19 11:38:57 2023 +0300
filtering: add hashprefix </s> remove d, setts := newForTest(t, &Config{ParentalEnabled: true}, nil)
</s> add d, setts := newForTest(t, &Config{
ParentalEnabled: true,
ParentalControlChecker: newChecker(pcBlocked),
}, nil) </s> remove d.SetSafeBrowsingUpstream(aghtest.NewBlockUpstream(sbBlocked, true))
</s> add </s> remove d.safeBrowsingServer = defaultSafebrowsingServer
</s> add </s> remove d, setts := newForTest(t, &Config{SafeBrowsingEnabled: true}, nil)
</s> add sbChecker := newChecker(sbBlocked)
d, setts := newForTest(t, &Config{
SafeBrowsingEnabled: true,
SafeBrowsingChecker: sbChecker,
}, nil) </s> remove d.safeBrowsingServer = "127.0.0.1"
</s> add | [
"keep",
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep",
"keep",
"keep"
] | https://github.com/AdguardTeam/AdGuardHome/commit/381f2f651d1a94d29bf534185e531b05a755f16c | internal/filtering/filtering_test.go |
|
<mask> d.checkMatchEmpty(t, "yandex.ru", setts)
<mask> d.checkMatchEmpty(t, "api.jquery.com", setts)
<mask>
<mask> // Test cached result.
<mask> d.parentalServer = "127.0.0.1"
<mask> d.checkMatch(t, pcBlocked, setts)
<mask> d.checkMatchEmpty(t, "yandex.ru", setts)
<mask> }
<mask>
<mask> // Filtering.
</s> Pull request 1837: AG-21462-imp-safebrowsing-parental
Merge in DNS/adguard-home from AG-21462-imp-safebrowsing-parental to master
Squashed commit of the following:
commit 85016d4f1105e21a407efade0bd45b8362808061
Merge: 0e61edade 620b51e3e
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 16:36:30 2023 +0300
Merge branch 'master' into AG-21462-imp-safebrowsing-parental
commit 0e61edadeff34f6305e941c1db94575c82f238d9
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 14:51:37 2023 +0300
filtering: imp tests
commit 994255514cc0f67dfe33d5a0892432e8924d1e36
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 11:13:19 2023 +0300
filtering: fix typo
commit 96d1069573171538333330d6af94ef0f4208a9c4
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 11:00:18 2023 +0300
filtering: imp code more
commit c2a5620b04c4a529eea69983f1520cd2bc82ea9b
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 26 19:13:26 2023 +0300
all: add todo
commit e5dcc2e9701f8bccfde6ef8c01a4a2e7eb31599e
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 26 14:36:08 2023 +0300
all: imp code more
commit b6e734ccbeda82669023f6578481260b7c1f7161
Author: Stanislav Chzhen <[email protected]>
Date: Tue Apr 25 15:01:56 2023 +0300
filtering: imp code
commit 530648dadf836c1a4bd9917e0d3b47256fa8ff52
Author: Stanislav Chzhen <[email protected]>
Date: Mon Apr 24 20:06:36 2023 +0300
all: imp code
commit 49fa6e587052a40bb431fea457701ee860493527
Author: Stanislav Chzhen <[email protected]>
Date: Mon Apr 24 14:57:19 2023 +0300
all: rm safe browsing ctx
commit bbcb66cb03e18fa875e3c33cf16295892739e507
Author: Stanislav Chzhen <[email protected]>
Date: Fri Apr 21 17:54:18 2023 +0300
filtering: add cache item
commit cb7c9fffe8c4ff5e7a21ca912c223c799f61385f
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 20 18:43:02 2023 +0300
filtering: fix hashes
commit 153fec46270212af03f3631bfb42c5d680c4e142
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 20 16:15:15 2023 +0300
filtering: add test cases
commit 09372f92bbb1fc082f1b1283594ee589100209c5
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 20 15:38:05 2023 +0300
filtering: imp code
commit 466bc26d524ea6d1c3efb33692a7785d39e491ca
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 19 18:38:40 2023 +0300
filtering: add tests
commit 24365ecf8c60512fdac65833ee603c80864ae018
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 19 11:38:57 2023 +0300
filtering: add hashprefix </s> remove d.safeBrowsingServer = "127.0.0.1"
</s> add </s> remove d.safeBrowsingServer = defaultSafebrowsingServer
</s> add </s> remove d.SetParentalUpstream(aghtest.NewBlockUpstream(pcBlocked, true))
</s> add </s> remove d.SetSafeBrowsingUpstream(aghtest.NewBlockUpstream(sbBlocked, true))
</s> add </s> remove d, setts := newForTest(t, &Config{ParentalEnabled: true}, nil)
</s> add d, setts := newForTest(t, &Config{
ParentalEnabled: true,
ParentalControlChecker: newChecker(pcBlocked),
}, nil) | [
"keep",
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep",
"keep",
"keep"
] | https://github.com/AdguardTeam/AdGuardHome/commit/381f2f651d1a94d29bf534185e531b05a755f16c | internal/filtering/filtering_test.go |
|
ParentalEnabled: true,
SafeBrowsingEnabled: false,
SafeBrowsingChecker: newChecker(sbBlocked),
ParentalControlChecker: newChecker(pcBlocked), | <mask>
<mask> func TestClientSettings(t *testing.T) {
<mask> d, setts := newForTest(t,
<mask> &Config{
<mask> ParentalEnabled: true,
<mask> SafeBrowsingEnabled: false,
<mask> },
<mask> []Filter{{
<mask> ID: 0, Data: []byte("||example.org^\n"),
<mask> }},
<mask> )
</s> Pull request 1837: AG-21462-imp-safebrowsing-parental
Merge in DNS/adguard-home from AG-21462-imp-safebrowsing-parental to master
Squashed commit of the following:
commit 85016d4f1105e21a407efade0bd45b8362808061
Merge: 0e61edade 620b51e3e
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 16:36:30 2023 +0300
Merge branch 'master' into AG-21462-imp-safebrowsing-parental
commit 0e61edadeff34f6305e941c1db94575c82f238d9
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 14:51:37 2023 +0300
filtering: imp tests
commit 994255514cc0f67dfe33d5a0892432e8924d1e36
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 11:13:19 2023 +0300
filtering: fix typo
commit 96d1069573171538333330d6af94ef0f4208a9c4
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 11:00:18 2023 +0300
filtering: imp code more
commit c2a5620b04c4a529eea69983f1520cd2bc82ea9b
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 26 19:13:26 2023 +0300
all: add todo
commit e5dcc2e9701f8bccfde6ef8c01a4a2e7eb31599e
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 26 14:36:08 2023 +0300
all: imp code more
commit b6e734ccbeda82669023f6578481260b7c1f7161
Author: Stanislav Chzhen <[email protected]>
Date: Tue Apr 25 15:01:56 2023 +0300
filtering: imp code
commit 530648dadf836c1a4bd9917e0d3b47256fa8ff52
Author: Stanislav Chzhen <[email protected]>
Date: Mon Apr 24 20:06:36 2023 +0300
all: imp code
commit 49fa6e587052a40bb431fea457701ee860493527
Author: Stanislav Chzhen <[email protected]>
Date: Mon Apr 24 14:57:19 2023 +0300
all: rm safe browsing ctx
commit bbcb66cb03e18fa875e3c33cf16295892739e507
Author: Stanislav Chzhen <[email protected]>
Date: Fri Apr 21 17:54:18 2023 +0300
filtering: add cache item
commit cb7c9fffe8c4ff5e7a21ca912c223c799f61385f
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 20 18:43:02 2023 +0300
filtering: fix hashes
commit 153fec46270212af03f3631bfb42c5d680c4e142
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 20 16:15:15 2023 +0300
filtering: add test cases
commit 09372f92bbb1fc082f1b1283594ee589100209c5
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 20 15:38:05 2023 +0300
filtering: imp code
commit 466bc26d524ea6d1c3efb33692a7785d39e491ca
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 19 18:38:40 2023 +0300
filtering: add tests
commit 24365ecf8c60512fdac65833ee603c80864ae018
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 19 11:38:57 2023 +0300
filtering: add hashprefix </s> remove d, setts := newForTest(t, &Config{SafeBrowsingEnabled: true}, nil)
</s> add d, setts := newForTest(t, &Config{
SafeBrowsingEnabled: true,
SafeBrowsingChecker: newChecker(sbBlocked),
}, nil) </s> remove d, setts := newForTest(t, &Config{ParentalEnabled: true}, nil)
</s> add d, setts := newForTest(t, &Config{
ParentalEnabled: true,
ParentalControlChecker: newChecker(pcBlocked),
}, nil) </s> remove d, setts := newForTest(t, &Config{SafeBrowsingEnabled: true}, nil)
</s> add sbChecker := newChecker(sbBlocked)
d, setts := newForTest(t, &Config{
SafeBrowsingEnabled: true,
SafeBrowsingChecker: sbChecker,
}, nil) </s> remove d, setts := newForTest(b, &Config{SafeBrowsingEnabled: true}, nil)
</s> add d, setts := newForTest(b, &Config{
SafeBrowsingEnabled: true,
SafeBrowsingChecker: newChecker(sbBlocked),
}, nil) </s> remove d, setts := newForTest(b, &Config{SafeBrowsingEnabled: true}, nil)
</s> add d, setts := newForTest(b, &Config{
SafeBrowsingEnabled: true,
SafeBrowsingChecker: newChecker(sbBlocked),
}, nil) | [
"keep",
"keep",
"keep",
"keep",
"replace",
"replace",
"keep",
"keep",
"keep",
"keep",
"keep"
] | https://github.com/AdguardTeam/AdGuardHome/commit/381f2f651d1a94d29bf534185e531b05a755f16c | internal/filtering/filtering_test.go |
<mask> }},
<mask> )
<mask> t.Cleanup(d.Close)
<mask>
<mask> d.SetParentalUpstream(aghtest.NewBlockUpstream(pcBlocked, true))
<mask> d.SetSafeBrowsingUpstream(aghtest.NewBlockUpstream(sbBlocked, true))
<mask>
<mask> type testCase struct {
<mask> name string
<mask> host string
<mask> before bool
<mask> wantReason Reason
</s> Pull request 1837: AG-21462-imp-safebrowsing-parental
Merge in DNS/adguard-home from AG-21462-imp-safebrowsing-parental to master
Squashed commit of the following:
commit 85016d4f1105e21a407efade0bd45b8362808061
Merge: 0e61edade 620b51e3e
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 16:36:30 2023 +0300
Merge branch 'master' into AG-21462-imp-safebrowsing-parental
commit 0e61edadeff34f6305e941c1db94575c82f238d9
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 14:51:37 2023 +0300
filtering: imp tests
commit 994255514cc0f67dfe33d5a0892432e8924d1e36
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 11:13:19 2023 +0300
filtering: fix typo
commit 96d1069573171538333330d6af94ef0f4208a9c4
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 11:00:18 2023 +0300
filtering: imp code more
commit c2a5620b04c4a529eea69983f1520cd2bc82ea9b
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 26 19:13:26 2023 +0300
all: add todo
commit e5dcc2e9701f8bccfde6ef8c01a4a2e7eb31599e
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 26 14:36:08 2023 +0300
all: imp code more
commit b6e734ccbeda82669023f6578481260b7c1f7161
Author: Stanislav Chzhen <[email protected]>
Date: Tue Apr 25 15:01:56 2023 +0300
filtering: imp code
commit 530648dadf836c1a4bd9917e0d3b47256fa8ff52
Author: Stanislav Chzhen <[email protected]>
Date: Mon Apr 24 20:06:36 2023 +0300
all: imp code
commit 49fa6e587052a40bb431fea457701ee860493527
Author: Stanislav Chzhen <[email protected]>
Date: Mon Apr 24 14:57:19 2023 +0300
all: rm safe browsing ctx
commit bbcb66cb03e18fa875e3c33cf16295892739e507
Author: Stanislav Chzhen <[email protected]>
Date: Fri Apr 21 17:54:18 2023 +0300
filtering: add cache item
commit cb7c9fffe8c4ff5e7a21ca912c223c799f61385f
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 20 18:43:02 2023 +0300
filtering: fix hashes
commit 153fec46270212af03f3631bfb42c5d680c4e142
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 20 16:15:15 2023 +0300
filtering: add test cases
commit 09372f92bbb1fc082f1b1283594ee589100209c5
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 20 15:38:05 2023 +0300
filtering: imp code
commit 466bc26d524ea6d1c3efb33692a7785d39e491ca
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 19 18:38:40 2023 +0300
filtering: add tests
commit 24365ecf8c60512fdac65833ee603c80864ae018
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 19 11:38:57 2023 +0300
filtering: add hashprefix </s> remove d.SetParentalUpstream(aghtest.NewBlockUpstream(pcBlocked, true))
</s> add </s> remove d.SetSafeBrowsingUpstream(aghtest.NewBlockUpstream(sbBlocked, true))
</s> add </s> remove d, setts := newForTest(t, &Config{SafeBrowsingEnabled: true}, nil)
</s> add d, setts := newForTest(t, &Config{
SafeBrowsingEnabled: true,
SafeBrowsingChecker: newChecker(sbBlocked),
}, nil) </s> remove d, setts := newForTest(t, &Config{ParentalEnabled: true}, nil)
</s> add d, setts := newForTest(t, &Config{
ParentalEnabled: true,
ParentalControlChecker: newChecker(pcBlocked),
}, nil) | [
"keep",
"keep",
"keep",
"keep",
"replace",
"replace",
"replace",
"keep",
"keep",
"keep",
"keep",
"keep"
] | https://github.com/AdguardTeam/AdGuardHome/commit/381f2f651d1a94d29bf534185e531b05a755f16c | internal/filtering/filtering_test.go |
|
d, setts := newForTest(b, &Config{
SafeBrowsingEnabled: true,
SafeBrowsingChecker: newChecker(sbBlocked),
}, nil) | <mask>
<mask> // Benchmarks.
<mask>
<mask> func BenchmarkSafeBrowsing(b *testing.B) {
<mask> d, setts := newForTest(b, &Config{SafeBrowsingEnabled: true}, nil)
<mask> b.Cleanup(d.Close)
<mask>
<mask> d.SetSafeBrowsingUpstream(aghtest.NewBlockUpstream(sbBlocked, true))
<mask>
<mask> for n := 0; n < b.N; n++ {
</s> Pull request 1837: AG-21462-imp-safebrowsing-parental
Merge in DNS/adguard-home from AG-21462-imp-safebrowsing-parental to master
Squashed commit of the following:
commit 85016d4f1105e21a407efade0bd45b8362808061
Merge: 0e61edade 620b51e3e
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 16:36:30 2023 +0300
Merge branch 'master' into AG-21462-imp-safebrowsing-parental
commit 0e61edadeff34f6305e941c1db94575c82f238d9
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 14:51:37 2023 +0300
filtering: imp tests
commit 994255514cc0f67dfe33d5a0892432e8924d1e36
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 11:13:19 2023 +0300
filtering: fix typo
commit 96d1069573171538333330d6af94ef0f4208a9c4
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 11:00:18 2023 +0300
filtering: imp code more
commit c2a5620b04c4a529eea69983f1520cd2bc82ea9b
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 26 19:13:26 2023 +0300
all: add todo
commit e5dcc2e9701f8bccfde6ef8c01a4a2e7eb31599e
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 26 14:36:08 2023 +0300
all: imp code more
commit b6e734ccbeda82669023f6578481260b7c1f7161
Author: Stanislav Chzhen <[email protected]>
Date: Tue Apr 25 15:01:56 2023 +0300
filtering: imp code
commit 530648dadf836c1a4bd9917e0d3b47256fa8ff52
Author: Stanislav Chzhen <[email protected]>
Date: Mon Apr 24 20:06:36 2023 +0300
all: imp code
commit 49fa6e587052a40bb431fea457701ee860493527
Author: Stanislav Chzhen <[email protected]>
Date: Mon Apr 24 14:57:19 2023 +0300
all: rm safe browsing ctx
commit bbcb66cb03e18fa875e3c33cf16295892739e507
Author: Stanislav Chzhen <[email protected]>
Date: Fri Apr 21 17:54:18 2023 +0300
filtering: add cache item
commit cb7c9fffe8c4ff5e7a21ca912c223c799f61385f
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 20 18:43:02 2023 +0300
filtering: fix hashes
commit 153fec46270212af03f3631bfb42c5d680c4e142
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 20 16:15:15 2023 +0300
filtering: add test cases
commit 09372f92bbb1fc082f1b1283594ee589100209c5
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 20 15:38:05 2023 +0300
filtering: imp code
commit 466bc26d524ea6d1c3efb33692a7785d39e491ca
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 19 18:38:40 2023 +0300
filtering: add tests
commit 24365ecf8c60512fdac65833ee603c80864ae018
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 19 11:38:57 2023 +0300
filtering: add hashprefix </s> remove d.SetSafeBrowsingUpstream(aghtest.NewBlockUpstream(sbBlocked, true))
</s> add </s> remove d, setts := newForTest(b, &Config{SafeBrowsingEnabled: true}, nil)
</s> add d, setts := newForTest(b, &Config{
SafeBrowsingEnabled: true,
SafeBrowsingChecker: newChecker(sbBlocked),
}, nil) </s> remove d.SetSafeBrowsingUpstream(aghtest.NewBlockUpstream(sbBlocked, true))
</s> add </s> remove d.SetSafeBrowsingUpstream(aghtest.NewBlockUpstream(sbBlocked, true))
</s> add </s> remove d, setts := newForTest(t, &Config{SafeBrowsingEnabled: true}, nil)
</s> add d, setts := newForTest(t, &Config{
SafeBrowsingEnabled: true,
SafeBrowsingChecker: newChecker(sbBlocked),
}, nil) | [
"keep",
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep",
"keep",
"keep"
] | https://github.com/AdguardTeam/AdGuardHome/commit/381f2f651d1a94d29bf534185e531b05a755f16c | internal/filtering/filtering_test.go |
<mask> func BenchmarkSafeBrowsing(b *testing.B) {
<mask> d, setts := newForTest(b, &Config{SafeBrowsingEnabled: true}, nil)
<mask> b.Cleanup(d.Close)
<mask>
<mask> d.SetSafeBrowsingUpstream(aghtest.NewBlockUpstream(sbBlocked, true))
<mask>
<mask> for n := 0; n < b.N; n++ {
<mask> res, err := d.CheckHost(sbBlocked, dns.TypeA, setts)
<mask> require.NoError(b, err)
<mask>
<mask> assert.Truef(b, res.IsFiltered, "expected hostname %q to match", sbBlocked)
</s> Pull request 1837: AG-21462-imp-safebrowsing-parental
Merge in DNS/adguard-home from AG-21462-imp-safebrowsing-parental to master
Squashed commit of the following:
commit 85016d4f1105e21a407efade0bd45b8362808061
Merge: 0e61edade 620b51e3e
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 16:36:30 2023 +0300
Merge branch 'master' into AG-21462-imp-safebrowsing-parental
commit 0e61edadeff34f6305e941c1db94575c82f238d9
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 14:51:37 2023 +0300
filtering: imp tests
commit 994255514cc0f67dfe33d5a0892432e8924d1e36
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 11:13:19 2023 +0300
filtering: fix typo
commit 96d1069573171538333330d6af94ef0f4208a9c4
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 11:00:18 2023 +0300
filtering: imp code more
commit c2a5620b04c4a529eea69983f1520cd2bc82ea9b
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 26 19:13:26 2023 +0300
all: add todo
commit e5dcc2e9701f8bccfde6ef8c01a4a2e7eb31599e
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 26 14:36:08 2023 +0300
all: imp code more
commit b6e734ccbeda82669023f6578481260b7c1f7161
Author: Stanislav Chzhen <[email protected]>
Date: Tue Apr 25 15:01:56 2023 +0300
filtering: imp code
commit 530648dadf836c1a4bd9917e0d3b47256fa8ff52
Author: Stanislav Chzhen <[email protected]>
Date: Mon Apr 24 20:06:36 2023 +0300
all: imp code
commit 49fa6e587052a40bb431fea457701ee860493527
Author: Stanislav Chzhen <[email protected]>
Date: Mon Apr 24 14:57:19 2023 +0300
all: rm safe browsing ctx
commit bbcb66cb03e18fa875e3c33cf16295892739e507
Author: Stanislav Chzhen <[email protected]>
Date: Fri Apr 21 17:54:18 2023 +0300
filtering: add cache item
commit cb7c9fffe8c4ff5e7a21ca912c223c799f61385f
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 20 18:43:02 2023 +0300
filtering: fix hashes
commit 153fec46270212af03f3631bfb42c5d680c4e142
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 20 16:15:15 2023 +0300
filtering: add test cases
commit 09372f92bbb1fc082f1b1283594ee589100209c5
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 20 15:38:05 2023 +0300
filtering: imp code
commit 466bc26d524ea6d1c3efb33692a7785d39e491ca
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 19 18:38:40 2023 +0300
filtering: add tests
commit 24365ecf8c60512fdac65833ee603c80864ae018
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 19 11:38:57 2023 +0300
filtering: add hashprefix </s> remove d, setts := newForTest(b, &Config{SafeBrowsingEnabled: true}, nil)
</s> add d, setts := newForTest(b, &Config{
SafeBrowsingEnabled: true,
SafeBrowsingChecker: newChecker(sbBlocked),
}, nil) </s> remove d.SetSafeBrowsingUpstream(aghtest.NewBlockUpstream(sbBlocked, true))
</s> add </s> remove d, setts := newForTest(b, &Config{SafeBrowsingEnabled: true}, nil)
</s> add d, setts := newForTest(b, &Config{
SafeBrowsingEnabled: true,
SafeBrowsingChecker: newChecker(sbBlocked),
}, nil) </s> remove d.SetSafeBrowsingUpstream(aghtest.NewBlockUpstream(sbBlocked, true))
</s> add | [
"keep",
"keep",
"keep",
"keep",
"replace",
"replace",
"keep",
"keep",
"keep",
"keep",
"keep"
] | https://github.com/AdguardTeam/AdGuardHome/commit/381f2f651d1a94d29bf534185e531b05a755f16c | internal/filtering/filtering_test.go |
|
d, setts := newForTest(b, &Config{
SafeBrowsingEnabled: true,
SafeBrowsingChecker: newChecker(sbBlocked),
}, nil) | <mask> }
<mask> }
<mask>
<mask> func BenchmarkSafeBrowsingParallel(b *testing.B) {
<mask> d, setts := newForTest(b, &Config{SafeBrowsingEnabled: true}, nil)
<mask> b.Cleanup(d.Close)
<mask>
<mask> d.SetSafeBrowsingUpstream(aghtest.NewBlockUpstream(sbBlocked, true))
<mask>
<mask> b.RunParallel(func(pb *testing.PB) {
</s> Pull request 1837: AG-21462-imp-safebrowsing-parental
Merge in DNS/adguard-home from AG-21462-imp-safebrowsing-parental to master
Squashed commit of the following:
commit 85016d4f1105e21a407efade0bd45b8362808061
Merge: 0e61edade 620b51e3e
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 16:36:30 2023 +0300
Merge branch 'master' into AG-21462-imp-safebrowsing-parental
commit 0e61edadeff34f6305e941c1db94575c82f238d9
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 14:51:37 2023 +0300
filtering: imp tests
commit 994255514cc0f67dfe33d5a0892432e8924d1e36
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 11:13:19 2023 +0300
filtering: fix typo
commit 96d1069573171538333330d6af94ef0f4208a9c4
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 11:00:18 2023 +0300
filtering: imp code more
commit c2a5620b04c4a529eea69983f1520cd2bc82ea9b
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 26 19:13:26 2023 +0300
all: add todo
commit e5dcc2e9701f8bccfde6ef8c01a4a2e7eb31599e
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 26 14:36:08 2023 +0300
all: imp code more
commit b6e734ccbeda82669023f6578481260b7c1f7161
Author: Stanislav Chzhen <[email protected]>
Date: Tue Apr 25 15:01:56 2023 +0300
filtering: imp code
commit 530648dadf836c1a4bd9917e0d3b47256fa8ff52
Author: Stanislav Chzhen <[email protected]>
Date: Mon Apr 24 20:06:36 2023 +0300
all: imp code
commit 49fa6e587052a40bb431fea457701ee860493527
Author: Stanislav Chzhen <[email protected]>
Date: Mon Apr 24 14:57:19 2023 +0300
all: rm safe browsing ctx
commit bbcb66cb03e18fa875e3c33cf16295892739e507
Author: Stanislav Chzhen <[email protected]>
Date: Fri Apr 21 17:54:18 2023 +0300
filtering: add cache item
commit cb7c9fffe8c4ff5e7a21ca912c223c799f61385f
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 20 18:43:02 2023 +0300
filtering: fix hashes
commit 153fec46270212af03f3631bfb42c5d680c4e142
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 20 16:15:15 2023 +0300
filtering: add test cases
commit 09372f92bbb1fc082f1b1283594ee589100209c5
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 20 15:38:05 2023 +0300
filtering: imp code
commit 466bc26d524ea6d1c3efb33692a7785d39e491ca
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 19 18:38:40 2023 +0300
filtering: add tests
commit 24365ecf8c60512fdac65833ee603c80864ae018
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 19 11:38:57 2023 +0300
filtering: add hashprefix </s> remove d.SetSafeBrowsingUpstream(aghtest.NewBlockUpstream(sbBlocked, true))
</s> add </s> remove d, setts := newForTest(b, &Config{SafeBrowsingEnabled: true}, nil)
</s> add d, setts := newForTest(b, &Config{
SafeBrowsingEnabled: true,
SafeBrowsingChecker: newChecker(sbBlocked),
}, nil) </s> remove d.SetSafeBrowsingUpstream(aghtest.NewBlockUpstream(sbBlocked, true))
</s> add </s> remove d, setts := newForTest(t, &Config{SafeBrowsingEnabled: true}, nil)
</s> add d, setts := newForTest(t, &Config{
SafeBrowsingEnabled: true,
SafeBrowsingChecker: newChecker(sbBlocked),
}, nil) </s> remove d.SetSafeBrowsingUpstream(aghtest.NewBlockUpstream(sbBlocked, true))
</s> add | [
"keep",
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep",
"keep",
"keep"
] | https://github.com/AdguardTeam/AdGuardHome/commit/381f2f651d1a94d29bf534185e531b05a755f16c | internal/filtering/filtering_test.go |
<mask> func BenchmarkSafeBrowsingParallel(b *testing.B) {
<mask> d, setts := newForTest(b, &Config{SafeBrowsingEnabled: true}, nil)
<mask> b.Cleanup(d.Close)
<mask>
<mask> d.SetSafeBrowsingUpstream(aghtest.NewBlockUpstream(sbBlocked, true))
<mask>
<mask> b.RunParallel(func(pb *testing.PB) {
<mask> for pb.Next() {
<mask> res, err := d.CheckHost(sbBlocked, dns.TypeA, setts)
<mask> require.NoError(b, err)
<mask>
</s> Pull request 1837: AG-21462-imp-safebrowsing-parental
Merge in DNS/adguard-home from AG-21462-imp-safebrowsing-parental to master
Squashed commit of the following:
commit 85016d4f1105e21a407efade0bd45b8362808061
Merge: 0e61edade 620b51e3e
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 16:36:30 2023 +0300
Merge branch 'master' into AG-21462-imp-safebrowsing-parental
commit 0e61edadeff34f6305e941c1db94575c82f238d9
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 14:51:37 2023 +0300
filtering: imp tests
commit 994255514cc0f67dfe33d5a0892432e8924d1e36
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 11:13:19 2023 +0300
filtering: fix typo
commit 96d1069573171538333330d6af94ef0f4208a9c4
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 11:00:18 2023 +0300
filtering: imp code more
commit c2a5620b04c4a529eea69983f1520cd2bc82ea9b
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 26 19:13:26 2023 +0300
all: add todo
commit e5dcc2e9701f8bccfde6ef8c01a4a2e7eb31599e
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 26 14:36:08 2023 +0300
all: imp code more
commit b6e734ccbeda82669023f6578481260b7c1f7161
Author: Stanislav Chzhen <[email protected]>
Date: Tue Apr 25 15:01:56 2023 +0300
filtering: imp code
commit 530648dadf836c1a4bd9917e0d3b47256fa8ff52
Author: Stanislav Chzhen <[email protected]>
Date: Mon Apr 24 20:06:36 2023 +0300
all: imp code
commit 49fa6e587052a40bb431fea457701ee860493527
Author: Stanislav Chzhen <[email protected]>
Date: Mon Apr 24 14:57:19 2023 +0300
all: rm safe browsing ctx
commit bbcb66cb03e18fa875e3c33cf16295892739e507
Author: Stanislav Chzhen <[email protected]>
Date: Fri Apr 21 17:54:18 2023 +0300
filtering: add cache item
commit cb7c9fffe8c4ff5e7a21ca912c223c799f61385f
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 20 18:43:02 2023 +0300
filtering: fix hashes
commit 153fec46270212af03f3631bfb42c5d680c4e142
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 20 16:15:15 2023 +0300
filtering: add test cases
commit 09372f92bbb1fc082f1b1283594ee589100209c5
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 20 15:38:05 2023 +0300
filtering: imp code
commit 466bc26d524ea6d1c3efb33692a7785d39e491ca
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 19 18:38:40 2023 +0300
filtering: add tests
commit 24365ecf8c60512fdac65833ee603c80864ae018
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 19 11:38:57 2023 +0300
filtering: add hashprefix </s> remove d.SetSafeBrowsingUpstream(aghtest.NewBlockUpstream(sbBlocked, true))
</s> add </s> remove d, setts := newForTest(b, &Config{SafeBrowsingEnabled: true}, nil)
</s> add d, setts := newForTest(b, &Config{
SafeBrowsingEnabled: true,
SafeBrowsingChecker: newChecker(sbBlocked),
}, nil) </s> remove d, setts := newForTest(b, &Config{SafeBrowsingEnabled: true}, nil)
</s> add d, setts := newForTest(b, &Config{
SafeBrowsingEnabled: true,
SafeBrowsingChecker: newChecker(sbBlocked),
}, nil) </s> remove d.SetSafeBrowsingUpstream(aghtest.NewBlockUpstream(sbBlocked, true))
</s> add </s> remove d, setts := newForTest(t, &Config{SafeBrowsingEnabled: true}, nil)
</s> add d, setts := newForTest(t, &Config{
SafeBrowsingEnabled: true,
SafeBrowsingChecker: newChecker(sbBlocked),
}, nil) | [
"keep",
"keep",
"keep",
"keep",
"replace",
"replace",
"keep",
"keep",
"keep",
"keep",
"keep"
] | https://github.com/AdguardTeam/AdGuardHome/commit/381f2f651d1a94d29bf534185e531b05a755f16c | internal/filtering/filtering_test.go |
|
<mask> package filtering
<mask>
<mask> import (
<mask> "bytes"
<mask> "crypto/sha256"
<mask> "encoding/binary"
<mask> "encoding/hex"
<mask> "fmt"
<mask> "net"
<mask> "net/http"
<mask> "strings"
<mask> "sync"
<mask> "time"
<mask>
</s> Pull request 1837: AG-21462-imp-safebrowsing-parental
Merge in DNS/adguard-home from AG-21462-imp-safebrowsing-parental to master
Squashed commit of the following:
commit 85016d4f1105e21a407efade0bd45b8362808061
Merge: 0e61edade 620b51e3e
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 16:36:30 2023 +0300
Merge branch 'master' into AG-21462-imp-safebrowsing-parental
commit 0e61edadeff34f6305e941c1db94575c82f238d9
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 14:51:37 2023 +0300
filtering: imp tests
commit 994255514cc0f67dfe33d5a0892432e8924d1e36
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 11:13:19 2023 +0300
filtering: fix typo
commit 96d1069573171538333330d6af94ef0f4208a9c4
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 11:00:18 2023 +0300
filtering: imp code more
commit c2a5620b04c4a529eea69983f1520cd2bc82ea9b
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 26 19:13:26 2023 +0300
all: add todo
commit e5dcc2e9701f8bccfde6ef8c01a4a2e7eb31599e
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 26 14:36:08 2023 +0300
all: imp code more
commit b6e734ccbeda82669023f6578481260b7c1f7161
Author: Stanislav Chzhen <[email protected]>
Date: Tue Apr 25 15:01:56 2023 +0300
filtering: imp code
commit 530648dadf836c1a4bd9917e0d3b47256fa8ff52
Author: Stanislav Chzhen <[email protected]>
Date: Mon Apr 24 20:06:36 2023 +0300
all: imp code
commit 49fa6e587052a40bb431fea457701ee860493527
Author: Stanislav Chzhen <[email protected]>
Date: Mon Apr 24 14:57:19 2023 +0300
all: rm safe browsing ctx
commit bbcb66cb03e18fa875e3c33cf16295892739e507
Author: Stanislav Chzhen <[email protected]>
Date: Fri Apr 21 17:54:18 2023 +0300
filtering: add cache item
commit cb7c9fffe8c4ff5e7a21ca912c223c799f61385f
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 20 18:43:02 2023 +0300
filtering: fix hashes
commit 153fec46270212af03f3631bfb42c5d680c4e142
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 20 16:15:15 2023 +0300
filtering: add test cases
commit 09372f92bbb1fc082f1b1283594ee589100209c5
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 20 15:38:05 2023 +0300
filtering: imp code
commit 466bc26d524ea6d1c3efb33692a7785d39e491ca
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 19 18:38:40 2023 +0300
filtering: add tests
commit 24365ecf8c60512fdac65833ee603c80864ae018
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 19 11:38:57 2023 +0300
filtering: add hashprefix </s> remove "strings"
</s> add </s> remove "time"
</s> add </s> remove "github.com/AdguardTeam/dnsproxy/upstream"
"github.com/AdguardTeam/golibs/cache"
</s> add </s> remove "github.com/AdguardTeam/golibs/cache"
</s> add "github.com/AdguardTeam/AdGuardHome/internal/filtering/hashprefix" | [
"keep",
"keep",
"keep",
"replace",
"replace",
"replace",
"replace",
"replace",
"replace",
"keep",
"keep",
"keep",
"keep",
"keep"
] | https://github.com/AdguardTeam/AdGuardHome/commit/381f2f651d1a94d29bf534185e531b05a755f16c | internal/filtering/safebrowsing.go |
|
<mask> "encoding/hex"
<mask> "fmt"
<mask> "net"
<mask> "net/http"
<mask> "strings"
<mask> "sync"
<mask> "time"
<mask>
<mask> "github.com/AdguardTeam/AdGuardHome/internal/aghhttp"
<mask> "github.com/AdguardTeam/dnsproxy/upstream"
</s> Pull request 1837: AG-21462-imp-safebrowsing-parental
Merge in DNS/adguard-home from AG-21462-imp-safebrowsing-parental to master
Squashed commit of the following:
commit 85016d4f1105e21a407efade0bd45b8362808061
Merge: 0e61edade 620b51e3e
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 16:36:30 2023 +0300
Merge branch 'master' into AG-21462-imp-safebrowsing-parental
commit 0e61edadeff34f6305e941c1db94575c82f238d9
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 14:51:37 2023 +0300
filtering: imp tests
commit 994255514cc0f67dfe33d5a0892432e8924d1e36
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 11:13:19 2023 +0300
filtering: fix typo
commit 96d1069573171538333330d6af94ef0f4208a9c4
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 11:00:18 2023 +0300
filtering: imp code more
commit c2a5620b04c4a529eea69983f1520cd2bc82ea9b
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 26 19:13:26 2023 +0300
all: add todo
commit e5dcc2e9701f8bccfde6ef8c01a4a2e7eb31599e
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 26 14:36:08 2023 +0300
all: imp code more
commit b6e734ccbeda82669023f6578481260b7c1f7161
Author: Stanislav Chzhen <[email protected]>
Date: Tue Apr 25 15:01:56 2023 +0300
filtering: imp code
commit 530648dadf836c1a4bd9917e0d3b47256fa8ff52
Author: Stanislav Chzhen <[email protected]>
Date: Mon Apr 24 20:06:36 2023 +0300
all: imp code
commit 49fa6e587052a40bb431fea457701ee860493527
Author: Stanislav Chzhen <[email protected]>
Date: Mon Apr 24 14:57:19 2023 +0300
all: rm safe browsing ctx
commit bbcb66cb03e18fa875e3c33cf16295892739e507
Author: Stanislav Chzhen <[email protected]>
Date: Fri Apr 21 17:54:18 2023 +0300
filtering: add cache item
commit cb7c9fffe8c4ff5e7a21ca912c223c799f61385f
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 20 18:43:02 2023 +0300
filtering: fix hashes
commit 153fec46270212af03f3631bfb42c5d680c4e142
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 20 16:15:15 2023 +0300
filtering: add test cases
commit 09372f92bbb1fc082f1b1283594ee589100209c5
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 20 15:38:05 2023 +0300
filtering: imp code
commit 466bc26d524ea6d1c3efb33692a7785d39e491ca
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 19 18:38:40 2023 +0300
filtering: add tests
commit 24365ecf8c60512fdac65833ee603c80864ae018
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 19 11:38:57 2023 +0300
filtering: add hashprefix </s> remove "bytes"
"crypto/sha256"
"encoding/binary"
"encoding/hex"
"fmt"
"net"
</s> add </s> remove "time"
</s> add </s> remove "github.com/AdguardTeam/dnsproxy/upstream"
"github.com/AdguardTeam/golibs/cache"
</s> add </s> remove "github.com/AdguardTeam/dnsproxy/upstream"
"github.com/AdguardTeam/golibs/cache"
</s> add </s> remove "github.com/AdguardTeam/golibs/stringutil"
"github.com/miekg/dns"
"golang.org/x/exp/slices"
"golang.org/x/net/publicsuffix"
</s> add | [
"keep",
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep",
"keep",
"keep"
] | https://github.com/AdguardTeam/AdGuardHome/commit/381f2f651d1a94d29bf534185e531b05a755f16c | internal/filtering/safebrowsing.go |
|
<mask> "net"
<mask> "net/http"
<mask> "strings"
<mask> "sync"
<mask> "time"
<mask>
<mask> "github.com/AdguardTeam/AdGuardHome/internal/aghhttp"
<mask> "github.com/AdguardTeam/dnsproxy/upstream"
<mask> "github.com/AdguardTeam/golibs/cache"
<mask> "github.com/AdguardTeam/golibs/log"
</s> Pull request 1837: AG-21462-imp-safebrowsing-parental
Merge in DNS/adguard-home from AG-21462-imp-safebrowsing-parental to master
Squashed commit of the following:
commit 85016d4f1105e21a407efade0bd45b8362808061
Merge: 0e61edade 620b51e3e
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 16:36:30 2023 +0300
Merge branch 'master' into AG-21462-imp-safebrowsing-parental
commit 0e61edadeff34f6305e941c1db94575c82f238d9
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 14:51:37 2023 +0300
filtering: imp tests
commit 994255514cc0f67dfe33d5a0892432e8924d1e36
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 11:13:19 2023 +0300
filtering: fix typo
commit 96d1069573171538333330d6af94ef0f4208a9c4
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 27 11:00:18 2023 +0300
filtering: imp code more
commit c2a5620b04c4a529eea69983f1520cd2bc82ea9b
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 26 19:13:26 2023 +0300
all: add todo
commit e5dcc2e9701f8bccfde6ef8c01a4a2e7eb31599e
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 26 14:36:08 2023 +0300
all: imp code more
commit b6e734ccbeda82669023f6578481260b7c1f7161
Author: Stanislav Chzhen <[email protected]>
Date: Tue Apr 25 15:01:56 2023 +0300
filtering: imp code
commit 530648dadf836c1a4bd9917e0d3b47256fa8ff52
Author: Stanislav Chzhen <[email protected]>
Date: Mon Apr 24 20:06:36 2023 +0300
all: imp code
commit 49fa6e587052a40bb431fea457701ee860493527
Author: Stanislav Chzhen <[email protected]>
Date: Mon Apr 24 14:57:19 2023 +0300
all: rm safe browsing ctx
commit bbcb66cb03e18fa875e3c33cf16295892739e507
Author: Stanislav Chzhen <[email protected]>
Date: Fri Apr 21 17:54:18 2023 +0300
filtering: add cache item
commit cb7c9fffe8c4ff5e7a21ca912c223c799f61385f
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 20 18:43:02 2023 +0300
filtering: fix hashes
commit 153fec46270212af03f3631bfb42c5d680c4e142
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 20 16:15:15 2023 +0300
filtering: add test cases
commit 09372f92bbb1fc082f1b1283594ee589100209c5
Author: Stanislav Chzhen <[email protected]>
Date: Thu Apr 20 15:38:05 2023 +0300
filtering: imp code
commit 466bc26d524ea6d1c3efb33692a7785d39e491ca
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 19 18:38:40 2023 +0300
filtering: add tests
commit 24365ecf8c60512fdac65833ee603c80864ae018
Author: Stanislav Chzhen <[email protected]>
Date: Wed Apr 19 11:38:57 2023 +0300
filtering: add hashprefix </s> remove "strings"
</s> add </s> remove "github.com/AdguardTeam/dnsproxy/upstream"
"github.com/AdguardTeam/golibs/cache"
</s> add </s> remove "bytes"
"crypto/sha256"
"encoding/binary"
"encoding/hex"
"fmt"
"net"
</s> add </s> remove "github.com/AdguardTeam/dnsproxy/upstream"
"github.com/AdguardTeam/golibs/cache"
</s> add </s> remove "github.com/AdguardTeam/golibs/stringutil"
"github.com/miekg/dns"
"golang.org/x/exp/slices"
"golang.org/x/net/publicsuffix"
</s> add | [
"keep",
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep",
"keep",
"keep"
] | https://github.com/AdguardTeam/AdGuardHome/commit/381f2f651d1a94d29bf534185e531b05a755f16c | internal/filtering/safebrowsing.go |