aboutsummaryrefslogtreecommitdiff
path: root/main.go
blob: 5bb387d206a3e213fd99ab72c6c1df32aa861451 (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
package main

import (
	"bufio"
	"fmt"
	"log/slog"
	"net/http"
	"os"
	"strings"
)

// main reads a list of blocklist URLs from the file served at os.Args[1],
// fetches every list, and writes deduplicated unbound "local-zone ... refuse"
// entries to the output file named by os.Args[2]. Exits non-zero on any
// unrecoverable error; failures on individual lists are logged and skipped.
func main() {
	if len(os.Args) != 3 {
		slog.Error("usage: program <url-list> <output-file>")
		os.Exit(1)
	}

	urls, err := fetchURLList(os.Args[1])
	if err != nil {
		slog.Error("failed to fetch URL list", "error", err)
		os.Exit(1)
	}

	f, err := os.Create(os.Args[2])
	if err != nil {
		slog.Error("failed to create output file", "error", err)
		os.Exit(1)
	}

	w := bufio.NewWriter(f)

	// seen is shared across all lists so a domain appearing in several
	// blocklists is written only once.
	domains := make(map[string]struct{})
	for i, url := range urls {
		slog.Info("fetching domains", "url", url, "progress", fmt.Sprintf("%d/%d", i+1, len(urls)))
		if err := fetchDomainsAndWrite(url, w, domains); err != nil {
			// Best-effort: one broken list must not abort the run.
			slog.Warn("failed to process url", "url", url, "error", err)
			continue
		}
	}

	// Flush and close explicitly: a deferred Flush/Close discards its error,
	// which would let a short write (e.g. disk full) exit successfully with
	// a truncated output file.
	if err := w.Flush(); err != nil {
		slog.Error("failed to flush output", "error", err)
		os.Exit(1)
	}
	if err := f.Close(); err != nil {
		slog.Error("failed to close output file", "error", err)
		os.Exit(1)
	}
	slog.Info("completed", "total_domains", len(domains))
}

func fetchURLList(url string) ([]string, error) {
	resp, err := http.Get(url)
	if err != nil {
		return nil, fmt.Errorf("http get failed: %w", err)
	}
	defer resp.Body.Close()

	var urls []string
	scanner := bufio.NewScanner(resp.Body)
	for scanner.Scan() {
		line := strings.TrimSpace(scanner.Text())
		if line == "" || strings.HasPrefix(line, "#") {
			continue
		}
		urls = append(urls, line)
	}
	return urls, scanner.Err()
}

func fetchDomainsAndWrite(url string, w *bufio.Writer, seen map[string]struct{}) error {
	resp, err := http.Get(url)
	if err != nil {
		return fmt.Errorf("http get failed: %w", err)
	}
	defer resp.Body.Close()

	var count int
	scanner := bufio.NewScanner(resp.Body)
	for scanner.Scan() {
		line := strings.TrimSpace(scanner.Text())
		if line == "" || strings.HasPrefix(line, "#") {
			continue
		}

		var domain string
		// Handle "0.0.0.0 domain.com" format
		if strings.Contains(line, " ") {
			parts := strings.Fields(line)
			if len(parts) >= 2 {
				domain = parts[1]
			}
		} else {
			domain = line
		}

		// Basic domain validation and normalization
		domain = strings.ToLower(strings.TrimSpace(domain))
		if domain == "" || !strings.Contains(domain, ".") || strings.HasPrefix(domain, ".") || strings.HasSuffix(domain, ".") {
			continue
		}

		// Skip if we've seen this domain before
		if _, exists := seen[domain]; exists {
			continue
		}
		seen[domain] = struct{}{}
		count++

		if _, err := fmt.Fprintf(w, "local-zone: %q refuse\n", domain); err != nil {
			return fmt.Errorf("failed to write domain: %w", err)
		}
	}

	if err := scanner.Err(); err != nil {
		return fmt.Errorf("scanner error: %w", err)
	}

	slog.Info("processed url", "url", url, "new_domains", count)
	return nil
}