package main

import (
    "fmt"
    "io"
    "net/http"
    "os"
    "regexp"
    "strconv"
    "strings"
)

// HttpGet fetches url with a custom User-Agent and returns the whole
// response body as a string.
func HttpGet(url string) (result string, err error) {
    client := &http.Client{}
    req, err := http.NewRequest("GET", url, nil)
    if err != nil {
        return
    }
    // Some sites reject requests that carry no User-Agent header.
    req.Header.Add("User-Agent", "test2")
    resp, err := client.Do(req)
    if err != nil {
        return
    }
    defer resp.Body.Close()
    // Read the body in 4 KB chunks until EOF. Read may hand back data
    // together with io.EOF, so append before checking the error.
    buf := make([]byte, 4096)
    for {
        n, readErr := resp.Body.Read(buf)
        result += string(buf[:n])
        if readErr == io.EOF {
            break
        }
        if readErr != nil {
            err = readErr
            return
        }
    }
    return
}
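
The manual read loop above works, but since Go 1.16 the standard library's io.ReadAll does the same accumulation in a single call. A minimal equivalent sketch (same header, same result; the name HttpGetAll is made up here):

// HttpGetAll is a shorter equivalent of HttpGet built on io.ReadAll.
func HttpGetAll(url string) (string, error) {
    req, err := http.NewRequest("GET", url, nil)
    if err != nil {
        return "", err
    }
    req.Header.Add("User-Agent", "test2")
    resp, err := http.DefaultClient.Do(req)
    if err != nil {
        return "", err
    }
    defer resp.Body.Close()
    body, err := io.ReadAll(resp.Body) // reads until EOF or error
    if err != nil {
        return "", err
    }
    return string(body), nil
}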

// SpideJokePage crawls a single joke page and extracts its title and body.
func SpideJokePage(url string) (title, content string, err error) {
    result, err := HttpGet(url)
    if err != nil {
        return
    }
    // The title appears twice in the page; take only the first match.
    titleExp := regexp.MustCompile(`<h1>(.*?)</h1>`)
    if m := titleExp.FindStringSubmatch(result); m != nil {
        title = strings.Replace(m[1], "\t", "", -1)
    }
    contentExp := regexp.MustCompile(`<div class="content-txt pt10">(?s:(.*?))<a id="prev"`)
    if m := contentExp.FindStringSubmatch(result); m != nil {
        content = strings.Replace(m[1], "\t", "", -1)
        content = strings.Replace(content, "\n", "", -1)
    }
    return
}
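
As an aside, the (?s:...) group is what lets . match newlines, so the content capture can span multiple lines, while (.*?) keeps both matches non-greedy. A quick standalone check of the two patterns against a made-up HTML fragment (the markup below is an assumption modeled on the site, not copied from it):

package main

import (
    "fmt"
    "regexp"
)

func main() {
    // Hypothetical fragment shaped like the pages this post crawls.
    html := `<h1>Sample title</h1>
<div class="content-txt pt10">line one
line two
</div>
<a id="prev" href="#">previous</a>`

    titleExp := regexp.MustCompile(`<h1>(.*?)</h1>`)
    contentExp := regexp.MustCompile(`<div class="content-txt pt10">(?s:(.*?))<a id="prev"`)

    if m := titleExp.FindStringSubmatch(html); m != nil {
        fmt.Printf("title: %q\n", m[1]) // "Sample title"
    }
    if m := contentExp.FindStringSubmatch(html); m != nil {
        // The capture runs right up to the prev link, so the closing
        // </div> tag comes along with the text.
        fmt.Printf("content: %q\n", m[1])
    }
}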

// SaveJoke2File writes one page's titles and contents to "page_<idx>.txt",
// one blank-line-separated joke per pair.
func SaveJoke2File(idx int, titles, contents []string) {
    f, err := os.Create("page_" + strconv.Itoa(idx) + ".txt")
    if err != nil {
        fmt.Println("os.Create file error:", err)
        return
    }
    defer f.Close()
    // titles and contents are appended in pairs, so their lengths match.
    for i := 0; i < len(titles); i++ {
        f.WriteString(titles[i] + "\n")
        f.WriteString(contents[i] + "\n")
        f.WriteString("\n")
    }
}
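
Note that the WriteString calls above discard their error returns, so a failed write (full disk, closed file) passes silently. A hedged variant that buffers and checks (assumes "bufio" is added to the imports; the name saveJokesChecked is made up):

// saveJokesChecked is a sketch of SaveJoke2File that buffers writes
// and reports the first write error via bufio.Writer.Flush.
func saveJokesChecked(idx int, titles, contents []string) error {
    f, err := os.Create("page_" + strconv.Itoa(idx) + ".txt")
    if err != nil {
        return err
    }
    defer f.Close()
    w := bufio.NewWriter(f)
    for i := range titles {
        fmt.Fprintln(w, titles[i])
        fmt.Fprintln(w, contents[i])
        fmt.Fprintln(w)
    }
    return w.Flush() // surfaces any write error the loop hit
}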

// SpidePage crawls one index page, follows every joke link on it, saves
// the results to a file, and reports its page number on the channel.
func SpidePage(idx int, page chan int) {
    url := "https://www.pengfue.com/index_" + strconv.Itoa(idx) + ".html"
    result, err := HttpGet(url)
    if err != nil {
        fmt.Println("HttpGet error:", err)
        page <- idx // still signal, or toWork would block forever
        return
    }
    // Each joke on the index page is linked from an <h1 class="dp-b"> heading.
    jokeURLExp := regexp.MustCompile(`<h1 class="dp-b"><a href="(?s:(.*?))"`)
    jokeURLs := jokeURLExp.FindAllStringSubmatch(result, -1)
    titles := make([]string, 0)
    contents := make([]string, 0)
    for _, jokeURL := range jokeURLs {
        title, content, err := SpideJokePage(jokeURL[1])
        if err != nil {
            fmt.Println("Spide joke page error:", err)
            continue
        }
        titles = append(titles, title)
        contents = append(contents, content)
    }
    SaveJoke2File(idx, titles, contents)
    page <- idx // signal completion to toWork
}
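
Because SpidePage only ever sends its index, toWork cannot tell a page that crawled cleanly from one that failed. A small hypothetical refinement (not in the original post) sends a result struct instead, reusing the HttpGet pipeline above:

// pageResult pairs a page index with the error, if any, hit while
// crawling it. Hypothetical refinement; these names are made up.
type pageResult struct {
    idx int
    err error
}

// spidePageChecked behaves like SpidePage but reports failure.
func spidePageChecked(idx int, results chan<- pageResult) {
    url := "https://www.pengfue.com/index_" + strconv.Itoa(idx) + ".html"
    if _, err := HttpGet(url); err != nil {
        results <- pageResult{idx, err} // caller sees the failure
        return
    }
    // ... extract, crawl, and save exactly as SpidePage does ...
    results <- pageResult{idx, nil}
}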

// toWork starts one goroutine per page and waits for all of them.
func toWork(start, end int) {
    fmt.Printf("Crawling pages %d through %d\n", start, end)
    page := make(chan int)
    for i := start; i <= end; i++ {
        go SpidePage(i, page)
    }
    // Receive exactly one completion signal per page.
    for i := start; i <= end; i++ {
        fmt.Printf("Page %d finished\n", <-page)
    }
}
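
toWork spins up one goroutine per page with no upper bound, which is harmless for a handful of pages but rude to the server over large ranges. A common way to cap concurrency (a sketch, not from the original post; toWorkLimited and maxConc are invented names) is a buffered channel used as a counting semaphore:

// toWorkLimited crawls pages start..end with at most maxConc
// requests in flight at once.
func toWorkLimited(start, end, maxConc int) {
    sem := make(chan struct{}, maxConc) // counting semaphore
    page := make(chan int)
    for i := start; i <= end; i++ {
        go func(idx int) {
            sem <- struct{}{}        // block while maxConc crawls are running
            defer func() { <-sem }() // free the slot when done
            SpidePage(idx, page)
        }(i)
    }
    for i := start; i <= end; i++ {
        fmt.Printf("Page %d finished\n", <-page)
    }
}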

func main() {
    var start, end int
    fmt.Print("Enter the first page to crawl (>= 1): ")
    fmt.Scan(&start)
    fmt.Print("Enter the last page to crawl (>= start): ")
    fmt.Scan(&end)
    toWork(start, end)
}

The site itself is not great. Crawling the first four pages produces the following result:
(screenshot: the generated output files for pages 1 through 4)