Deduplication; no repeated selection (within 3 picks); shuffle algorithm

wood chen 2024-10-13 11:57:07 +08:00
parent adf819791c
commit f8737684ef

main.go (75 lines changed)

@@ -4,7 +4,6 @@ import (
 	"encoding/json"
 	"fmt"
 	"io"
-	"io/ioutil"
 	"log"
 	"math/rand"
 	"net"
@@ -20,16 +19,62 @@ const (
 	port           = ":5003"
 	cacheDuration  = 24 * time.Hour
 	requestTimeout = 10 * time.Second
+	noRepeatCount  = 3 // do not pick the same URL again within this many selections
 )
 
 var (
 	csvPathsCache map[string]map[string]string
 	lastFetchTime time.Time
-	csvCache      = make(map[string][]string)
+	csvCache      = make(map[string]*URLSelector)
 	mu            sync.RWMutex
 	rng           *rand.Rand
 )
 
+type URLSelector struct {
+	URLs         []string
+	CurrentIndex int
+	RecentUsed   map[string]int
+}
+
+func NewURLSelector(urls []string) *URLSelector {
+	return &URLSelector{
+		URLs:         urls,
+		CurrentIndex: 0,
+		RecentUsed:   make(map[string]int),
+	}
+}
+
+func (us *URLSelector) ShuffleURLs() {
+	for i := len(us.URLs) - 1; i > 0; i-- {
+		j := rng.Intn(i + 1)
+		us.URLs[i], us.URLs[j] = us.URLs[j], us.URLs[i]
+	}
+}
+
+func (us *URLSelector) GetRandomURL() string {
+	if us.CurrentIndex == 0 {
+		us.ShuffleURLs()
+	}
+
+	for i := 0; i < len(us.URLs); i++ {
+		url := us.URLs[us.CurrentIndex]
+		us.CurrentIndex = (us.CurrentIndex + 1) % len(us.URLs)
+
+		if us.RecentUsed[url] < noRepeatCount {
+			us.RecentUsed[url]++
+			// once a URL reaches the usage cap, remove it from RecentUsed
+			if us.RecentUsed[url] == noRepeatCount {
+				delete(us.RecentUsed, url)
+			}
+			return url
+		}
+	}
+
+	// if every URL was recently used, reset RecentUsed and return the first URL
+	us.RecentUsed = make(map[string]int)
+	return us.URLs[0]
+}
+
 func main() {
 	source := rand.NewSource(time.Now().UnixNano())
 	rng = rand.New(source)
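The in-place shuffle added above is the Fisher–Yates algorithm: walking from the last index down, each element is swapped with a uniformly random element at or before it, which makes every permutation equally likely. GetRandomURL then walks the shuffled slice cyclically, and RecentUsed caps how many times a URL can be handed out before its counter is cleared, so the same URL is not picked more than noRepeatCount times in a short window. A minimal standalone sketch of the shuffle (demo names and seed are made up, not part of this commit):

```go
package main

import (
	"fmt"
	"math/rand"
)

// fisherYates shuffles s in place. Iterating i from the end and swapping s[i]
// with a uniform pick from s[0..i] yields every permutation with equal probability.
func fisherYates(rng *rand.Rand, s []string) {
	for i := len(s) - 1; i > 0; i-- {
		j := rng.Intn(i + 1)
		s[i], s[j] = s[j], s[i]
	}
}

func main() {
	rng := rand.New(rand.NewSource(1)) // fixed seed so the demo is reproducible
	urls := []string{"a.webp", "b.webp", "c.webp", "d.webp"}
	fisherYates(rng, urls)
	fmt.Println(urls)
}
```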
@@ -89,7 +134,7 @@ func loadCSVPaths() error {
 	jsonPath := filepath.Join("public", "url.json")
 	log.Printf("Attempting to read file: %s", jsonPath)
 
-	data, err := ioutil.ReadFile(jsonPath)
+	data, err := os.ReadFile(jsonPath)
 	if err != nil {
 		return fmt.Errorf("failed to read url.json: %w", err)
 	}
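ioutil.ReadFile has been deprecated since Go 1.16; os.ReadFile is its drop-in replacement with an identical signature, which is why this hunk (and the import removal above) changes only the package qualifier. A quick self-contained check, reusing the path from the diff:

```go
package main

import (
	"fmt"
	"log"
	"os"
)

func main() {
	// os.ReadFile(name) replaces ioutil.ReadFile(name) one-for-one (Go 1.16+).
	data, err := os.ReadFile("public/url.json")
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("read %d bytes\n", len(data))
}
```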
@@ -108,36 +153,40 @@ func loadCSVPaths() error {
 	return nil
 }
 
-func getCSVContent(path string) ([]string, error) {
+func getCSVContent(path string) (*URLSelector, error) {
 	mu.RLock()
-	content, exists := csvCache[path]
+	selector, exists := csvCache[path]
 	mu.RUnlock()
 
 	if exists {
-		return content, nil
+		return selector, nil
 	}
 
 	fullPath := filepath.Join("public", path)
 	log.Printf("Attempting to read file: %s", fullPath)
 
-	fileContent, err := ioutil.ReadFile(fullPath)
+	fileContent, err := os.ReadFile(fullPath)
 	if err != nil {
 		return nil, fmt.Errorf("error reading CSV content: %w", err)
 	}
 
 	lines := strings.Split(string(fileContent), "\n")
+	uniqueURLs := make(map[string]bool)
 	var fileArray []string
 	for _, line := range lines {
 		trimmed := strings.TrimSpace(line)
-		if trimmed != "" && !strings.HasPrefix(trimmed, "#") {
+		if trimmed != "" && !strings.HasPrefix(trimmed, "#") && !uniqueURLs[trimmed] {
 			fileArray = append(fileArray, trimmed)
+			uniqueURLs[trimmed] = true
 		}
 	}
 
+	selector = NewURLSelector(fileArray)
+
 	mu.Lock()
-	csvCache[path] = fileArray
+	csvCache[path] = selector
 	mu.Unlock()
 
-	return fileArray, nil
+	return selector, nil
 }
 
 func handleAPIRequest(w http.ResponseWriter, r *http.Request) {
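The new uniqueURLs map gives first-occurrence deduplication while the file is scanned: a line is appended only if it is non-empty, not a comment, and not seen before. A self-contained sketch of the same pattern (the sample data is invented for illustration):

```go
package main

import (
	"fmt"
	"strings"
)

// dedupLines keeps the first occurrence of each non-empty, non-comment line,
// mirroring the uniqueURLs check in getCSVContent.
func dedupLines(content string) []string {
	seen := make(map[string]bool)
	var out []string
	for _, line := range strings.Split(content, "\n") {
		trimmed := strings.TrimSpace(line)
		if trimmed != "" && !strings.HasPrefix(trimmed, "#") && !seen[trimmed] {
			out = append(out, trimmed)
			seen[trimmed] = true
		}
	}
	return out
}

func main() {
	sample := "# header\nhttps://example.com/1.jpg\nhttps://example.com/2.jpg\nhttps://example.com/1.jpg\n"
	fmt.Println(dedupLines(sample))
	// Output: [https://example.com/1.jpg https://example.com/2.jpg]
}
```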
@@ -172,19 +221,19 @@ func handleAPIRequest(w http.ResponseWriter, r *http.Request) {
 		return
 	}
 
-	fileArray, err := getCSVContent(csvPath)
+	selector, err := getCSVContent(csvPath)
 	if err != nil {
 		http.Error(w, "Failed to fetch CSV content", http.StatusInternalServerError)
 		log.Println("Error fetching CSV content:", err)
 		return
 	}
 
-	if len(fileArray) == 0 {
+	if len(selector.URLs) == 0 {
 		http.Error(w, "No content available", http.StatusNotFound)
 		return
 	}
 
-	randomURL := fileArray[rng.Intn(len(fileArray))]
+	randomURL := selector.GetRandomURL()
 
 	duration := time.Since(start)
 	log.Printf("Request: %s %s from %s - Duration: %v - Redirecting to: %s\n",