本文整理汇总了Golang中github.com/mdlayher/goat/goat/data.FileRecord.PeerReaper方法的典型用法代码示例。如果您正苦于以下问题:Golang FileRecord.PeerReaper方法的具体用法?Golang FileRecord.PeerReaper怎么用?Golang FileRecord.PeerReaper使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类github.com/mdlayher/goat/goat/data.FileRecord的用法示例。
在下文中一共展示了FileRecord.PeerReaper方法的3个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Golang代码示例。
示例1: Scrape
// Scrape scrapes a tracker request
func Scrape(tracker torrentTracker, query url.Values) []byte {
// List of files to be scraped
scrapeFiles := make([]data.FileRecord, 0)
// Iterate all info_hash values in query
for _, infoHash := range query["info_hash"] {
// Make a copy of query, set the info hash as current in loop
localQuery := query
localQuery.Set("info_hash", infoHash)
// Store scrape information in struct
scrape := new(data.ScrapeLog).FromValues(localQuery)
if scrape == (data.ScrapeLog{}) {
return tracker.Error("Malformed scrape")
}
// Request to store scrape
go scrape.Save()
log.Printf("scrape: [%s %s] %s", tracker.Protocol(), scrape.IP, scrape.InfoHash)
// Check for a matching file via info_hash
file := new(data.FileRecord).Load(scrape.InfoHash, "info_hash")
if file == (data.FileRecord{}) {
// Torrent is not currently registered
return tracker.Error("Unregistered torrent")
}
// Ensure file is verified, meaning we will permit scraping of it
if !file.Verified {
return tracker.Error("Unverified torrent")
}
// Launch peer reaper to remove old peers from this file
go file.PeerReaper()
// File is valid, add it to list to be scraped
scrapeFiles = append(scrapeFiles[:], file)
}
// Create scrape
return tracker.Scrape(scrapeFiles)
}
示例2: Announce
// Announce generates and triggers a tracker announces request
// NOTE(review): this example is truncated by the source page; the function
// body continues past the visible excerpt, so only comments are added here.
func Announce(tracker TorrentTracker, user data.UserRecord, query url.Values) []byte {
// Store announce information in struct; FromValues parses the query parameters
announce := new(data.AnnounceLog)
err := announce.FromValues(query)
if err != nil {
return tracker.Error("Malformed announce")
}
// Request to store announce asynchronously; failures are logged, not fatal
go func(announce *data.AnnounceLog) {
if err := announce.Save(); err != nil {
log.Println(err.Error())
}
}(announce)
// Only report event when needed (empty event means a routine periodic announce)
event := ""
if announce.Event != "" {
event = announce.Event + " "
}
log.Printf("announce: [%s %s:%d] %s%s", tracker.Protocol(), announce.IP, announce.Port, event, announce.InfoHash)
// Check for a matching file via info_hash
file, err := new(data.FileRecord).Load(announce.InfoHash, "info_hash")
if err != nil {
log.Println(err.Error())
return tracker.Error(ErrAnnounceFailure.Error())
}
// Torrent is currently unregistered (zero-value FileRecord means no match)
if file == (data.FileRecord{}) {
log.Printf("tracker: detected new file, awaiting manual approval [hash: %s]", announce.InfoHash)
// Create an entry in file table for this hash, but mark it as unverified
file.InfoHash = announce.InfoHash
file.Verified = false
// Save file asynchronously; failures are logged, not fatal
go func(file data.FileRecord) {
if err := file.Save(); err != nil {
log.Println(err.Error())
}
}(file)
// Report error to the client even though the record was created
return tracker.Error("Unregistered torrent")
}
// Ensure file is verified, meaning we will permit tracking of it
if !file.Verified {
return tracker.Error("Unverified torrent")
}
// Launch peer reaper asynchronously to remove old peers from this file;
// the file is passed by value so the goroutine owns its own copy
go func(file data.FileRecord) {
// Start peer reaper
count, err := file.PeerReaper()
if err != nil {
log.Println(err.Error())
}
// Report peers reaped
// NOTE(review): log.Println does not interpret format verbs; this should
// be log.Printf, otherwise "%d" is printed literally
if count > 0 {
log.Println("peerReaper: reaped %d peers on file ID: %d", count, file.ID)
}
}(file)
// If UDP tracker, we cannot reliably detect user, so we announce anonymously
if _, ok := tracker.(UDPTracker); ok {
return tracker.Announce(query, file)
}
// Check existing record for this user with this file and this IP
fileUser, err := new(data.FileUserRecord).Load(file.ID, user.ID, query.Get("ip"))
if err != nil {
log.Println(err.Error())
return tracker.Error(ErrAnnounceFailure.Error())
}
// New user, starting torrent (zero-value record means no prior relationship)
if fileUser == (data.FileUserRecord{}) {
// Create new relationship
fileUser.FileID = file.ID
fileUser.UserID = user.ID
fileUser.IP = query.Get("ip")
fileUser.Active = true
fileUser.Announced = 1
// If announce reports 0 left, but no existing record, user is probably the initial seeder
if announce.Left == 0 {
fileUser.Completed = true
} else {
fileUser.Completed = false
}
// Track the initial uploaded, download, and left values
// NOTE: clients report absolute values, so delta should NEVER be calculated for these
fileUser.Uploaded = announce.Uploaded
//......... remainder of this example omitted by the source page .........
示例3: Scrape
// Scrape generates and triggers a tracker scrape request, returning the
// tracker's encoded scrape response for all valid info_hash values in the
// query, or an encoded tracker error on the first invalid/unregistered hash.
func Scrape(tracker TorrentTracker, query url.Values) []byte {
	// List of files to be scraped; pre-sized to the number of hashes requested
	scrapeFiles := make([]data.FileRecord, 0, len(query["info_hash"]))

	// Iterate all info_hash values in query
	for _, infoHash := range query["info_hash"] {
		// Make a true copy of query. url.Values is a map type, so plain
		// assignment (localQuery := query) would alias the caller's map and
		// Set would mutate it, leaking each hash into later iterations.
		localQuery := make(url.Values, len(query))
		for k, v := range query {
			localQuery[k] = v
		}
		localQuery.Set("info_hash", infoHash)

		// Store scrape information in struct
		scrape := new(data.ScrapeLog)
		if err := scrape.FromValues(localQuery); err != nil {
			return tracker.Error("Malformed scrape")
		}

		// Request to store scrape asynchronously; failures are logged, not fatal
		go func(scrape *data.ScrapeLog) {
			if err := scrape.Save(); err != nil {
				log.Println(err.Error())
			}
		}(scrape)

		log.Printf("scrape: [%s %s] %s", tracker.Protocol(), scrape.IP, scrape.InfoHash)

		// Check for a matching file via info_hash
		file, err := new(data.FileRecord).Load(scrape.InfoHash, "info_hash")
		if err != nil {
			log.Println(err.Error())
			return tracker.Error(ErrScrapeFailure.Error())
		}

		// Torrent is not currently registered (zero-value record means no match)
		if file == (data.FileRecord{}) {
			return tracker.Error("Unregistered torrent")
		}

		// Ensure file is verified, meaning we will permit scraping of it
		if !file.Verified {
			return tracker.Error("Unverified torrent")
		}

		// Launch peer reaper asynchronously to remove old peers from this file;
		// the file is passed by value so the goroutine owns its own copy
		go func(file data.FileRecord) {
			count, err := file.PeerReaper()
			if err != nil {
				log.Println(err.Error())
			}
			// Report peers reaped; fixed: Printf (not Println) so %d verbs expand
			if count > 0 {
				log.Printf("peerReaper: reaped %d peers on file ID: %d", count, file.ID)
			}
		}(file)

		// File is valid, add it to list to be scraped
		scrapeFiles = append(scrapeFiles, file)
	}

	// Create scrape response from all validated files
	return tracker.Scrape(scrapeFiles)
}