
Commit

Issue #125
CHANGE:
  - database/database.go Updating some comments.
  - server/server.go Added a curried `scrapeHandler` which uses a database
    pool.
Ianleeclark committed Nov 27, 2016
1 parent 5f7c4fd commit 2cd3385
Showing 2 changed files with 25 additions and 17 deletions.
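
A note on the second change before the diffs: the curried `scrapeHandler` is a handler factory. `scrapeHandlerCurried` accepts the shared `*gorm.DB` (whose underlying `database/sql` handle provides the connection pool) and returns the actual `/scrape` handler as a closure over it, so `RunServer` opens the connection once at startup instead of the old handler opening one per request. Below is a minimal, self-contained sketch of that pattern, not the repository's code: the MySQL driver import, DSN, and `scrapeTorrent` helper are illustrative assumptions standing in for `db.OpenConnection`, `db.ScrapeTorrent`, and the response formatting.

```go
package main

import (
	"fmt"
	"net/http"

	"github.com/jinzhu/gorm"
	// A registered dialect is needed for gorm.Open; MySQL is assumed here.
	_ "github.com/jinzhu/gorm/dialects/mysql"
)

// scrapeHandlerCurried is a handler factory: it takes the shared *gorm.DB
// and returns the real handler as a closure over that connection.
func scrapeHandlerCurried(dbConn *gorm.DB) func(w http.ResponseWriter, req *http.Request) {
	return func(w http.ResponseWriter, req *http.Request) {
		infoHash := req.URL.Query().Get("InfoHash")
		if infoHash == "" {
			http.Error(w, "Tracker does not support multiple entire DB scrapes.", http.StatusBadRequest)
			return
		}
		// scrapeTorrent is a hypothetical stand-in for the project's
		// db.ScrapeTorrent + formatScrapeResponse pipeline.
		fmt.Fprintln(w, scrapeTorrent(dbConn, infoHash))
	}
}

func scrapeTorrent(dbConn *gorm.DB, infoHash string) string {
	// Placeholder: the real tracker queries torrent stats from dbConn here.
	return "scrape result for " + infoHash
}

func main() {
	// Open the connection once at startup, mirroring the change to RunServer;
	// the DSN below is illustrative only.
	dbConn, err := gorm.Open("mysql", "user:pass@tcp(127.0.0.1:3306)/notorious")
	if err != nil {
		panic("Failed to open connection to remote database server.")
	}
	defer dbConn.Close()

	mux := http.NewServeMux()
	mux.HandleFunc("/scrape", scrapeHandlerCurried(dbConn))
	http.ListenAndServe(":3000", mux)
}
```

Closing over the connection keeps the handler signature compatible with `http.NewServeMux` while avoiding a package-level global for the database handle. The actual commit follows.
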
6 changes: 4 additions & 2 deletions database/database.go
@@ -103,7 +103,7 @@ func UpdateStats(uploaded uint64, downloaded uint64) {
return
}

// UpdateStats Handles updating statistics relevant to our tracker.
// UpdateTorrentStats Handles updating statistics relevant to our tracker.
func UpdateTorrentStats(seederDelta int64, leecherDelta int64) {
db, err := OpenConnection()
if err != nil {
@@ -121,6 +121,8 @@ func UpdateTorrentStats(seederDelta int64, leecherDelta int64) {
return
}

// UpdatePeerStats handles updating peer info like hits per ip, downloaded
// amount, uploaded amounts.
func UpdatePeerStats(uploaded uint64, downloaded uint64, ip string) {
db, err := OpenConnection()
if err != nil {
@@ -137,7 +139,7 @@ func UpdatePeerStats(uploaded uint64, downloaded uint64, ip string) {
return
}

// GetWhitelistedTorrent allows us to retrieve all of the white listed
// GetWhitelistedTorrents allows us to retrieve all of the white listed
// torrents. Mostly used for populating the Redis KV storage with all of our
// whitelisted torrents.
func GetWhitelistedTorrents() (x *sql.Rows, err error) {
36 changes: 21 additions & 15 deletions server/server.go
@@ -7,6 +7,7 @@ import (
"github.com/GrappigPanda/notorious/database"
r "github.com/GrappigPanda/notorious/kvStoreInterfaces"
"github.com/GrappigPanda/notorious/server/peerStore"
"github.com/jinzhu/gorm"
"net/http"
)

@@ -128,23 +129,21 @@ func (app *applicationContext) requestHandler(w http.ResponseWriter, req *http.R
app.handleStatsTracking(data)
}

func scrapeHandler(w http.ResponseWriter, req *http.Request) {
query := req.URL.Query()
dbConn, err := db.OpenConnection()
if err != nil {
panic(err)
}
func scrapeHandlerCurried(dbConn *gorm.DB) func(w http.ResponseWriter, req *http.Request) {
return func(w http.ResponseWriter, req *http.Request) {
query := req.URL.Query()

infoHash := query.Get("InfoHash")
if infoHash == "" {
failMsg := fmt.Sprintf("Tracker does not support multiple entire DB scrapes.")
writeErrorResponse(w, failMsg)
} else {
torrentData := db.ScrapeTorrent(dbConn, infoHash)
writeResponse(w, formatScrapeResponse(torrentData))
}
infoHash := query.Get("InfoHash")
if infoHash == "" {
failMsg := fmt.Sprintf("Tracker does not support multiple entire DB scrapes.")
writeErrorResponse(w, failMsg)
} else {
torrentData := db.ScrapeTorrent(dbConn, infoHash)
writeResponse(w, formatScrapeResponse(torrentData))
}

return
return
}
}

func writeErrorResponse(w http.ResponseWriter, failMsg string) {
@@ -166,6 +165,13 @@ func RunServer() {

mux := http.NewServeMux()

dbConn, err := db.OpenConnection()
if err != nil {
panic("Failed to open connection to remote database server.")
}

scrapeHandler := scrapeHandlerCurried(dbConn)

mux.HandleFunc("/announce", app.requestHandler)
mux.HandleFunc("/scrape", scrapeHandler)
http.ListenAndServe(":3000", mux)
