From e64862237d02ca4c627fa48fe94c33ce2c757e7e Mon Sep 17 00:00:00 2001 From: Sam T Date: Mon, 8 Apr 2013 05:16:56 +0400 Subject: add scrapeURL implementation --- src/Network/Torrent/Tracker/Scrape.hs | 60 +++++++++++++++++++++++++++++++++-- 1 file changed, 57 insertions(+), 3 deletions(-) (limited to 'src/Network/Torrent/Tracker') diff --git a/src/Network/Torrent/Tracker/Scrape.hs b/src/Network/Torrent/Tracker/Scrape.hs index 2d1dec07..6bfb488f 100644 --- a/src/Network/Torrent/Tracker/Scrape.hs +++ b/src/Network/Torrent/Tracker/Scrape.hs @@ -1,8 +1,62 @@ +{-# LANGUAGE OverloadedStrings #-} module Network.Torrent.Tracker.Scrape - ( + ( ScrapeInfo(..), Scrape + , scrapeURL ) where +import Control.Applicative +import Data.BEncode +import Data.ByteString (ByteString) +import qualified Data.ByteString as B +import qualified Data.ByteString.Char8 as BC +import Data.Map (Map) +import qualified Data.Map as M +import Data.Monoid +import Data.Torrent.InfoHash import Network.URI -scrapeURL :: URI -> Maybe URI -scrapeURL = undefined +data ScrapeInfo = ScrapeInfo { + siComplete :: Int -- ^ Number of seeders. + , siDownloaded :: Int + -- ^ Total number of times the tracker has registered a completion. + , siIncomplete :: Int -- ^ Number of leechers. + , siName :: Maybe ByteString -- ^ Name of the torrent file, if the tracker reports it. + } deriving (Show, Eq) + +type Scrape = Map InfoHash ScrapeInfo + +instance BEncodable ScrapeInfo where + toBEncode si = fromAssocs + [ "complete" --> siComplete si + , "downloaded" --> siDownloaded si + , "incomplete" --> siIncomplete si + , "name" -->? siName si + ] + + fromBEncode (BDict d) = + ScrapeInfo <$> d >-- "complete" + <*> d >-- "downloaded" + <*> d >-- "incomplete" + <*> d >--? "name" + fromBEncode _ = decodingError "ScrapeInfo" + +-- TODO: encode info hash +-- | Try to convert an /announce/ URL to a /scrape/ URL. If 'scrapeURL' +-- gives 'Nothing' then the tracker does not support scraping. The info hash +-- list is used to restrict the tracker's report to those particular +-- torrents. 
-- Note that scraping of multiple torrents in a single request may not
-- be supported, even when the scrape convention itself is.
--
scrapeURL :: URI -> [InfoHash] -> Maybe URI
scrapeURL uri ihs = do
    newPath <- replace (BC.pack (uriPath uri))
    let newURI = uri { uriPath = BC.unpack newPath }
    return (foldl addHashToURI newURI ihs)
  where
    -- Rewrite the last path segment from "announce..." to "scrape...".
    -- Returns Nothing when the path is empty or its last segment does
    -- not start with "announce" (the tracker does not follow the
    -- scrape convention).  Matching on the reversed segment list keeps
    -- this total: BC.splitWith yields [] on an empty path, which the
    -- original 'last ps' / 'init ps' version crashed on.
    replace :: ByteString -> Maybe ByteString
    replace p =
      case reverse (BC.splitWith (== '/') p) of
        lastSeg : revRest
          | "announce" `B.isPrefixOf` lastSeg ->
              -- Keep any suffix after "announce" (e.g. "announce.php"
              -- becomes "scrape.php").
              let scrapeSeg = "scrape" <> B.drop (B.length "announce") lastSeg
              in  Just (B.intercalate "/" (reverse (scrapeSeg : revRest)))
        _ -> Nothing
-- cgit v1.2.3