From 5d5a7dab5ab0d5d7e35617f8476382a99b38d6db Mon Sep 17 00:00:00 2001
From: Sam T
Date: Sun, 21 Jul 2013 02:13:59 +0400
Subject: ~ Move HTTP tracker stuff to its own module.

---
 src/Network/BitTorrent/Tracker.hs          |  68 +----------------
 src/Network/BitTorrent/Tracker/HTTP.hs     | 115 +++++++++++++++++++++++++++++
 src/Network/BitTorrent/Tracker/Protocol.hs |  45 +----------
 3 files changed, 120 insertions(+), 108 deletions(-)
 create mode 100644 src/Network/BitTorrent/Tracker/HTTP.hs

(limited to 'src')

diff --git a/src/Network/BitTorrent/Tracker.hs b/src/Network/BitTorrent/Tracker.hs
index 5acaa3cc..75cbdf9b 100644
--- a/src/Network/BitTorrent/Tracker.hs
+++ b/src/Network/BitTorrent/Tracker.hs
@@ -30,11 +30,7 @@ module Network.BitTorrent.Tracker
     -- * Re-export
   , defaultPorts
-
-    -- * Scrape
-  , ScrapeInfo(..), Scrape
-  , scrapeURL
-  , scrape, scrapeOne
+  , ScrapeInfo
   ) where

 import Control.Applicative
@@ -61,9 +57,10 @@ import Network.HTTP
 import Network.URI

 import Data.Torrent
-import Network.BitTorrent.Sessions.Types
 import Network.BitTorrent.Peer
+import Network.BitTorrent.Sessions.Types
 import Network.BitTorrent.Tracker.Protocol
+import Network.BitTorrent.Tracker.HTTP

 {-----------------------------------------------------------------------
   Tracker connection
 -----------------------------------------------------------------------}
@@ -82,10 +79,10 @@ data TConnection = TConnection {
   , tconnPort :: PortNumber -- ^ The port number the client is listenning on.
   } deriving Show

+-- TODO tconnection :: SwarmSession -> TConnection
 tconnection :: Torrent -> PeerId -> PortNumber -> TConnection
 tconnection t = TConnection (tAnnounce t) (tInfoHash t)
-
 -- | used to avoid boilerplate; do NOT export me
 genericReq :: TConnection -> Progress -> AnnounceQuery
 genericReq ses pr = AnnounceQuery {
@@ -102,7 +99,6 @@ genericReq ses pr = AnnounceQuery {
   , reqEvent      = Nothing
   }
-
 -- | The first request to the tracker that should be created is
 -- 'startedReq'. It includes necessary 'Started' event field.
 --
@@ -251,59 +247,3 @@ withTracker initProgress conn action = bracket start end (action . fst)
       killThread tid
       pr <- getProgress se
       leaveTracker (tconnAnnounce conn) (stoppedReq conn pr)
-
-{-----------------------------------------------------------------------
-  Scrape
------------------------------------------------------------------------}
-
--- | Scrape info about a set of torrents.
-type Scrape = Map InfoHash ScrapeInfo
-
--- | Trying to convert /announce/ URL to /scrape/ URL. If 'scrapeURL'
--- gives 'Nothing' then tracker do not support scraping. The info hash
--- list is used to restrict the tracker's report to that particular
--- torrents. Note that scrapping of multiple torrents may not be
--- supported. (Even if scrapping convention is supported)
---
-scrapeURL :: URI -> [InfoHash] -> Maybe URI
-scrapeURL uri ihs = do
-  newPath <- replace (BC.pack (uriPath uri))
-  let newURI = uri { uriPath = BC.unpack newPath }
-  return (foldl addHashToURI newURI ihs)
-    where
-      replace :: ByteString -> Maybe ByteString
-      replace p
-        | ps <- BC.splitWith (== '/') p
-        , "announce" `B.isPrefixOf` last ps
-        = let newSuff = "scrape" <> B.drop (B.length "announce") (last ps)
-          in Just (B.intercalate "/" (init ps ++ [newSuff]))
-        | otherwise = Nothing
-
--- | For each 'InfoHash' of torrents request scrape info from the tracker.
--- However if the info hash list is 'null', the tracker should list
--- all available torrents.
--- Note that the 'URI' should be /announce/ URI, not /scrape/ URI.
---
-scrape :: URI                  -- ^ Announce 'URI'.
-       -> [InfoHash]           -- ^ Torrents to be scrapped.
-       -> IO (Result Scrape)  -- ^ 'ScrapeInfo' for each torrent.
-scrape announce ihs
-  | Just uri<- scrapeURL announce ihs = do
-    rawResp  <- simpleHTTP (Request uri GET [] "")
-    respBody <- getResponseBody rawResp
-    return (decoded (BC.pack respBody))
-
-  | otherwise = return (Left "Tracker do not support scraping")
-
--- | More particular version of 'scrape', just for one torrent.
---
-scrapeOne :: URI                     -- ^ Announce 'URI'
-          -> InfoHash                -- ^ Hash of the torrent info.
-          -> IO (Result ScrapeInfo)  -- ^ 'ScrapeInfo' for the torrent.
-scrapeOne uri ih = extract <$> scrape uri [ih]
-  where
-    extract (Right m)
-      | Just s <- M.lookup ih m = Right s
-      | otherwise = Left "unable to find info hash in response dict"
-    extract (Left e) = Left e
diff --git a/src/Network/BitTorrent/Tracker/HTTP.hs b/src/Network/BitTorrent/Tracker/HTTP.hs
new file mode 100644
index 00000000..38388316
--- /dev/null
+++ b/src/Network/BitTorrent/Tracker/HTTP.hs
@@ -0,0 +1,115 @@
+{-# LANGUAGE OverloadedStrings #-}
+module Network.BitTorrent.Tracker.HTTP
+       ( askTracker, leaveTracker
+       , scrapeURL
+       ) where
+
+import Control.Applicative
+import Control.Monad
+import Data.BEncode
+import Data.ByteString as B
+import Data.ByteString.Char8 as BC
+import Data.List as L
+import Data.Map as M
+import Data.Monoid
+import Data.URLEncoded as URL
+import Network.URI
+import Network.HTTP
+
+import Data.Torrent
+import Network.BitTorrent.Tracker.Protocol
+
+{-----------------------------------------------------------------------
+  Announce
+-----------------------------------------------------------------------}
+
+encodeRequest :: URI -> AnnounceQuery -> URI
+encodeRequest announce req = URL.urlEncode req
+                                    `addToURI`      announce
+                                    `addHashToURI`  reqInfoHash req
+
+mkHTTPRequest :: URI -> Request ByteString
+mkHTTPRequest uri = Request uri GET [] ""
+
+-- TODO rename to something like "announceBlahBlah"
+
+-- | Send an announce request and receive a response from the tracker
+-- specified in the announce list. This function throws an 'IOException'
+-- if it cannot send the request, receive the response, or decode it.
+--
+askTracker :: URI -> AnnounceQuery -> IO AnnounceInfo
+askTracker announce req = do
+  let r = mkHTTPRequest (encodeRequest announce req)
+
+  rawResp  <- simpleHTTP r
+  respBody <- getResponseBody rawResp
+  checkResult $ decoded respBody
+  where
+    checkResult (Left err)
+      = ioError $ userError $ err ++ " in tracker response"
+    checkResult (Right (Failure err))
+      = ioError $ userError $ show err ++ " in tracker response"
+    checkResult (Right resp) = return resp
+
+-- | The same as 'askTracker' but ignores the response. Used in
+-- conjunction with the 'Stopped' event.
+leaveTracker :: URI -> AnnounceQuery -> IO ()
+leaveTracker announce req = do
+  let r = mkHTTPRequest (encodeRequest announce req)
+  void $ simpleHTTP r >>= getResponseBody
+
+{-----------------------------------------------------------------------
+  Scrape
+-----------------------------------------------------------------------}
+
+-- | Scrape info about a set of torrents.
+type Scrape = Map InfoHash ScrapeInfo
+
+-- | Try to convert an /announce/ URL to a /scrape/ URL. If 'scrapeURL'
+-- gives 'Nothing' then the tracker does not support scraping. The info
+-- hash list is used to restrict the tracker's report to those
+-- particular torrents. Note that scraping of multiple torrents may not
+-- be supported, even if the scrape convention itself is supported.
+--
+scrapeURL :: URI -> [InfoHash] -> Maybe URI
+scrapeURL uri ihs = do
+  newPath <- replace (BC.pack (uriPath uri))
+  let newURI = uri { uriPath = BC.unpack newPath }
+  return (L.foldl addHashToURI newURI ihs)
+    where
+      replace :: ByteString -> Maybe ByteString
+      replace p
+        | ps <- BC.splitWith (== '/') p
+        , "announce" `B.isPrefixOf` L.last ps
+        = let newSuff = "scrape" <> B.drop (B.length "announce") (L.last ps)
+          in Just (B.intercalate "/" (L.init ps ++ [newSuff]))
+        | otherwise = Nothing
+
+
+-- | Request scrape info from the tracker for each 'InfoHash'. If the
+-- info hash list is 'null', the tracker should list all available
+-- torrents.
+-- Note that the 'URI' should be an /announce/ URI, not a /scrape/ URI.
+--
+scrape :: URI                  -- ^ Announce 'URI'.
+       -> [InfoHash]           -- ^ Torrents to be scraped.
+       -> IO (Result Scrape)   -- ^ 'ScrapeInfo' for each torrent.
+scrape announce ihs
+  | Just uri <- scrapeURL announce ihs = do
+    rawResp  <- simpleHTTP (Request uri GET [] "")
+    respBody <- getResponseBody rawResp
+    return (decoded (BC.pack respBody))
+
+  | otherwise = return (Left "Tracker does not support scraping")
+
+-- | A more specific version of 'scrape', for a single torrent only.
+--
+scrapeOne :: URI                     -- ^ Announce 'URI'
+          -> InfoHash                -- ^ Hash of the torrent info.
+          -> IO (Result ScrapeInfo)  -- ^ 'ScrapeInfo' for the torrent.
+scrapeOne uri ih = extract <$> scrape uri [ih]
+  where
+    extract (Right m)
+      | Just s <- M.lookup ih m = Right s
+      | otherwise = Left "unable to find info hash in response dict"
+    extract (Left e) = Left e
diff --git a/src/Network/BitTorrent/Tracker/Protocol.hs b/src/Network/BitTorrent/Tracker/Protocol.hs
index 5ee61185..f5898d36 100644
--- a/src/Network/BitTorrent/Tracker/Protocol.hs
+++ b/src/Network/BitTorrent/Tracker/Protocol.hs
@@ -25,11 +25,8 @@
 {-# LANGUAGE TemplateHaskell #-}
 module Network.BitTorrent.Tracker.Protocol
   ( Event(..), AnnounceQuery(..), AnnounceInfo(..)
+  , defaultNumWant
   , defaultPorts
   , ScrapeQuery, ScrapeInfo(..)
-  , askTracker, leaveTracker
-
-    -- * Defaults
-  , defaultPorts, defaultNumWant
   ) where

@@ -44,7 +41,6 @@ import Data.List as L
 import Data.Word
 import Data.Monoid
 import Data.BEncode
-import Data.ByteString as B
 import Data.Text (Text)
 import Data.Text.Encoding
 import Data.Serialize hiding (Result)
@@ -53,7 +49,6 @@ import Data.Torrent

 import Network
 import Network.Socket
-import Network.HTTP
 import Network.URI

 import Network.BitTorrent.Peer
@@ -216,11 +211,6 @@ instance URLEncode AnnounceQuery where
     ]
   where
     s :: String -> String; s = id; {-# INLINE s #-}
-encodeRequest :: URI -> AnnounceQuery -> URI
-encodeRequest announce req = URL.urlEncode req
-                                    `addToURI`      announce
-                                    `addHashToURI`  reqInfoHash req
-
 {-----------------------------------------------------------------------
   Binary announce encoding
-----------------------------------------------------------------------}
@@ -369,36 +359,3 @@ instance Serialize ScrapeInfo where
       , siIncomplete = fromIntegral leechers
       , siName       = Nothing
       }
-
-{-----------------------------------------------------------------------
-  Tracker
------------------------------------------------------------------------}
-
-mkHTTPRequest :: URI -> Request ByteString
-mkHTTPRequest uri = Request uri GET [] ""
-
--- | Send request and receive response from the tracker specified in
--- announce list. This function throws 'IOException' if it couldn't
--- send request or receive response or decode response.
---
-askTracker :: URI -> AnnounceQuery -> IO AnnounceInfo
-askTracker announce req = do
-  let r = mkHTTPRequest (encodeRequest announce req)
-
-  rawResp  <- simpleHTTP r
-  respBody <- getResponseBody rawResp
-  checkResult $ decoded respBody
-  where
-
-    checkResult (Left err)
-      = ioError $ userError $ err ++ " in tracker response"
-    checkResult (Right (Failure err))
-      = ioError $ userError $ show err ++ " in tracker response"
-    checkResult (Right resp) = return resp
-
--- | The same as the 'askTracker' but ignore response. Used in
--- conjunction with 'Stopped'.
-leaveTracker :: URI -> AnnounceQuery -> IO ()
-leaveTracker announce req = do
-  let r = mkHTTPRequest (encodeRequest announce req)
-  void $ simpleHTTP r >>= getResponseBody
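
A minimal usage sketch for the module the patch introduces. It assumes only what the new
Network.BitTorrent.Tracker.HTTP file exports above (askTracker, leaveTracker, scrapeURL);
the tracker URL is a hypothetical example, and withAnnounce / supportsScrape are
illustrative helpers, not functions from the library.

module Main (main) where

import Control.Exception (bracket)
import Network.URI (URI, parseURI)

import Network.BitTorrent.Tracker.Protocol (AnnounceQuery, AnnounceInfo)
import Network.BitTorrent.Tracker.HTTP (askTracker, leaveTracker, scrapeURL)

-- | Announce, hand the tracker's reply to an action, and always send a
-- final, response-ignored announce afterwards. A real client would build
-- the final query with the 'Stopped' event and up-to-date progress
-- (cf. 'stoppedReq' in Network.BitTorrent.Tracker); the same query is
-- reused here only to keep the sketch short.
withAnnounce :: URI -> AnnounceQuery -> (AnnounceInfo -> IO a) -> IO a
withAnnounce announce q =
  bracket (askTracker announce q) (const (leaveTracker announce q))

-- | A tracker looks scrape-capable when its announce URL can be
-- rewritten into a scrape URL.
supportsScrape :: URI -> Bool
supportsScrape announce = scrapeURL announce [] /= Nothing

main :: IO ()
main =
  -- Hypothetical announce URL, used only to demonstrate 'scrapeURL'.
  case parseURI "http://tracker.example.org:6969/announce" of
    Nothing       -> putStrLn "malformed announce URI"
    Just announce -> do
      print (supportsScrape announce)   -- True
      print (scrapeURL announce [])     -- Just http://tracker.example.org:6969/scrape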