From 0e3903fa3d486c57504837fd497a3a348793f7fc Mon Sep 17 00:00:00 2001
From: Sam T <pxqr.sta@gmail.com>
Date: Tue, 11 Jun 2013 08:02:09 +0400
Subject: ~ Merge Scrape to Tracker.

---
 bittorrent.cabal                           |   4 -
 src/Network/BitTorrent/Internal.hs         |   8 +-
 src/Network/BitTorrent/Tracker.hs          | 115 ++++++++++++++++++++++++++---
 src/Network/BitTorrent/Tracker/Protocol.hs |   5 +-
 src/Network/BitTorrent/Tracker/Scrape.hs   | 115 -----------------------------
 5 files changed, 111 insertions(+), 136 deletions(-)
 delete mode 100644 src/Network/BitTorrent/Tracker/Scrape.hs

diff --git a/bittorrent.cabal b/bittorrent.cabal
index febea84e..326cbb2a 100644
--- a/bittorrent.cabal
+++ b/bittorrent.cabal
@@ -35,14 +35,10 @@ flag testing
 library
   exposed-modules: Data.Torrent
                  , Data.Bitfield
-
                  , Network.BitTorrent
                  , Network.BitTorrent.Extension
                  , Network.BitTorrent.Peer
-
                  , Network.BitTorrent.Tracker
-                 , Network.BitTorrent.Tracker.Scrape
-
                  , Network.BitTorrent.Exchange

   other-modules:   Network.BitTorrent.Internal

diff --git a/src/Network/BitTorrent/Internal.hs b/src/Network/BitTorrent/Internal.hs
index 39e10ce2..91dc35d5 100644
--- a/src/Network/BitTorrent/Internal.hs
+++ b/src/Network/BitTorrent/Internal.hs
@@ -226,12 +226,12 @@ updateOutcoming PeerSession {..} =
     updateTimeout (eventManager (clientSession swarmSession))
       outcomingTimeout maxOutcomingTime

-sendKA :: Socket -> SwarmSession -> IO ()
-sendKA sock SwarmSession {..} = do
+sendKA :: Socket -> IO ()
+sendKA sock {- SwarmSession {..} -} = do
   print "I'm sending keep alive."
   sendAll sock (encode BT.KeepAlive)
-  let mgr = eventManager clientSession
-  updateTimeout mgr
+--  let mgr = eventManager clientSession
+--  updateTimeout mgr
   print "Done.."

 abortSession :: IO ()

diff --git a/src/Network/BitTorrent/Tracker.hs b/src/Network/BitTorrent/Tracker.hs
index 04a7b43e..275b5422 100644
--- a/src/Network/BitTorrent/Tracker.hs
+++ b/src/Network/BitTorrent/Tracker.hs
@@ -6,16 +6,18 @@
 -- Portability :  non-portable
 --
 -- This module provides high level API for peer->tracker
--- communication.
+-- communication. A tracker is used to discover other peers in the
+-- network.
 --
+-- By convention most trackers support another form of request,
+-- which queries the state of a given torrent (or all torrents) that
+-- the tracker is managing. This module also provides a way to
+-- easily request scrape info for a particular list of torrents.
+--
+{-# LANGUAGE OverloadedStrings #-}
 {-# LANGUAGE RecordWildCards #-}
 module Network.BitTorrent.Tracker
-       ( module Network.BitTorrent.Tracker.Scrape
-
-       , withTracker, completedReq
-
-         -- * Progress
-       , Progress(..), startProgress
+       ( withTracker, completedReq

          -- * Connection
        , TConnection(..), tconnection
@@ -27,6 +29,11 @@ module Network.BitTorrent.Tracker

          -- * Re-export
        , defaultPorts
+
+         -- * Scrape
+       , ScrapeInfo(..), Scrape
+       , scrapeURL
+       , scrape, scrapeOne
        ) where

 import Control.Applicative
@@ -34,15 +41,23 @@ import Control.Concurrent
 import Control.Concurrent.STM
 import Control.Exception
 import Control.Monad
+import Data.BEncode
+import Data.ByteString (ByteString)
+import qualified Data.ByteString as B
+import qualified Data.ByteString.Char8 as BC
+import Data.Map (Map)
+import qualified Data.Map as M
+import Data.Monoid
 import Data.IORef
-import Data.Torrent
+
 import Network
+import Network.HTTP
 import Network.URI

+import Data.Torrent
 import Network.BitTorrent.Internal
 import Network.BitTorrent.Peer
 import Network.BitTorrent.Tracker.Protocol
-import Network.BitTorrent.Tracker.Scrape


 -- | 'TConnection' (shorthand for Tracker session) combines tracker request
@@ -179,3 +194,85 @@ withTracker initProgress conn action = bracket start end (action . fst)
     killThread tid
     pr <- getProgress se
     askTracker $ stoppedReq conn pr
+
+
+
+-- | Information about a particular torrent.
+data ScrapeInfo = ScrapeInfo {
+    siComplete   :: Int
+    -- ^ Number of seeders - peers with the entire file.
+  , siDownloaded :: Int
+    -- ^ Total number of times the tracker has registered a completion.
+  , siIncomplete :: Int
+    -- ^ Number of leechers.
+  , siName       :: Maybe ByteString
+    -- ^ Name of the torrent file, as specified by the "name"
+    --   field in the info section of the .torrent file.
+  } deriving (Show, Eq)
+
+-- | Scrape info about a set of torrents.
+type Scrape = Map InfoHash ScrapeInfo
+
+instance BEncodable ScrapeInfo where
+  toBEncode si = fromAssocs
+    [ "complete"   -->  siComplete si
+    , "downloaded" -->  siDownloaded si
+    , "incomplete" -->  siIncomplete si
+    , "name"       -->? siName si
+    ]
+
+  fromBEncode (BDict d) =
+    ScrapeInfo <$> d >--  "complete"
+               <*> d >--  "downloaded"
+               <*> d >--  "incomplete"
+               <*> d >--? "name"
+  fromBEncode _ = decodingError "ScrapeInfo"
+
+-- | Tries to convert an /announce/ URL to a /scrape/ URL. If 'scrapeURL'
+-- gives 'Nothing' then the tracker does not support scraping. The info
+-- hash list is used to restrict the tracker's report to those
+-- particular torrents. Note that scraping of multiple torrents may not
+-- be supported, even if the scrape convention itself is.
+--
+scrapeURL :: URI -> [InfoHash] -> Maybe URI
+scrapeURL uri ihs = do
+  newPath <- replace (BC.pack (uriPath uri))
+  let newURI = uri { uriPath = BC.unpack newPath }
+  return (foldl addHashToURI newURI ihs)
+  where
+    replace :: ByteString -> Maybe ByteString
+    replace p
+      | ps <- BC.splitWith (== '/') p
+      , "announce" `B.isPrefixOf` last ps
+      = let newSuff = "scrape" <> B.drop (B.length "announce") (last ps)
+        in Just (B.intercalate "/" (init ps ++ [newSuff]))
+      | otherwise = Nothing
+
+
+-- | For each torrent's 'InfoHash', request scrape info from the tracker.
+-- However, if the info hash list is 'null', the tracker should list
+-- all available torrents.
+-- Note that the 'URI' should be an /announce/ URI, not a /scrape/ URI.
+--
+scrape :: URI                -- ^ Announce 'URI'.
+       -> [InfoHash]         -- ^ Torrents to be scraped.
+       -> IO (Result Scrape) -- ^ 'ScrapeInfo' for each torrent.
+scrape announce ihs
+  | Just uri <- scrapeURL announce ihs = do
+    rawResp  <- simpleHTTP (Request uri GET [] "")
+    respBody <- getResponseBody rawResp
+    return (decoded (BC.pack respBody))
+
+  | otherwise = return (Left "Tracker does not support scraping")
+
+-- | A more specific version of 'scrape', for just one torrent.
+--
+scrapeOne :: URI                    -- ^ Announce 'URI'.
+          -> InfoHash               -- ^ Hash of the torrent info.
+          -> IO (Result ScrapeInfo) -- ^ 'ScrapeInfo' for the torrent.
+scrapeOne uri ih = extract <$> scrape uri [ih]
+  where
+    extract (Right m)
+      | Just s <- M.lookup ih m = Right s
+      | otherwise = Left "unable to find info hash in response dict"
+    extract (Left e) = Left e

diff --git a/src/Network/BitTorrent/Tracker/Protocol.hs b/src/Network/BitTorrent/Tracker/Protocol.hs
index af48e3e9..c94a2dfc 100644
--- a/src/Network/BitTorrent/Tracker/Protocol.hs
+++ b/src/Network/BitTorrent/Tracker/Protocol.hs
@@ -21,9 +21,7 @@
 {-# LANGUAGE OverloadedStrings #-}
 -- TODO: add "compact" field to TRequest
 module Network.BitTorrent.Tracker.Protocol
-       ( module Network.BitTorrent.Tracker.Scrape
-
-       , Event(..), TRequest(..), TResponse(..)
+       ( Event(..), TRequest(..), TResponse(..)
        , askTracker

          -- * Defaults
@@ -50,7 +48,6 @@ import Network.HTTP
 import Network.URI

 import Network.BitTorrent.Peer
-import Network.BitTorrent.Tracker.Scrape

diff --git a/src/Network/BitTorrent/Tracker/Scrape.hs b/src/Network/BitTorrent/Tracker/Scrape.hs
deleted file mode 100644
index 0181cf9f..00000000
--- a/src/Network/BitTorrent/Tracker/Scrape.hs
+++ /dev/null
@@ -1,115 +0,0 @@
--- |
--- Copyright   :  (c) Sam T. 2013
--- License     :  MIT
--- Maintainer  :  pxqr.sta@gmail.com
--- Stability   :  experimental
--- Portability :  portable
---
--- By convention most trackers support another form of request,
--- which queries the state of a given torrent (or all torrents) that the
--- tracker is managing. This module provides a way to easily request
--- scrape info for a particular torrent list.
---
-{-# LANGUAGE OverloadedStrings #-}
-module Network.BitTorrent.Tracker.Scrape
-       ( ScrapeInfo(..), Scrape
-       , scrapeURL
-
-         -- * Requests
-       , scrape
-       , scrapeOne
-       ) where
-
-import Control.Applicative
-import Data.BEncode
-import Data.ByteString (ByteString)
-import qualified Data.ByteString as B
-import qualified Data.ByteString.Char8 as BC
-import Data.Map (Map)
-import qualified Data.Map as M
-import Data.Monoid
-import Network.URI
-import Network.HTTP
-
-import Data.Torrent
-
-
--- | Information about particular torrent.
-data ScrapeInfo = ScrapeInfo {
-    siComplete   :: Int
-    -- ^ Number of seeders - peers with the entire file.
-  , siDownloaded :: Int
-    -- ^ Total number of times the tracker has registered a completion.
-  , siIncomplete :: Int
-    -- ^ Number of leechers.
-  , siName       :: Maybe ByteString
-    -- ^ Name of the torrent file, as specified by the "name"
-    --   file in the info section of the .torrent file.
-  } deriving (Show, Eq)
-
--- | Scrape info about a set of torrents.
-type Scrape = Map InfoHash ScrapeInfo
-
-instance BEncodable ScrapeInfo where
-  toBEncode si = fromAssocs
-    [ "complete"   -->  siComplete si
-    , "downloaded" -->  siDownloaded si
-    , "incomplete" -->  siIncomplete si
-    , "name"       -->? siName si
-    ]
-
-  fromBEncode (BDict d) =
-    ScrapeInfo <$> d >--  "complete"
-               <*> d >--  "downloaded"
-               <*> d >--  "incomplete"
-               <*> d >--? "name"
-  fromBEncode _ = decodingError "ScrapeInfo"
-
--- | Trying to convert /announce/ URL to /scrape/ URL. If 'scrapeURL'
--- gives 'Nothing' then tracker do not support scraping. The info hash
--- list is used to restrict the tracker's report to that particular
--- torrents. Note that scrapping of multiple torrents may not be
--- supported. (Even if scrapping convention is supported)
---
-scrapeURL :: URI -> [InfoHash] -> Maybe URI
-scrapeURL uri ihs = do
-  newPath <- replace (BC.pack (uriPath uri))
-  let newURI = uri { uriPath = BC.unpack newPath }
-  return (foldl addHashToURI newURI ihs)
-  where
-    replace :: ByteString -> Maybe ByteString
-    replace p
-      | ps <- BC.splitWith (== '/') p
-      , "announce" `B.isPrefixOf` last ps
-      = let newSuff = "scrape" <> B.drop (B.length "announce") (last ps)
-        in Just (B.intercalate "/" (init ps ++ [newSuff]))
-      | otherwise = Nothing
-
-
--- | For each 'InfoHash' of torrents request scrape info from the tracker.
--- However if the info hash list is 'null', the tracker should list
--- all available torrents.
--- Note that the 'URI' should be /announce/ URI, not /scrape/ URI.
---
-scrape :: URI                -- ^ Announce 'URI'.
-       -> [InfoHash]         -- ^ Torrents to be scrapped.
-       -> IO (Result Scrape) -- ^ 'ScrapeInfo' for each torrent.
-scrape announce ihs
-  | Just uri<- scrapeURL announce ihs = do
-    rawResp  <- simpleHTTP (Request uri GET [] "")
-    respBody <- getResponseBody rawResp
-    return (decoded (BC.pack respBody))
-
-  | otherwise = return (Left "Tracker do not support scraping")
-
--- | More particular version of 'scrape', just for one torrent.
---
-scrapeOne :: URI                    -- ^ Announce 'URI'
-          -> InfoHash               -- ^ Hash of the torrent info.
-          -> IO (Result ScrapeInfo) -- ^ 'ScrapeInfo' for the torrent.
-scrapeOne uri ih = extract <$> scrape uri [ih]
-  where
-    extract (Right m)
-      | Just s <- M.lookup ih m = Right s
-      | otherwise = Left "unable to find info hash in response dict"
-    extract (Left e) = Left e
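
Usage sketch (editorial addition, not part of the commit): a minimal client
exercising the API merged above, assuming the package builds with the export
list shown in this patch. The tracker address is a hypothetical placeholder.
Passing an empty info hash list relies on the doc comment's statement that a
'null' list asks the tracker to report on every torrent it manages.

    module Main (main) where

    import qualified Data.Map as M
    import Network.URI (parseURI)

    import Network.BitTorrent.Tracker (scrape, scrapeURL)

    main :: IO ()
    main =
      case parseURI "http://tracker.example.org/announce" of
        Nothing       -> putStrLn "malformed announce URL"
        Just announce -> do
          -- Pure part: show the announce -> scrape path rewrite; with an
          -- empty info hash list no hashes are appended to the query.
          print (scrapeURL announce [])

          -- Effectful part: an empty list asks the tracker to report on
          -- every torrent it manages (when it supports scraping at all).
          res <- scrape announce []
          case res of
            Left err -> putStrLn ("scrape failed: " ++ err)
            Right m  -> putStrLn ("torrents scraped: " ++ show (M.size m))

Since the docs above warn that scraping several torrents at once may be
unsupported even where the scrape convention is, scrapeOne is the safer entry
point when only a single torrent is of interest.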