From e64862237d02ca4c627fa48fe94c33ce2c757e7e Mon Sep 17 00:00:00 2001 From: Sam T Date: Mon, 8 Apr 2013 05:16:56 +0400 Subject: add scrapeURL implementation --- src/Data/Torrent/InfoHash.hs | 24 ++++++++++++++ src/Network/Torrent/Tracker.hs | 20 ++---------- src/Network/Torrent/Tracker/Scrape.hs | 60 +++++++++++++++++++++++++++++++++-- 3 files changed, 84 insertions(+), 20 deletions(-) (limited to 'src') diff --git a/src/Data/Torrent/InfoHash.hs b/src/Data/Torrent/InfoHash.hs index 448e9a5a..b2ca44ee 100644 --- a/src/Data/Torrent/InfoHash.hs +++ b/src/Data/Torrent/InfoHash.hs @@ -1,5 +1,6 @@ module Data.Torrent.InfoHash ( InfoHash (getInfoHash) + , addHashToURI -- ^ Construction , hash, hashlazy @@ -10,6 +11,8 @@ module Data.Torrent.InfoHash import Control.Applicative import Data.Foldable +import Data.List as L +import Data.Char import Data.ByteString (ByteString) import qualified Data.ByteString as B import qualified Data.ByteString.Char8 as BC @@ -18,6 +21,9 @@ import qualified Data.ByteString.Builder.Prim as B import qualified Data.ByteString.Lazy as Lazy import Data.Serialize import qualified Crypto.Hash.SHA1 as C +import Network.URI +import Numeric + -- | Exactly 20 bytes long SHA1 hash. newtype InfoHash = InfoHash { getInfoHash :: ByteString } @@ -39,3 +45,21 @@ hashlazy = InfoHash . C.hashlazy ppHex :: InfoHash -> ByteString ppHex = Lazy.toStrict . B.toLazyByteString . foldMap (B.primFixed B.word8HexFixed) . B.unpack . getInfoHash + +addHashToURI :: URI -> InfoHash -> URI +addHashToURI uri s = uri { + uriQuery = uriQuery uri ++ mkPref (uriQuery uri) ++ + "info_hash=" ++ rfc1738Encode (BC.unpack (getInfoHash s)) + } + where + mkPref [] = "?" + mkPref ('?' : _) = "&" + mkPref _ = error "addHashToURI" + + rfc1738Encode = L.concatMap (\c -> if unreservedS c then [c] else encodeHex c) + where + unreservedS = (`L.elem` chars) + chars = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_./" + encodeHex c = '%' : pHex c + pHex c = let p = (showHex . 
ord $ c) "" + in if L.length p == 1 then '0' : p else p diff --git a/src/Network/Torrent/Tracker.hs b/src/Network/Torrent/Tracker.hs index 5faa1dd9..ef2413f6 100644 --- a/src/Network/Torrent/Tracker.hs +++ b/src/Network/Torrent/Tracker.hs @@ -30,7 +30,6 @@ import Network.HTTP import Network.URI import Network.Torrent.PeerID -import Numeric data Peer = Peer { peerID :: Maybe PeerID @@ -150,22 +149,9 @@ instance URLEncode TRequest where where s :: String -> String; s = id; {-# INLINE s #-} encodeRequest :: TRequest -> URI -encodeRequest req = URL.urlEncode req `addToURI` reqAnnounce req - `addHash` BC.unpack (getInfoHash (reqInfoHash req)) - where - addHash :: URI -> String -> URI - addHash uri s = uri { uriQuery = uriQuery uri ++ "&info_hash=" ++ rfc1738Encode s } - - rfc1738Encode :: String -> String - rfc1738Encode = L.concatMap (\c -> if unreserved c then [c] else encode c) - where - unreserved = (`L.elem` chars) - chars = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_./" - encode :: Char -> String - encode c = '%' : pHex c - pHex c = - let p = (showHex . ord $ c) "" - in if L.length p == 1 then '0' : p else p +encodeRequest req = URL.urlEncode req + `addToURI` reqAnnounce req + `addHashToURI` reqInfoHash req -- | Ports typically reserved for bittorrent. 
diff --git a/src/Network/Torrent/Tracker/Scrape.hs b/src/Network/Torrent/Tracker/Scrape.hs index 2d1dec07..6bfb488f 100644 --- a/src/Network/Torrent/Tracker/Scrape.hs +++ b/src/Network/Torrent/Tracker/Scrape.hs @@ -1,8 +1,62 @@ +{-# LANGUAGE OverloadedStrings #-} module Network.Torrent.Tracker.Scrape - ( + ( ScrapeInfo(..), Scrape + , scrapeURL ) where +import Control.Applicative +import Data.BEncode +import Data.ByteString (ByteString) +import qualified Data.ByteString as B +import qualified Data.ByteString.Char8 as BC +import Data.Map (Map) +import qualified Data.Map as M +import Data.Monoid +import Data.Torrent.InfoHash import Network.URI -scrapeURL :: URI -> Maybe URI -scrapeURL = undefined +data ScrapeInfo = ScrapeInfo { + siComplete :: Int -- ^ Number of seeders. + , siDownloaded :: Int + -- ^ Total number of times the tracker has registered a completion. + , siIncomplete :: Int -- ^ Number of leechers + , siName :: Maybe ByteString -- ^ + } deriving (Show, Eq) + +type Scrape = Map InfoHash ScrapeInfo + +instance BEncodable ScrapeInfo where + toBEncode si = fromAssocs + [ "complete" --> siComplete si + , "downloaded" --> siDownloaded si + , "incomplete" --> siIncomplete si + , "name" -->? siName si + ] + + fromBEncode (BDict d) = + ScrapeInfo <$> d >-- "complete" + <*> d >-- "downloaded" + <*> d >-- "incomplete" + <*> d >--? "name" + fromBEncode _ = decodingError "ScrapeInfo" + +-- TODO: encode info hash +-- | Tries to convert an /announce/ URL to a /scrape/ URL. If 'scrapeURL' +-- gives 'Nothing' then the tracker does not support scraping. The info hash +-- list is used to restrict the tracker's report to those particular +-- torrents. Note that scraping of multiple torrents may not be +-- supported. 
(Even if the scraping convention is supported) +-- +scrapeURL :: URI -> [InfoHash] -> Maybe URI +scrapeURL uri ihs = do + newPath <- replace (BC.pack (uriPath uri)) + let newURI = uri { uriPath = BC.unpack newPath } + return (foldl addHashToURI newURI ihs) + where + replace :: ByteString -> Maybe ByteString + replace p + | ps <- BC.splitWith (== '/') p + , "announce" `B.isPrefixOf` last ps + = let newSuff = "scrape" <> B.drop (B.length "announce") (last ps) + in Just (B.intercalate "/" (init ps ++ [newSuff])) + | otherwise = Nothing -- cgit v1.2.3