author     Sam T <sta.cs.vsu@gmail.com>  2013-04-08 05:16:56 +0400
committer  Sam T <sta.cs.vsu@gmail.com>  2013-04-08 05:16:56 +0400
commit     e64862237d02ca4c627fa48fe94c33ce2c757e7e (patch)
tree       967311584d2c82656d7ef673253b9a1f11e38637 /src/Network
parent     0e129341291f20eacc03beefa51376c2dc43301c (diff)
add scrapeURL implementation
Diffstat (limited to 'src/Network')
-rw-r--r--   src/Network/Torrent/Tracker.hs         | 20
-rw-r--r--   src/Network/Torrent/Tracker/Scrape.hs  | 60
2 files changed, 60 insertions(+), 20 deletions(-)
diff --git a/src/Network/Torrent/Tracker.hs b/src/Network/Torrent/Tracker.hs
index 5faa1dd9..ef2413f6 100644
--- a/src/Network/Torrent/Tracker.hs
+++ b/src/Network/Torrent/Tracker.hs
@@ -30,7 +30,6 @@ import Network.HTTP
 import Network.URI
 import Network.Torrent.PeerID
 
-import Numeric
 
 data Peer = Peer {
       peerID :: Maybe PeerID
@@ -150,22 +149,9 @@ instance URLEncode TRequest where
   where s :: String -> String; s = id; {-# INLINE s #-}
 
 encodeRequest :: TRequest -> URI
-encodeRequest req = URL.urlEncode req `addToURI` reqAnnounce req
-  `addHash` BC.unpack (getInfoHash (reqInfoHash req))
-  where
-    addHash :: URI -> String -> URI
-    addHash uri s = uri { uriQuery = uriQuery uri ++ "&info_hash=" ++ rfc1738Encode s }
-
-    rfc1738Encode :: String -> String
-    rfc1738Encode = L.concatMap (\c -> if unreserved c then [c] else encode c)
-      where
-        unreserved = (`L.elem` chars)
-        chars = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_./"
-        encode :: Char -> String
-        encode c = '%' : pHex c
-        pHex c =
-          let p = (showHex . ord $ c) ""
-          in if L.length p == 1 then '0' : p else p
+encodeRequest req = URL.urlEncode req
+                    `addToURI`     reqAnnounce req
+                    `addHashToURI` reqInfoHash req
 
 
 -- | Ports typically reserved for bittorrent.
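
The inlined percent-encoding helper above is dropped in favour of `addHashToURI`, which is not part of this diff (it presumably lives alongside `addToURI`, e.g. near the InfoHash code). A minimal Haskell sketch of what such a helper could look like, reusing the RFC 1738 encoding from the deleted code; the module name and the `InfoHash` stand-in below are assumptions, not the library's actual definitions:

-- Sketch only: the real addHashToURI is defined elsewhere in the library.
-- This version reproduces the behaviour of the deleted local helper:
-- percent-encode the raw info hash bytes (RFC 1738 style) and append
-- them to the existing query string as the info_hash parameter.
module AddHashSketch (addHashToURI) where

import Data.Char (ord)
import qualified Data.ByteString.Char8 as BC
import qualified Data.List as L
import Network.URI (URI (..))
import Numeric (showHex)

-- Assumed stand-in for the library's InfoHash type.
newtype InfoHash = InfoHash { getInfoHash :: BC.ByteString }

addHashToURI :: URI -> InfoHash -> URI
addHashToURI uri ih =
  uri { uriQuery = uriQuery uri ++ "&info_hash="
                ++ rfc1738Encode (BC.unpack (getInfoHash ih)) }
  where
    -- Keep unreserved characters, percent-encode everything else with a
    -- two-digit hex code (same rule as the removed helper).
    rfc1738Encode :: String -> String
    rfc1738Encode = L.concatMap (\c -> if unreserved c then [c] else encode c)
      where
        unreserved = (`L.elem` chars)
        chars      = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_./"
        encode c   = '%' : pHex c
        pHex c     = let p = showHex (ord c) ""
                     in if L.length p == 1 then '0' : p else p

Like the deleted code, this assumes the announce URI already carries a query string produced by URL.urlEncode, hence the leading '&'.
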
diff --git a/src/Network/Torrent/Tracker/Scrape.hs b/src/Network/Torrent/Tracker/Scrape.hs
index 2d1dec07..6bfb488f 100644
--- a/src/Network/Torrent/Tracker/Scrape.hs
+++ b/src/Network/Torrent/Tracker/Scrape.hs
@@ -1,8 +1,62 @@
+{-# LANGUAGE OverloadedStrings #-}
 module Network.Torrent.Tracker.Scrape
-  (
+  ( ScrapeInfo(..), Scrape
+  , scrapeURL
   ) where
 
+import Control.Applicative
+import Data.BEncode
+import Data.ByteString (ByteString)
+import qualified Data.ByteString as B
+import qualified Data.ByteString.Char8 as BC
+import Data.Map (Map)
+import qualified Data.Map as M
+import Data.Monoid
+import Data.Torrent.InfoHash
 import Network.URI
 
-scrapeURL :: URI -> Maybe URI
-scrapeURL = undefined
+data ScrapeInfo = ScrapeInfo {
+    siComplete   :: Int              -- ^ Number of seeders.
+  , siDownloaded :: Int
+    -- ^ Total number of times the tracker has registered a completion.
+  , siIncomplete :: Int              -- ^ Number of leechers.
+  , siName       :: Maybe ByteString -- ^
+  } deriving (Show, Eq)
+
+type Scrape = Map InfoHash ScrapeInfo
+
+instance BEncodable ScrapeInfo where
+  toBEncode si = fromAssocs
+    [ "complete"   -->  siComplete si
+    , "downloaded" -->  siDownloaded si
+    , "incomplete" -->  siIncomplete si
+    , "name"       -->? siName si
+    ]
+
+  fromBEncode (BDict d) =
+    ScrapeInfo <$> d >--  "complete"
+               <*> d >--  "downloaded"
+               <*> d >--  "incomplete"
+               <*> d >--? "name"
+  fromBEncode _ = decodingError "ScrapeInfo"
+
+-- TODO: encode info hash
+-- | Try to convert an /announce/ URL to a /scrape/ URL. If 'scrapeURL'
+--   gives 'Nothing', the tracker does not support scraping. The info hash
+--   list is used to restrict the tracker's report to those particular
+--   torrents. Note that scraping of multiple torrents may not be
+--   supported, even if the scrape convention itself is.
+--
+scrapeURL :: URI -> [InfoHash] -> Maybe URI
+scrapeURL uri ihs = do
+  newPath <- replace (BC.pack (uriPath uri))
+  let newURI = uri { uriPath = BC.unpack newPath }
+  return (foldl addHashToURI newURI ihs)
+ where
+  replace :: ByteString -> Maybe ByteString
+  replace p
+    | ps <- BC.splitWith (== '/') p
+    , "announce" `B.isPrefixOf` last ps
+    = let newSuff = "scrape" <> B.drop (B.length "announce") (last ps)
+      in Just (B.intercalate "/" (init ps ++ [newSuff]))
+    | otherwise = Nothing
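
To make the announce-to-scrape rewrite concrete, here is a small usage sketch; the tracker URLs are made up, and with an empty info hash list only the path rewriting is exercised (the query part is produced by addHashToURI):

-- Usage sketch with hypothetical tracker URLs.
import Network.URI (parseURI, uriPath)
import Network.Torrent.Tracker.Scrape (scrapeURL)

main :: IO ()
main = do
  let Just announce = parseURI "http://tracker.example.org:6969/announce.php"
  -- The last path segment starts with "announce", so it is rewritten to
  -- "scrape" while keeping the suffix: /announce.php -> /scrape.php
  print (uriPath <$> scrapeURL announce [])   -- Just "/scrape.php"

  let Just plain = parseURI "http://tracker.example.org/peers"
  -- No trailing "announce" segment: scraping is reported as unsupported.
  print (scrapeURL plain [])                  -- Nothing
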