author    Sam T <pxqr.sta@gmail.com>  2013-07-21 02:13:59 +0400
committer Sam T <pxqr.sta@gmail.com>  2013-07-21 02:13:59 +0400
commit    5d5a7dab5ab0d5d7e35617f8476382a99b38d6db (patch)
tree      dccb1f72a7f3ccdb4d1896272dcf6bbedebf2254 /src
parent    da55acae9bba103ddda4385cb4d8918afcad7be1 (diff)
~ Move HTTP tracker stuff to its own module.
Diffstat (limited to 'src')
-rw-r--r--  src/Network/BitTorrent/Tracker.hs           |  68
-rw-r--r--  src/Network/BitTorrent/Tracker/HTTP.hs      | 115
-rw-r--r--  src/Network/BitTorrent/Tracker/Protocol.hs  |  45
3 files changed, 120 insertions(+), 108 deletions(-)
diff --git a/src/Network/BitTorrent/Tracker.hs b/src/Network/BitTorrent/Tracker.hs
index 5acaa3cc..75cbdf9b 100644
--- a/src/Network/BitTorrent/Tracker.hs
+++ b/src/Network/BitTorrent/Tracker.hs
@@ -30,11 +30,7 @@ module Network.BitTorrent.Tracker
 
    -- * Re-export
   , defaultPorts
-
-   -- * Scrape
-  , ScrapeInfo(..), Scrape
-  , scrapeURL
-  , scrape, scrapeOne
+  , ScrapeInfo
   ) where
 
 import Control.Applicative
@@ -61,9 +57,10 @@ import Network.HTTP
 import Network.URI
 
 import Data.Torrent
-import Network.BitTorrent.Sessions.Types
 import Network.BitTorrent.Peer
+import Network.BitTorrent.Sessions.Types
 import Network.BitTorrent.Tracker.Protocol
+import Network.BitTorrent.Tracker.HTTP
 
 {-----------------------------------------------------------------------
     Tracker connection
@@ -82,10 +79,10 @@ data TConnection = TConnection {
   , tconnPort     :: PortNumber -- ^ The port number the client is listenning on.
   } deriving Show
 
+-- TODO tconnection :: SwarmSession -> TConnection
 tconnection :: Torrent -> PeerId -> PortNumber -> TConnection
 tconnection t = TConnection (tAnnounce t) (tInfoHash t)
 
-
 -- | used to avoid boilerplate; do NOT export me
 genericReq :: TConnection -> Progress -> AnnounceQuery
 genericReq ses pr = AnnounceQuery {
@@ -102,7 +99,6 @@ genericReq ses pr = AnnounceQuery {
   , reqEvent      = Nothing
   }
 
-
 -- | The first request to the tracker that should be created is
 -- 'startedReq'. It includes necessary 'Started' event field.
 --
@@ -251,59 +247,3 @@ withTracker initProgress conn action = bracket start end (action . fst)
     killThread tid
     pr <- getProgress se
     leaveTracker (tconnAnnounce conn) (stoppedReq conn pr)
-
-{-----------------------------------------------------------------------
-    Scrape
------------------------------------------------------------------------}
-
--- | Scrape info about a set of torrents.
-type Scrape = Map InfoHash ScrapeInfo
-
--- | Trying to convert /announce/ URL to /scrape/ URL. If 'scrapeURL'
--- gives 'Nothing' then tracker do not support scraping. The info hash
--- list is used to restrict the tracker's report to that particular
--- torrents. Note that scrapping of multiple torrents may not be
--- supported. (Even if scrapping convention is supported)
---
-scrapeURL :: URI -> [InfoHash] -> Maybe URI
-scrapeURL uri ihs = do
-  newPath <- replace (BC.pack (uriPath uri))
-  let newURI = uri { uriPath = BC.unpack newPath }
-  return (foldl addHashToURI newURI ihs)
- where
-  replace :: ByteString -> Maybe ByteString
-  replace p
-    | ps <- BC.splitWith (== '/') p
-    , "announce" `B.isPrefixOf` last ps
-    = let newSuff = "scrape" <> B.drop (B.length "announce") (last ps)
-      in Just (B.intercalate "/" (init ps ++ [newSuff]))
-    | otherwise = Nothing
-
-
--- | For each 'InfoHash' of torrents request scrape info from the tracker.
--- However if the info hash list is 'null', the tracker should list
--- all available torrents.
--- Note that the 'URI' should be /announce/ URI, not /scrape/ URI.
---
-scrape :: URI                -- ^ Announce 'URI'.
-       -> [InfoHash]         -- ^ Torrents to be scrapped.
-       -> IO (Result Scrape) -- ^ 'ScrapeInfo' for each torrent.
-scrape announce ihs
-  | Just uri <- scrapeURL announce ihs = do
-    rawResp  <- simpleHTTP (Request uri GET [] "")
-    respBody <- getResponseBody rawResp
-    return (decoded (BC.pack respBody))
-
-  | otherwise = return (Left "Tracker do not support scraping")
-
--- | More particular version of 'scrape', just for one torrent.
---
-scrapeOne :: URI                     -- ^ Announce 'URI'
-          -> InfoHash                -- ^ Hash of the torrent info.
-          -> IO (Result ScrapeInfo)  -- ^ 'ScrapeInfo' for the torrent.
-scrapeOne uri ih = extract <$> scrape uri [ih]
-  where
-    extract (Right m)
-      | Just s <- M.lookup ih m = Right s
-      | otherwise = Left "unable to find info hash in response dict"
-    extract (Left e) = Left e
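
The scrape block removed above reappears in the new Network.BitTorrent.Tracker.HTTP
module below. As a quick illustration of the path rewriting scrapeURL performs
(a hedged GHCi-style sketch, not part of the commit; the tracker host is made up):

    >>> let Just uri = parseURI "http://tracker.example.org/announce.php"
    >>> fmap uriPath (scrapeURL uri [])
    Just "/scrape.php"  -- the "announce" prefix of the last path segment becomes "scrape"

Passing a non-empty info hash list additionally appends one info_hash query
parameter per hash via addHashToURI.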
diff --git a/src/Network/BitTorrent/Tracker/HTTP.hs b/src/Network/BitTorrent/Tracker/HTTP.hs
new file mode 100644
index 00000000..38388316
--- /dev/null
+++ b/src/Network/BitTorrent/Tracker/HTTP.hs
@@ -0,0 +1,115 @@
+{-# LANGUAGE OverloadedStrings #-}
+module Network.BitTorrent.Tracker.HTTP
+       ( askTracker, leaveTracker
+       , scrapeURL
+       ) where
+
+import Control.Applicative
+import Control.Monad
+import Data.BEncode
+import Data.ByteString as B
+import Data.ByteString.Char8 as BC
+import Data.List as L
+import Data.Map as M
+import Data.Monoid
+import Data.URLEncoded as URL
+import Network.URI
+import Network.HTTP
+
+import Data.Torrent
+import Network.BitTorrent.Tracker.Protocol
+
+{-----------------------------------------------------------------------
+    Announce
+-----------------------------------------------------------------------}
+
+encodeRequest :: URI -> AnnounceQuery -> URI
+encodeRequest announce req = URL.urlEncode req
+                    `addToURI`      announce
+                    `addHashToURI`  reqInfoHash req
+
+mkHTTPRequest :: URI -> Request ByteString
+mkHTTPRequest uri = Request uri GET [] ""
+
+-- TODO rename to something like "announceBlahBlah"
+
+-- | Send request and receive response from the tracker specified in
+-- announce list. This function throws 'IOException' if it couldn't
+-- send request or receive response or decode response.
+--
+askTracker :: URI -> AnnounceQuery -> IO AnnounceInfo
+askTracker announce req = do
+    let r = mkHTTPRequest (encodeRequest announce req)
+
+    rawResp  <- simpleHTTP r
+    respBody <- getResponseBody rawResp
+    checkResult $ decoded respBody
+  where
+    checkResult (Left err)
+      = ioError $ userError $ err ++ " in tracker response"
+    checkResult (Right (Failure err))
+      = ioError $ userError $ show err ++ " in tracker response"
+    checkResult (Right resp) = return resp
+
+-- | The same as the 'askTracker' but ignore response. Used in
+-- conjunction with 'Stopped'.
+leaveTracker :: URI -> AnnounceQuery -> IO ()
+leaveTracker announce req = do
+    let r = mkHTTPRequest (encodeRequest announce req)
+    void $ simpleHTTP r >>= getResponseBody
+
+{-----------------------------------------------------------------------
+    Scrape
+-----------------------------------------------------------------------}
+
+-- | Scrape info about a set of torrents.
+type Scrape = Map InfoHash ScrapeInfo
+
+-- | Trying to convert /announce/ URL to /scrape/ URL. If 'scrapeURL'
+-- gives 'Nothing' then tracker do not support scraping. The info hash
+-- list is used to restrict the tracker's report to that particular
+-- torrents. Note that scrapping of multiple torrents may not be
+-- supported. (Even if scrapping convention is supported)
+--
+scrapeURL :: URI -> [InfoHash] -> Maybe URI
+scrapeURL uri ihs = do
+  newPath <- replace (BC.pack (uriPath uri))
+  let newURI = uri { uriPath = BC.unpack newPath }
+  return (L.foldl addHashToURI newURI ihs)
+ where
+  replace :: ByteString -> Maybe ByteString
+  replace p
+    | ps <- BC.splitWith (== '/') p
+    , "announce" `B.isPrefixOf` L.last ps
+    = let newSuff = "scrape" <> B.drop (B.length "announce") (L.last ps)
+      in Just (B.intercalate "/" (L.init ps ++ [newSuff]))
+    | otherwise = Nothing
+
+
+-- | For each 'InfoHash' of torrents request scrape info from the tracker.
+-- However if the info hash list is 'null', the tracker should list
+-- all available torrents.
+-- Note that the 'URI' should be /announce/ URI, not /scrape/ URI.
+--
+scrape :: URI                -- ^ Announce 'URI'.
+       -> [InfoHash]         -- ^ Torrents to be scrapped.
+       -> IO (Result Scrape) -- ^ 'ScrapeInfo' for each torrent.
+scrape announce ihs
+  | Just uri <- scrapeURL announce ihs = do
+    rawResp  <- simpleHTTP (Request uri GET [] "")
+    respBody <- getResponseBody rawResp
+    return (decoded (BC.pack respBody))
+
+  | otherwise = return (Left "Tracker do not support scraping")
+
+-- | More particular version of 'scrape', just for one torrent.
+--
+scrapeOne :: URI                     -- ^ Announce 'URI'
+          -> InfoHash                -- ^ Hash of the torrent info.
+          -> IO (Result ScrapeInfo)  -- ^ 'ScrapeInfo' for the torrent.
+scrapeOne uri ih = extract <$> scrape uri [ih]
+  where
+    extract (Right m)
+      | Just s <- M.lookup ih m = Right s
+      | otherwise = Left "unable to find info hash in response dict"
+    extract (Left e) = Left e
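
A minimal caller sketch for the new module (hypothetical, not part of this
commit): pair askTracker and leaveTracker so that a 'Stopped' announce always
follows a 'Started' one. Here `finally` comes from Control.Exception and
`withHTTPTracker` is an invented name; reqEvent, Started and Stopped are the
field/constructors used in the code above.

    import Control.Exception (finally)

    -- Announce 'Started', run the action, and always announce 'Stopped'
    -- on the way out, even if the action throws.
    withHTTPTracker :: URI -> AnnounceQuery -> (AnnounceInfo -> IO a) -> IO a
    withHTTPTracker uri q action = do
        info <- askTracker uri q { reqEvent = Just Started }
        action info `finally` leaveTracker uri q { reqEvent = Just Stopped }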
diff --git a/src/Network/BitTorrent/Tracker/Protocol.hs b/src/Network/BitTorrent/Tracker/Protocol.hs
index 5ee61185..f5898d36 100644
--- a/src/Network/BitTorrent/Tracker/Protocol.hs
+++ b/src/Network/BitTorrent/Tracker/Protocol.hs
@@ -25,11 +25,8 @@
 {-# LANGUAGE TemplateHaskell #-}
 module Network.BitTorrent.Tracker.Protocol
        ( Event(..), AnnounceQuery(..), AnnounceInfo(..)
+       , defaultNumWant, defaultPorts
        , ScrapeQuery, ScrapeInfo(..)
-       , askTracker, leaveTracker
-
-         -- * Defaults
-       , defaultPorts, defaultNumWant
        )
        where
 
@@ -44,7 +41,6 @@ import Data.List as L
 import Data.Word
 import Data.Monoid
 import Data.BEncode
-import Data.ByteString as B
 import Data.Text (Text)
 import Data.Text.Encoding
 import Data.Serialize hiding (Result)
@@ -53,7 +49,6 @@ import Data.Torrent
 
 import Network
 import Network.Socket
-import Network.HTTP
 import Network.URI
 
 import Network.BitTorrent.Peer
@@ -216,11 +211,6 @@ instance URLEncode AnnounceQuery where
       ]
   where s :: String -> String; s = id; {-# INLINE s #-}
 
-encodeRequest :: URI -> AnnounceQuery -> URI
-encodeRequest announce req = URL.urlEncode req
-                    `addToURI`      announce
-                    `addHashToURI`  reqInfoHash req
-
 {-----------------------------------------------------------------------
     Binary announce encoding
-----------------------------------------------------------------------}
@@ -369,36 +359,3 @@ instance Serialize ScrapeInfo where
       , siIncomplete = fromIntegral leechers
       , siName       = Nothing
       }
-
-{-----------------------------------------------------------------------
-    Tracker
------------------------------------------------------------------------}
-
-mkHTTPRequest :: URI -> Request ByteString
-mkHTTPRequest uri = Request uri GET [] ""
-
--- | Send request and receive response from the tracker specified in
--- announce list. This function throws 'IOException' if it couldn't
--- send request or receive response or decode response.
---
-askTracker :: URI -> AnnounceQuery -> IO AnnounceInfo
-askTracker announce req = do
-    let r = mkHTTPRequest (encodeRequest announce req)
-
-    rawResp  <- simpleHTTP r
-    respBody <- getResponseBody rawResp
-    checkResult $ decoded respBody
-  where
-
-    checkResult (Left err)
-      = ioError $ userError $ err ++ " in tracker response"
-    checkResult (Right (Failure err))
-      = ioError $ userError $ show err ++ " in tracker response"
-    checkResult (Right resp) = return resp
-
--- | The same as the 'askTracker' but ignore response. Used in
--- conjunction with 'Stopped'.
-leaveTracker :: URI -> AnnounceQuery -> IO ()
-leaveTracker announce req = do
-    let r = mkHTTPRequest (encodeRequest announce req)
-    void $ simpleHTTP r >>= getResponseBody
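
After this final hunk, Protocol is transport-agnostic: it keeps the wire types
(AnnounceQuery, AnnounceInfo, ScrapeQuery, ScrapeInfo) and their BEncode and
Serialize instances, while all HTTP plumbing lives in Tracker.HTTP. A caller
that previously took askTracker from Protocol would now split its imports,
roughly (a hedged sketch, not from this commit):

    import Network.BitTorrent.Tracker.Protocol (AnnounceQuery (..), AnnounceInfo)
    import Network.BitTorrent.Tracker.HTTP     (askTracker, leaveTracker)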