author     Sam T <pxqr.sta@gmail.com>   2013-06-11 08:02:09 +0400
committer  Sam T <pxqr.sta@gmail.com>   2013-06-11 08:02:09 +0400
commit     0e3903fa3d486c57504837fd497a3a348793f7fc (patch)
tree       b47f210c347062ca8fbe7c34012bb36107c86e52 /src/Network
parent     0254b200cd4aa5245c37c7a650f8b14567a3b4cf (diff)
Merge Scrape to Tracker.
Diffstat (limited to 'src/Network')
-rw-r--r--  src/Network/BitTorrent/Internal.hs          |   8
-rw-r--r--  src/Network/BitTorrent/Tracker.hs           | 115
-rw-r--r--  src/Network/BitTorrent/Tracker/Protocol.hs  |   5
-rw-r--r--  src/Network/BitTorrent/Tracker/Scrape.hs    | 115

4 files changed, 111 insertions(+), 132 deletions(-)
diff --git a/src/Network/BitTorrent/Internal.hs b/src/Network/BitTorrent/Internal.hs
index 39e10ce2..91dc35d5 100644
--- a/src/Network/BitTorrent/Internal.hs
+++ b/src/Network/BitTorrent/Internal.hs
@@ -226,12 +226,12 @@ updateOutcoming PeerSession {..} =
     updateTimeout (eventManager (clientSession swarmSession))
       outcomingTimeout maxOutcomingTime
 
-sendKA :: Socket -> SwarmSession -> IO ()
-sendKA sock SwarmSession {..} = do
+sendKA :: Socket -> IO ()
+sendKA sock {- SwarmSession {..} -} = do
   print "I'm sending keep alive."
   sendAll sock (encode BT.KeepAlive)
-  let mgr = eventManager clientSession
-  updateTimeout mgr
+--  let mgr = eventManager clientSession
+--  updateTimeout mgr
   print "Done.."
 
 abortSession :: IO ()
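
With the event-manager rescheduling commented out above, sendKA no longer
arranges its own next tick. A minimal sketch of how a caller could drive it
from a plain delay loop instead; keepAliveLoop, the forkIO usage, and the
two-minute interval are illustrative assumptions, not part of this commit
(it also assumes Network.BitTorrent.Internal exports sendKA):

import Control.Concurrent (forkIO, threadDelay)
import Control.Monad (forever)
import Network.Socket (Socket)
import Network.BitTorrent.Internal (sendKA)

-- Hypothetical driver: fire a keep-alive every two minutes (a common
-- BitTorrent idle interval) on a dedicated thread.
keepAliveLoop :: Socket -> IO ()
keepAliveLoop sock = forever $ do
  threadDelay (120 * 1000 * 1000)  -- threadDelay takes microseconds
  sendKA sock                      -- matches the new Socket -> IO () signature

A caller would start it with forkIO (keepAliveLoop sock) and kill the thread
when the peer session closes.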
diff --git a/src/Network/BitTorrent/Tracker.hs b/src/Network/BitTorrent/Tracker.hs
index 04a7b43e..275b5422 100644
--- a/src/Network/BitTorrent/Tracker.hs
+++ b/src/Network/BitTorrent/Tracker.hs
@@ -6,16 +6,18 @@
 -- Portability :  non-portable
 --
 -- This module provides high level API for peer->tracker
--- communication.
+-- communication. A tracker is used to discover other peers in
+-- the network.
 --
+-- By convention, most trackers support another form of request,
+-- which queries the state of a given torrent (or all torrents) that
+-- the tracker is managing. This module also provides a way to
+-- easily request scrape info for a particular torrent list.
+--
+{-# LANGUAGE OverloadedStrings #-}
 {-# LANGUAGE RecordWildCards #-}
 module Network.BitTorrent.Tracker
-       ( module Network.BitTorrent.Tracker.Scrape
-
-       , withTracker, completedReq
-
-         -- * Progress
-       , Progress(..), startProgress
+       ( withTracker, completedReq
 
          -- * Connection
        , TConnection(..), tconnection
@@ -27,6 +29,11 @@ module Network.BitTorrent.Tracker
 
          -- * Re-export
        , defaultPorts
+
+         -- * Scrape
+       , ScrapeInfo(..), Scrape
+       , scrapeURL
+       , scrape, scrapeOne
        ) where
 
 import Control.Applicative
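
With the Scrape module folded in, downstream code reaches the announce and
scrape APIs through a single import. A hypothetical consumer's import list
under the new export surface:

import Network.BitTorrent.Tracker
       ( withTracker, completedReq      -- announce workflow
       , ScrapeInfo(..), Scrape         -- scrape results
       , scrapeURL, scrape, scrapeOne   -- scrape requests
       )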
@@ -34,15 +41,23 @@ import Control.Concurrent
 import Control.Concurrent.STM
 import Control.Exception
 import Control.Monad
+import Data.BEncode
+import Data.ByteString (ByteString)
+import qualified Data.ByteString as B
+import qualified Data.ByteString.Char8 as BC
+import Data.Map (Map)
+import qualified Data.Map as M
+import Data.Monoid
 import Data.IORef
-import Data.Torrent
+
 import Network
+import Network.HTTP
 import Network.URI
 
+import Data.Torrent
 import Network.BitTorrent.Internal
 import Network.BitTorrent.Peer
 import Network.BitTorrent.Tracker.Protocol
-import Network.BitTorrent.Tracker.Scrape
 
 
 -- | 'TConnection' (shorthand for Tracker session) combines tracker request
@@ -179,3 +194,85 @@ withTracker initProgress conn action = bracket start end (action . fst)
     killThread tid
     pr <- getProgress se
     askTracker $ stoppedReq conn pr
+
+
+
+-- | Information about a particular torrent.
+data ScrapeInfo = ScrapeInfo {
+    siComplete   :: Int
+    -- ^ Number of seeders - peers with the entire file.
+  , siDownloaded :: Int
+    -- ^ Total number of times the tracker has registered a completion.
+  , siIncomplete :: Int
+    -- ^ Number of leechers.
+  , siName       :: Maybe ByteString
+    -- ^ Name of the torrent file, as specified by the "name"
+    --   field in the info section of the .torrent file.
+  } deriving (Show, Eq)
+
+-- | Scrape info about a set of torrents.
+type Scrape = Map InfoHash ScrapeInfo
+
+instance BEncodable ScrapeInfo where
+  toBEncode si = fromAssocs
+    [ "complete"   -->  siComplete si
+    , "downloaded" -->  siDownloaded si
+    , "incomplete" -->  siIncomplete si
+    , "name"       -->? siName si
+    ]
+
+  fromBEncode (BDict d) =
+    ScrapeInfo <$> d >--  "complete"
+               <*> d >--  "downloaded"
+               <*> d >--  "incomplete"
+               <*> d >--? "name"
+  fromBEncode _ = decodingError "ScrapeInfo"
+
+-- | Try to convert an /announce/ URL to a /scrape/ URL. If 'scrapeURL'
+-- gives 'Nothing', the tracker does not support scraping. The info hash
+-- list is used to restrict the tracker's report to those particular
+-- torrents. Note that scraping of multiple torrents may not be
+-- supported, even if the scrape convention is.
+--
+scrapeURL :: URI -> [InfoHash] -> Maybe URI
+scrapeURL uri ihs = do
+    newPath <- replace (BC.pack (uriPath uri))
+    let newURI = uri { uriPath = BC.unpack newPath }
+    return (foldl addHashToURI newURI ihs)
+  where
+    replace :: ByteString -> Maybe ByteString
+    replace p
+      | ps <- BC.splitWith (== '/') p
+      , "announce" `B.isPrefixOf` last ps
+      = let newSuff = "scrape" <> B.drop (B.length "announce") (last ps)
+        in Just (B.intercalate "/" (init ps ++ [newSuff]))
+      | otherwise = Nothing
+
+
+-- | Request scrape info from the tracker for each 'InfoHash' in the
+-- list. If the info hash list is 'null', the tracker should list
+-- all available torrents.
+-- Note that the 'URI' should be an /announce/ URI, not a /scrape/ URI.
+--
+scrape :: URI                 -- ^ Announce 'URI'.
+       -> [InfoHash]          -- ^ Torrents to be scraped.
+       -> IO (Result Scrape)  -- ^ 'ScrapeInfo' for each torrent.
+scrape announce ihs
+  | Just uri <- scrapeURL announce ihs = do
+      rawResp  <- simpleHTTP (Request uri GET [] "")
+      respBody <- getResponseBody rawResp
+      return (decoded (BC.pack respBody))
+
+  | otherwise = return (Left "Tracker does not support scraping")
+
+-- | A specialized version of 'scrape' for a single torrent.
+--
+scrapeOne :: URI                     -- ^ Announce 'URI'.
+          -> InfoHash                -- ^ Hash of the torrent info.
+          -> IO (Result ScrapeInfo)  -- ^ 'ScrapeInfo' for the torrent.
+scrapeOne uri ih = extract <$> scrape uri [ih]
+  where
+    extract (Right m)
+      | Just s <- M.lookup ih m = Right s
+      | otherwise = Left "unable to find info hash in response dict"
+    extract (Left e) = Left e
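
Read together: scrapeURL does the pure URL rewriting, while scrape and
scrapeOne wrap the HTTP round trip and BEncode decoding. A small sketch of
how they compose; the announce URL is a placeholder, the partial Just match
is for brevity, and demoScrape itself is not part of this commit:

import Data.Torrent (InfoHash)
import Network.URI (parseURI)
import Network.BitTorrent.Tracker

-- Hypothetical demo; "tracker.example.com" is a placeholder host.
demoScrape :: InfoHash -> IO ()
demoScrape ih = do
  let Just announce = parseURI "http://tracker.example.com/announce"
  -- scrapeURL swaps the trailing "announce" path segment for "scrape"
  -- and appends one info_hash parameter per requested torrent:
  print (scrapeURL announce [ih])
  res <- scrapeOne announce ih
  case res of
    Left err -> putStrLn ("scrape failed: " ++ err)
    Right si -> putStrLn ("seeders: "    ++ show (siComplete si) ++
                          ", leechers: " ++ show (siIncomplete si))

Passing an empty info hash list to scrape asks the tracker to report on
every torrent it manages, per the comment on scrape above.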
diff --git a/src/Network/BitTorrent/Tracker/Protocol.hs b/src/Network/BitTorrent/Tracker/Protocol.hs
index af48e3e9..c94a2dfc 100644
--- a/src/Network/BitTorrent/Tracker/Protocol.hs
+++ b/src/Network/BitTorrent/Tracker/Protocol.hs
@@ -21,9 +21,7 @@
 {-# LANGUAGE OverloadedStrings #-}
 -- TODO: add "compact" field to TRequest
 module Network.BitTorrent.Tracker.Protocol
-       ( module Network.BitTorrent.Tracker.Scrape
-
-       , Event(..), TRequest(..), TResponse(..)
+       ( Event(..), TRequest(..), TResponse(..)
        , askTracker
 
          -- * Defaults
@@ -50,7 +48,6 @@ import Network.HTTP
 import Network.URI
 
 import Network.BitTorrent.Peer
-import Network.BitTorrent.Tracker.Scrape
 
 
 
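
Code that had been reaching the scrape names through this module's re-export
now has to import the merged location instead; a hypothetical one-line
migration:

-- Before this commit (via the Protocol re-export):
--   import Network.BitTorrent.Tracker.Protocol
-- After it:
import Network.BitTorrent.Tracker (ScrapeInfo(..), scrape, scrapeOne)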
diff --git a/src/Network/BitTorrent/Tracker/Scrape.hs b/src/Network/BitTorrent/Tracker/Scrape.hs
deleted file mode 100644
index 0181cf9f..00000000
--- a/src/Network/BitTorrent/Tracker/Scrape.hs
+++ /dev/null
@@ -1,115 +0,0 @@
--- |
---   Copyright   :  (c) Sam T. 2013
---   License     :  MIT
---   Maintainer  :  pxqr.sta@gmail.com
---   Stability   :  experimental
---   Portability :  portable
---
---   By convention, most trackers support another form of request,
---   which queries the state of a given torrent (or all torrents) that the
---   tracker is managing. This module provides a way to easily request
---   scrape info for a particular torrent list.
---
-{-# LANGUAGE OverloadedStrings #-}
-module Network.BitTorrent.Tracker.Scrape
-       ( ScrapeInfo(..), Scrape
-       , scrapeURL
-
-         -- * Requests
-       , scrape
-       , scrapeOne
-       ) where
-
-import Control.Applicative
-import Data.BEncode
-import Data.ByteString (ByteString)
-import qualified Data.ByteString as B
-import qualified Data.ByteString.Char8 as BC
-import Data.Map (Map)
-import qualified Data.Map as M
-import Data.Monoid
-import Network.URI
-import Network.HTTP
-
-import Data.Torrent
-
-
--- | Information about a particular torrent.
-data ScrapeInfo = ScrapeInfo {
-    siComplete   :: Int
-    -- ^ Number of seeders - peers with the entire file.
-  , siDownloaded :: Int
-    -- ^ Total number of times the tracker has registered a completion.
-  , siIncomplete :: Int
-    -- ^ Number of leechers.
-  , siName       :: Maybe ByteString
-    -- ^ Name of the torrent file, as specified by the "name"
-    --   field in the info section of the .torrent file.
-  } deriving (Show, Eq)
-
--- | Scrape info about a set of torrents.
-type Scrape = Map InfoHash ScrapeInfo
-
-instance BEncodable ScrapeInfo where
-  toBEncode si = fromAssocs
-    [ "complete"   -->  siComplete si
-    , "downloaded" -->  siDownloaded si
-    , "incomplete" -->  siIncomplete si
-    , "name"       -->? siName si
-    ]
-
-  fromBEncode (BDict d) =
-    ScrapeInfo <$> d >--  "complete"
-               <*> d >--  "downloaded"
-               <*> d >--  "incomplete"
-               <*> d >--? "name"
-  fromBEncode _ = decodingError "ScrapeInfo"
-
--- | Try to convert an /announce/ URL to a /scrape/ URL. If 'scrapeURL'
--- gives 'Nothing', the tracker does not support scraping. The info hash
--- list is used to restrict the tracker's report to those particular
--- torrents. Note that scraping of multiple torrents may not be
--- supported, even if the scrape convention is.
---
-scrapeURL :: URI -> [InfoHash] -> Maybe URI
-scrapeURL uri ihs = do
-    newPath <- replace (BC.pack (uriPath uri))
-    let newURI = uri { uriPath = BC.unpack newPath }
-    return (foldl addHashToURI newURI ihs)
-  where
-    replace :: ByteString -> Maybe ByteString
-    replace p
-      | ps <- BC.splitWith (== '/') p
-      , "announce" `B.isPrefixOf` last ps
-      = let newSuff = "scrape" <> B.drop (B.length "announce") (last ps)
-        in Just (B.intercalate "/" (init ps ++ [newSuff]))
-      | otherwise = Nothing
-
-
--- | Request scrape info from the tracker for each 'InfoHash' in the
--- list. If the info hash list is 'null', the tracker should list
--- all available torrents.
--- Note that the 'URI' should be an /announce/ URI, not a /scrape/ URI.
---
-scrape :: URI                 -- ^ Announce 'URI'.
-       -> [InfoHash]          -- ^ Torrents to be scraped.
-       -> IO (Result Scrape)  -- ^ 'ScrapeInfo' for each torrent.
-scrape announce ihs
-  | Just uri <- scrapeURL announce ihs = do
-      rawResp  <- simpleHTTP (Request uri GET [] "")
-      respBody <- getResponseBody rawResp
-      return (decoded (BC.pack respBody))
-
-  | otherwise = return (Left "Tracker does not support scraping")
-
--- | A specialized version of 'scrape' for a single torrent.
---
-scrapeOne :: URI                     -- ^ Announce 'URI'.
-          -> InfoHash                -- ^ Hash of the torrent info.
-          -> IO (Result ScrapeInfo)  -- ^ 'ScrapeInfo' for the torrent.
-scrapeOne uri ih = extract <$> scrape uri [ih]
-  where
-    extract (Right m)
-      | Just s <- M.lookup ih m = Right s
-      | otherwise = Left "unable to find info hash in response dict"
-    extract (Left e) = Left e