diff options
author | Sam T <pxqr.sta@gmail.com> | 2013-08-28 04:56:52 +0400 |
---|---|---|
committer | Sam T <pxqr.sta@gmail.com> | 2013-08-28 04:56:52 +0400 |
commit | 03841eb1f2e8e0d38833b8855a55e393fb4d766a (patch) | |
tree | e693750cc126f529dcbbd3db8709f508a7f101bf /src/Network/BitTorrent/Tracker/HTTP.hs | |
parent | 54b20f9ac462105cd3857b9c8102954a725ef308 (diff) |
~ Adapt HTTP Tracker to new interface.
Diffstat (limited to 'src/Network/BitTorrent/Tracker/HTTP.hs')
-rw-r--r-- | src/Network/BitTorrent/Tracker/HTTP.hs | 72 |
1 file changed, 26 insertions, 46 deletions
diff --git a/src/Network/BitTorrent/Tracker/HTTP.hs b/src/Network/BitTorrent/Tracker/HTTP.hs index 8d3a6412..ce517b34 100644 --- a/src/Network/BitTorrent/Tracker/HTTP.hs +++ b/src/Network/BitTorrent/Tracker/HTTP.hs | |||
@@ -14,17 +14,17 @@ | |||
14 | -- <https://wiki.theory.org/BitTorrentSpecification#Tracker_HTTP.2FHTTPS_Protocol> | 14 | -- <https://wiki.theory.org/BitTorrentSpecification#Tracker_HTTP.2FHTTPS_Protocol> |
15 | -- | 15 | -- |
16 | module Network.BitTorrent.Tracker.HTTP | 16 | module Network.BitTorrent.Tracker.HTTP |
17 | ( askTracker, leaveTracker | 17 | ( HTTPTracker |
18 | , scrapeURL, scrape, scrapeOne | 18 | |
19 | -- * Extra | ||
20 | , scrapeURL | ||
19 | ) where | 21 | ) where |
20 | 22 | ||
21 | import Control.Applicative | 23 | import Control.Exception |
22 | import Control.Monad | ||
23 | import Data.BEncode | 24 | import Data.BEncode |
24 | import Data.ByteString as B | 25 | import Data.ByteString as B |
25 | import Data.ByteString.Char8 as BC | 26 | import Data.ByteString.Char8 as BC |
26 | import Data.List as L | 27 | import Data.List as L |
27 | import Data.Map as M | ||
28 | import Data.Monoid | 28 | import Data.Monoid |
29 | import Data.URLEncoded as URL | 29 | import Data.URLEncoded as URL |
30 | import Network.URI | 30 | import Network.URI |
@@ -34,37 +34,34 @@ import Data.Torrent.Metainfo hiding (announce) | |||
34 | import Network.BitTorrent.Tracker.Protocol | 34 | import Network.BitTorrent.Tracker.Protocol |
35 | 35 | ||
36 | 36 | ||
37 | data HTTPTracker = HTTPTracker URI | 37 | data HTTPTracker = HTTPTracker |
38 | { announceURI :: URI | ||
39 | } deriving Show | ||
38 | 40 | ||
39 | instance Tracker URI where | 41 | instance Tracker HTTPTracker where |
40 | announce = askTracker | 42 | connect = return . HTTPTracker |
41 | scrape_ uri ihs = do | 43 | announce = announceHTTP |
42 | e <- scrape uri ihs | 44 | scrape = scrapeHTTP |
43 | case e of | ||
44 | Left str -> error str | ||
45 | Right si -> return si | ||
46 | 45 | ||
47 | {----------------------------------------------------------------------- | 46 | {----------------------------------------------------------------------- |
48 | Announce | 47 | Announce |
49 | -----------------------------------------------------------------------} | 48 | -----------------------------------------------------------------------} |
50 | 49 | ||
51 | encodeRequest :: URI -> AnnounceQuery -> URI | 50 | encodeRequest :: URI -> AnnounceQuery -> URI |
52 | encodeRequest announce req = URL.urlEncode req | 51 | encodeRequest announceURI req = URL.urlEncode req |
53 | `addToURI` announce | 52 | `addToURI` announceURI |
54 | `addHashToURI` reqInfoHash req | 53 | `addHashToURI` reqInfoHash req |
55 | 54 | ||
56 | mkGET :: URI -> Request ByteString | 55 | mkGET :: URI -> Request ByteString |
57 | mkGET uri = Request uri GET [] "" | 56 | mkGET uri = Request uri GET [] "" |
58 | 57 | ||
59 | -- TODO rename to something like "announceBlahBlah" | ||
60 | |||
61 | -- | Send request and receive response from the tracker specified in | 58 | -- | Send request and receive response from the tracker specified in |
62 | -- announce list. This function throws 'IOException' if it couldn't | 59 | -- announce list. This function throws 'IOException' if it couldn't |
63 | -- send request or receive response or decode response. | 60 | -- send request or receive response or decode response. |
64 | -- | 61 | -- |
65 | askTracker :: URI -> AnnounceQuery -> IO AnnounceInfo | 62 | announceHTTP :: HTTPTracker -> AnnounceQuery -> IO AnnounceInfo |
66 | askTracker announce req = do | 63 | announceHTTP HTTPTracker {..} req = do |
67 | let r = mkGET (encodeRequest announce req) | 64 | let r = mkGET (encodeRequest announceURI req) |
68 | 65 | ||
69 | rawResp <- simpleHTTP r | 66 | rawResp <- simpleHTTP r |
70 | respBody <- getResponseBody rawResp | 67 | respBody <- getResponseBody rawResp |
@@ -76,13 +73,6 @@ askTracker announce req = do | |||
76 | = ioError $ userError $ show err ++ " in tracker response" | 73 | = ioError $ userError $ show err ++ " in tracker response" |
77 | checkResult (Right resp) = return resp | 74 | checkResult (Right resp) = return resp |
78 | 75 | ||
79 | -- | The same as the 'askTracker' but ignore response. Used in | ||
80 | -- conjunction with 'Stopped'. | ||
81 | leaveTracker :: URI -> AnnounceQuery -> IO () | ||
82 | leaveTracker announce req = do | ||
83 | let r = mkGET (encodeRequest announce req) | ||
84 | void $ simpleHTTP r >>= getResponseBody | ||
85 | |||
86 | {----------------------------------------------------------------------- | 76 | {----------------------------------------------------------------------- |
87 | Scrape | 77 | Scrape |
88 | -----------------------------------------------------------------------} | 78 | -----------------------------------------------------------------------} |
@@ -113,25 +103,15 @@ scrapeURL uri ihs = do | |||
113 | -- all available torrents. | 103 | -- all available torrents. |
114 | -- Note that the 'URI' should be /announce/ URI, not /scrape/ URI. | 104 | -- Note that the 'URI' should be /announce/ URI, not /scrape/ URI. |
115 | -- | 105 | -- |
116 | scrape :: URI -- ^ Announce 'URI'. | 106 | scrapeHTTP :: HTTPTracker -- ^ Announce 'URI'. |
117 | -> [InfoHash] -- ^ Torrents to be scrapped. | 107 | -> [InfoHash] -- ^ Torrents to be scrapped. |
118 | -> IO (Result Scrape) -- ^ 'ScrapeInfo' for each torrent. | 108 | -> IO Scrape -- ^ 'ScrapeInfo' for each torrent. |
119 | scrape announce ihs | 109 | scrapeHTTP HTTPTracker {..} ihs |
120 | | Just uri<- scrapeURL announce ihs = do | 110 | | Just uri <- scrapeURL announceURI ihs = do |
121 | rawResp <- simpleHTTP (Request uri GET [] "") | 111 | rawResp <- simpleHTTP (Request uri GET [] "") |
122 | respBody <- getResponseBody rawResp | 112 | respBody <- getResponseBody rawResp |
123 | return (decoded (BC.pack respBody)) | 113 | case decoded (BC.pack respBody) of |
114 | Left e -> throwIO $ userError $ e ++ " in scrape response" | ||
115 | Right r -> return r | ||
124 | 116 | ||
125 | | otherwise = return (Left "Tracker do not support scraping") | 117 | | otherwise = throwIO $ userError "Tracker do not support scraping" |
126 | |||
127 | -- | More particular version of 'scrape', just for one torrent. | ||
128 | -- | ||
129 | scrapeOne :: URI -- ^ Announce 'URI' | ||
130 | -> InfoHash -- ^ Hash of the torrent info. | ||
131 | -> IO (Result ScrapeInfo) -- ^ 'ScrapeInfo' for the torrent. | ||
132 | scrapeOne uri ih = extract <$> scrape uri [ih] | ||
133 | where | ||
134 | extract (Right m) | ||
135 | | Just s <- M.lookup ih m = Right s | ||
136 | | otherwise = Left "unable to find info hash in response dict" | ||
137 | extract (Left e) = Left e | ||