path: root/src/Network/BitTorrent/Tracker/HTTP.hs
author    Sam T <pxqr.sta@gmail.com>  2013-08-28 04:56:52 +0400
committer Sam T <pxqr.sta@gmail.com>  2013-08-28 04:56:52 +0400
commit    03841eb1f2e8e0d38833b8855a55e393fb4d766a (patch)
tree      e693750cc126f529dcbbd3db8709f508a7f101bf /src/Network/BitTorrent/Tracker/HTTP.hs
parent    54b20f9ac462105cd3857b9c8102954a725ef308 (diff)
~ Adapt HTTP Tracker to new interface.
Diffstat (limited to 'src/Network/BitTorrent/Tracker/HTTP.hs')
-rw-r--r--  src/Network/BitTorrent/Tracker/HTTP.hs  |  72
1 file changed, 26 insertions, 46 deletions
diff --git a/src/Network/BitTorrent/Tracker/HTTP.hs b/src/Network/BitTorrent/Tracker/HTTP.hs
index 8d3a6412..ce517b34 100644
--- a/src/Network/BitTorrent/Tracker/HTTP.hs
+++ b/src/Network/BitTorrent/Tracker/HTTP.hs
@@ -14,17 +14,17 @@
 -- <https://wiki.theory.org/BitTorrentSpecification#Tracker_HTTP.2FHTTPS_Protocol>
 --
 module Network.BitTorrent.Tracker.HTTP
-  ( askTracker, leaveTracker
-  , scrapeURL, scrape, scrapeOne
+  ( HTTPTracker
+
+    -- * Extra
+  , scrapeURL
   ) where
 
-import Control.Applicative
-import Control.Monad
+import Control.Exception
 import Data.BEncode
 import Data.ByteString as B
 import Data.ByteString.Char8 as BC
 import Data.List as L
-import Data.Map as M
 import Data.Monoid
 import Data.URLEncoded as URL
 import Network.URI
@@ -34,37 +34,34 @@ import Data.Torrent.Metainfo hiding (announce)
 import Network.BitTorrent.Tracker.Protocol
 
 
-data HTTPTracker = HTTPTracker URI
+data HTTPTracker = HTTPTracker
+  { announceURI :: URI
+  } deriving Show
 
-instance Tracker URI where
-  announce = askTracker
-  scrape_ uri ihs = do
-    e <- scrape uri ihs
-    case e of
-      Left str  -> error str
-      Right si  -> return si
+instance Tracker HTTPTracker where
+  connect  = return . HTTPTracker
+  announce = announceHTTP
+  scrape   = scrapeHTTP
 
 {-----------------------------------------------------------------------
   Announce
-----------------------------------------------------------------------}
 
 encodeRequest :: URI -> AnnounceQuery -> URI
-encodeRequest announce req = URL.urlEncode req
-                                `addToURI` announce
+encodeRequest announceURI req = URL.urlEncode req
+                                `addToURI` announceURI
                                 `addHashToURI` reqInfoHash req
 
 mkGET :: URI -> Request ByteString
 mkGET uri = Request uri GET [] ""
 
--- TODO rename to something like "announceBlahBlah"
-
 -- | Send request and receive response from the tracker specified in
 -- announce list. This function throws 'IOException' if it couldn't
 -- send request or receive response or decode response.
 --
-askTracker :: URI -> AnnounceQuery -> IO AnnounceInfo
-askTracker announce req = do
-  let r = mkGET (encodeRequest announce req)
+announceHTTP :: HTTPTracker -> AnnounceQuery -> IO AnnounceInfo
+announceHTTP HTTPTracker {..} req = do
+  let r = mkGET (encodeRequest announceURI req)
 
   rawResp  <- simpleHTTP r
   respBody <- getResponseBody rawResp
@@ -76,13 +73,6 @@ askTracker announce req = do
       = ioError $ userError $ show err ++ " in tracker response"
     checkResult (Right resp) = return resp
 
--- | The same as the 'askTracker' but ignore response. Used in
--- conjunction with 'Stopped'.
-leaveTracker :: URI -> AnnounceQuery -> IO ()
-leaveTracker announce req = do
-  let r = mkGET (encodeRequest announce req)
-  void $ simpleHTTP r >>= getResponseBody
-
 {-----------------------------------------------------------------------
   Scrape
-----------------------------------------------------------------------}
@@ -113,25 +103,15 @@ scrapeURL uri ihs = do
 -- all available torrents.
 -- Note that the 'URI' should be /announce/ URI, not /scrape/ URI.
 --
-scrape :: URI                     -- ^ Announce 'URI'.
-       -> [InfoHash]              -- ^ Torrents to be scrapped.
-       -> IO (Result Scrape)      -- ^ 'ScrapeInfo' for each torrent.
-scrape announce ihs
-  | Just uri <- scrapeURL announce ihs = do
+scrapeHTTP :: HTTPTracker         -- ^ Announce 'URI'.
+           -> [InfoHash]          -- ^ Torrents to be scrapped.
+           -> IO Scrape           -- ^ 'ScrapeInfo' for each torrent.
+scrapeHTTP HTTPTracker {..} ihs
+  | Just uri <- scrapeURL announceURI ihs = do
     rawResp  <- simpleHTTP (Request uri GET [] "")
     respBody <- getResponseBody rawResp
-    return (decoded (BC.pack respBody))
+    case decoded (BC.pack respBody) of
+      Left  e -> throwIO $ userError $ e ++ " in scrape response"
+      Right r -> return r
 
-  | otherwise = return (Left "Tracker do not support scraping")
-
--- | More particular version of 'scrape', just for one torrent.
---
-scrapeOne :: URI                      -- ^ Announce 'URI'
-          -> InfoHash                 -- ^ Hash of the torrent info.
-          -> IO (Result ScrapeInfo)   -- ^ 'ScrapeInfo' for the torrent.
-scrapeOne uri ih = extract <$> scrape uri [ih]
-  where
-    extract (Right m)
-      | Just s <- M.lookup ih m = Right s
-      | otherwise = Left "unable to find info hash in response dict"
-    extract (Left e) = Left e
+  | otherwise = throwIO $ userError "Tracker do not support scraping"
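
For orientation, here is a minimal usage sketch of the interface as it looks after this patch. It is an illustration only: the helper names askHTTP and scrapeTorrents are hypothetical, and the assumption that the Tracker class (connect/announce/scrape) and the AnnounceQuery, AnnounceInfo, Scrape and InfoHash types come from Network.BitTorrent.Tracker.Protocol is inferred from the imports shown above, not confirmed by the commit.

-- Hypothetical usage sketch; not part of this commit.
module TrackerExample (askHTTP, scrapeTorrents) where

import Network.URI (URI)
import Network.BitTorrent.Tracker.HTTP (HTTPTracker)
-- Assumption: the Tracker class and the query/response types live here.
import Network.BitTorrent.Tracker.Protocol

-- Run one announce through the HTTP backend.  Network failures and
-- undecodable responses surface as IOExceptions, per the patched code.
askHTTP :: URI -> AnnounceQuery -> IO AnnounceInfo
askHTTP uri q = do
  tracker <- connect uri :: IO HTTPTracker  -- connect = return . HTTPTracker for this backend
  announce tracker q

-- Scrape several torrents at once; throws if the announce URI cannot be
-- rewritten into a scrape URL (see scrapeURL above).
scrapeTorrents :: URI -> [InfoHash] -> IO Scrape
scrapeTorrents uri ihs = do
  tracker <- connect uri :: IO HTTPTracker
  scrape tracker ihs

Compared with the old askTracker/scrape pair, errors are now reported as IOExceptions rather than as a Result value, so callers would typically wrap these actions with try or catch from Control.Exception.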