Diffstat (limited to 'src/Network/BitTorrent/Tracker/HTTP.hs')
-rw-r--r--  src/Network/BitTorrent/Tracker/HTTP.hs | 32 +++++++++++++++++++++-----------
 1 file changed, 21 insertions(+), 11 deletions(-)
diff --git a/src/Network/BitTorrent/Tracker/HTTP.hs b/src/Network/BitTorrent/Tracker/HTTP.hs
index 55b347ce..2d49436d 100644
--- a/src/Network/BitTorrent/Tracker/HTTP.hs
+++ b/src/Network/BitTorrent/Tracker/HTTP.hs
@@ -17,7 +17,7 @@ module Network.BitTorrent.Tracker.HTTP
        ( HTTPTracker
 
          -- * Extra
-       , scrapeURL
+--     , scrapeURL
        ) where
 
 import Control.Exception
@@ -25,13 +25,27 @@ import Data.BEncode
 import Data.ByteString as B
 import Data.ByteString.Char8 as BC
 import Data.List as L
+import Data.Map as M
 import Data.Monoid
 import Data.URLEncoded as URL
 import Network.URI
 import Network.HTTP
 
-import Network.BitTorrent.Tracker.Protocol
+import Data.Torrent.InfoHash
+import Network.BitTorrent.Tracker.Message
 
+-- | Set of tracker RPCs.
+class Tracker s where
+  connect  :: URI -> IO s
+  announce :: s -> AnnounceQuery -> IO AnnounceInfo
+  scrape   :: s -> ScrapeQuery   -> IO Scrape
+
+-- | More particular version of 'scrape', just for one torrent.
+--
+scrapeOne :: Tracker t => t -> InfoHash -> IO ScrapeInfo
+scrapeOne uri ih = scrape uri [ih] >>= maybe err return . M.lookup ih
+  where
+    err = throwIO $ userError "unable to find info hash in response dict"
 
 data HTTPTracker = HTTPTracker
   { announceURI :: URI
@@ -40,17 +54,12 @@ data HTTPTracker = HTTPTracker
 instance Tracker HTTPTracker where
   connect  = return . HTTPTracker
   announce = announceHTTP
-  scrape   = scrapeHTTP
+--  scrape   = scrapeHTTP
 
 {-----------------------------------------------------------------------
   Announce
 -----------------------------------------------------------------------}
 
-encodeRequest :: URI -> AnnounceQuery -> URI
-encodeRequest announceURI req = URL.urlEncode req
-                                  `addToURI`      announceURI
-                                  `addHashToURI`  reqInfoHash req
-
 mkGET :: URI -> Request ByteString
 mkGET uri = Request uri GET [] ""
 
@@ -64,14 +73,14 @@ announceHTTP HTTPTracker {..} req = do
 
   rawResp  <- simpleHTTP r
   respBody <- getResponseBody rawResp
-  checkResult $ decoded respBody
+  checkResult $ decode respBody
   where
     checkResult (Left err)
       = ioError $ userError $ err ++ " in tracker response"
     checkResult (Right (Failure err))
       = ioError $ userError $ show err ++ " in tracker response"
     checkResult (Right resp) = return resp
-
+{-
 {-----------------------------------------------------------------------
   Scrape
 -----------------------------------------------------------------------}
@@ -109,8 +118,9 @@ scrapeHTTP HTTPTracker {..} ihs
   | Just uri <- scrapeURL announceURI ihs = do
       rawResp  <- simpleHTTP (Request uri GET [] "")
       respBody <- getResponseBody rawResp
-      case decoded (BC.pack respBody) of
+      case decode (BC.pack respBody) of
         Left  e -> throwIO $ userError $ e ++ " in scrape response"
         Right r -> return r
 
   | otherwise = throwIO $ userError "Tracker do not support scraping"
+-}
\ No newline at end of file
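
For reference, the scrapeOne helper added above pulls a single torrent's ScrapeInfo out of the Data.Map returned by scrape, throwing a userError when the info hash is absent from the response dictionary. Below is a minimal standalone sketch of that lookup-or-throw idiom, using only base and containers; the lookupOrThrow name, the key, and the map contents are made up for illustration and are not part of this commit.

-- Sketch of the lookup-or-throw pattern used by scrapeOne.
-- The map contents and error message here are illustrative only.
import Control.Exception (throwIO)
import qualified Data.Map as M

lookupOrThrow :: Ord k => String -> k -> M.Map k v -> IO v
lookupOrThrow msg key = maybe (throwIO (userError msg)) return . M.lookup key

main :: IO ()
main = do
  -- Hypothetical "scrape response": info hash -> (seeders, leechers).
  let resp = M.fromList [("deadbeef", (12 :: Int, 3 :: Int))]
  stats <- lookupOrThrow "unable to find info hash in response dict" "deadbeef" resp
  print stats

The same shape appears in scrapeOne: scrape returns the whole per-torrent map, and the caller either gets the entry for its one info hash or an IOError, rather than a Maybe it has to unwrap.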