Diffstat (limited to 'src/Network/BitTorrent/Tracker/RPC/HTTP.hs')
-rw-r--r--  src/Network/BitTorrent/Tracker/RPC/HTTP.hs  90
1 file changed, 46 insertions(+), 44 deletions(-)
diff --git a/src/Network/BitTorrent/Tracker/RPC/HTTP.hs b/src/Network/BitTorrent/Tracker/RPC/HTTP.hs
index 0eef2b7e..81208590 100644
--- a/src/Network/BitTorrent/Tracker/RPC/HTTP.hs
+++ b/src/Network/BitTorrent/Tracker/RPC/HTTP.hs
@@ -15,6 +15,9 @@
 --
 module Network.BitTorrent.Tracker.RPC.HTTP
        ( Connection
+       , putConnection
+
+         -- * RPC
        , connect
        , announce
        , scrape
@@ -22,6 +25,7 @@ module Network.BitTorrent.Tracker.RPC.HTTP
 
 import Control.Applicative
 import Control.Exception
+import Control.Monad.Trans.Resource
 import Data.BEncode as BE
 import Data.ByteString as B
 import Data.ByteString.Char8 as BC
@@ -31,6 +35,8 @@ import Data.Map as M
 import Data.Monoid
 import Network.URI
 import Network.HTTP.Conduit
+import Network.HTTP.Conduit.Internal
+import Network.HTTP.Types.URI
 
 import Data.Torrent.InfoHash
 import Network.BitTorrent.Tracker.RPC.Message
@@ -38,38 +44,43 @@ import Network.BitTorrent.Tracker.RPC.Message
 
 data Connection = Connection
   { announceURI :: URI
-  } deriving Show
+  , manager     :: Manager
+  , connProxy   :: Maybe Proxy
+  }
+
+putConnection :: Connection -> IO ()
+putConnection = undefined
 
 connect :: URI -> IO Connection
-connect = return . Connection
+connect = undefined
+
+setSimpleQuery :: SimpleQuery -> Request m -> Request m
+setSimpleQuery q r = r
+  { queryString = undefined renderSimpleQuery False q
+  }
+
+trackerHTTP :: BEncode a => SimpleQuery -> Connection -> ResourceT IO a
+trackerHTTP q Connection {..} = do
+  request  <- setSimpleQuery q <$> setUri def announceURI
+  response <- httpLbs request { proxy = connProxy } manager
+  case BE.decode $ BL.toStrict $ responseBody response of
+    Left msg   -> error "TODO"
+    Right info -> return info
 
 -- | Send request and receive response from the tracker specified in
--- announce list. This function throws 'IOException' if it couldn't
--- send request or receive response or decode response.
+-- announce list.
 --
-announce :: AnnounceQuery -> Connection -> IO (Result AnnounceInfo)
-announce req = do
-  let uri = undefined
-  resp <- BL.toStrict <$> simpleHttp uri
-  return $ BE.decode resp
-
-scrape :: ScrapeQuery -> Connection -> IO (Result Scrape)
-scrape = undefined
+announce :: AnnounceQuery -> Connection -> ResourceT IO AnnounceInfo
+announce q = trackerHTTP (renderAnnounceQuery q)
 
-{-
 -- | Trying to convert /announce/ URL to /scrape/ URL. If 'scrapeURL'
--- gives 'Nothing' then tracker do not support scraping. The info hash
--- list is used to restrict the tracker's report to that particular
--- torrents. Note that scrapping of multiple torrents may not be
--- supported. (Even if scrapping convention is supported)
+-- gives 'Nothing' then tracker do not support scraping.
 --
-scrapeURL :: URI -> [InfoHash] -> Maybe URI
-scrapeURL uri ihs = do
+scrapeURL :: URI -> Maybe URI
+scrapeURL uri = do
   newPath <- replace (BC.pack (uriPath uri))
-  let newURI = uri { uriPath = BC.unpack newPath }
-  return (L.foldl addHashToURI newURI ihs)
-  where
-    replace :: ByteString -> Maybe ByteString
+  return uri { uriPath = BC.unpack newPath }
+  where
     replace p
       | ps <- BC.splitWith (== '/') p
       , "announce" `B.isPrefixOf` L.last ps
@@ -77,30 +88,21 @@ scrapeURL uri ihs = do
         in Just (B.intercalate "/" (L.init ps ++ [newSuff]))
       | otherwise = Nothing
 
-
 -- | For each 'InfoHash' of torrents request scrape info from the tracker.
 -- However if the info hash list is 'null', the tracker should list
 -- all available torrents.
--- Note that the 'URI' should be /announce/ URI, not /scrape/ URI.
 --
-scrapeHTTP :: HTTPTracker -- ^ Announce 'URI'.
-           -> [InfoHash] -- ^ Torrents to be scrapped.
-           -> IO Scrape  -- ^ 'ScrapeInfo' for each torrent.
-scrapeHTTP HTTPTracker {..} ihs
-  | Just uri <- scrapeURL announceURI ihs = do
-    rawResp  <- simpleHTTP (Request uri GET [] "")
-    respBody <- getResponseBody rawResp
-    case decode (BC.pack respBody) of
-      Left e  -> throwIO $ userError $ e ++ " in scrape response"
-      Right r -> return r
-
-  | otherwise = throwIO $ userError "Tracker do not support scraping"
+scrape :: ScrapeQuery -> Connection -> ResourceT IO ScrapeInfo
+scrape q conn @ Connection {..} = do
+  case scrapeURL announceURI of
+    Nothing  -> error "Tracker do not support scraping"
+    Just uri -> trackerHTTP (renderScrapeQuery q) conn { announceURI = uri }
 
 -- | More particular version of 'scrape', just for one torrent.
 --
-scrapeOne :: Tracker t => t -> InfoHash -> IO ScrapeInfo
-scrapeOne uri ih = scrape uri [ih] >>= maybe err return . M.lookup ih
-  where
-    err = throwIO $ userError "unable to find info hash in response dict"
-
--}
\ No newline at end of file
+scrapeOne :: InfoHash -> Connection -> ResourceT IO ScrapeEntry
+scrapeOne ih uri = do
+  xs <- scrape [ih] uri
+  case L.lookup ih xs of
+    Nothing -> error "unable to find info hash in response dict"
+    Just a  -> return a
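
For orientation, below is a minimal usage sketch of the reworked ResourceT-based API; it is not part of the patch. It assumes the remaining undefined stubs (connect, putConnection, setSimpleQuery) are eventually implemented, and that AnnounceQuery and AnnounceInfo are exported by Network.BitTorrent.Tracker.RPC.Message as the import list above suggests. The module name TrackerDemo, the helper announceOnce, and its arguments are hypothetical names chosen for the example.

-- Hypothetical sketch, not part of the commit: build a Connection for an
-- announce URI and run a single announce request inside ResourceT.
module TrackerDemo where

import Control.Monad.Trans.Resource (runResourceT)
import Network.URI (URI, parseURI)

import Network.BitTorrent.Tracker.RPC.HTTP
import Network.BitTorrent.Tracker.RPC.Message (AnnounceQuery, AnnounceInfo)

announceOnce :: String -> AnnounceQuery -> IO AnnounceInfo
announceOnce uriStr q =
  case parseURI uriStr of
    Nothing  -> ioError (userError "malformed announce URI")
    Just uri -> do
      conn <- connect uri              -- still 'undefined' in this patch
      runResourceT (announce q conn)   -- new ResourceT-based announce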
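The scrape side keeps the conventional announce-to-scrape URL rewrite implemented by scrapeURL: the leading "announce" in the last path segment is replaced, so "/announce" becomes "/scrape" and "/x/announce.php" becomes "/x/scrape.php"; if the last segment does not start with "announce", the tracker is treated as not supporting scrape. Extending the sketch above, a single-torrent scrape could look like the following, again assuming connect is implemented and that scrapeOne and ScrapeEntry end up exported (the visible export-list hunk does not show them); scrapeOnce, uri, and ih are placeholder names.

-- Hypothetical sketch; additional imports for the module header above:
--   import Data.Torrent.InfoHash (InfoHash)
--   import Network.BitTorrent.Tracker.RPC.Message (ScrapeEntry)

scrapeOnce :: URI -> InfoHash -> IO ScrapeEntry
scrapeOnce uri ih = do
  conn <- connect uri               -- same Manager-backed Connection as above
  runResourceT (scrapeOne ih conn)  -- scrapeOne rewrites announceURI via scrapeURL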