Diffstat (limited to 'src/Network/BitTorrent/Tracker/RPC/HTTP.hs')
-rw-r--r--  src/Network/BitTorrent/Tracker/RPC/HTTP.hs | 90
1 file changed, 46 insertions(+), 44 deletions(-)
diff --git a/src/Network/BitTorrent/Tracker/RPC/HTTP.hs b/src/Network/BitTorrent/Tracker/RPC/HTTP.hs
index 0eef2b7e..81208590 100644
--- a/src/Network/BitTorrent/Tracker/RPC/HTTP.hs
+++ b/src/Network/BitTorrent/Tracker/RPC/HTTP.hs
@@ -15,6 +15,9 @@
 --
 module Network.BitTorrent.Tracker.RPC.HTTP
   ( Connection
+  , putConnection
+
+    -- * RPC
   , connect
   , announce
   , scrape
@@ -22,6 +25,7 @@ module Network.BitTorrent.Tracker.RPC.HTTP
 
 import Control.Applicative
 import Control.Exception
+import Control.Monad.Trans.Resource
 import Data.BEncode as BE
 import Data.ByteString as B
 import Data.ByteString.Char8 as BC
@@ -31,6 +35,8 @@ import Data.Map as M
 import Data.Monoid
 import Network.URI
 import Network.HTTP.Conduit
+import Network.HTTP.Conduit.Internal
+import Network.HTTP.Types.URI
 
 import Data.Torrent.InfoHash
 import Network.BitTorrent.Tracker.RPC.Message
@@ -38,38 +44,43 @@ import Network.BitTorrent.Tracker.RPC.Message
 
 data Connection = Connection
   { announceURI :: URI
-  } deriving Show
+  , manager     :: Manager
+  , connProxy   :: Maybe Proxy
+  }
+
+putConnection :: Connection -> IO ()
+putConnection = undefined
 
 connect :: URI -> IO Connection
-connect = return . Connection
+connect = undefined
+
+setSimpleQuery :: SimpleQuery -> Request m -> Request m
+setSimpleQuery q r = r
+  { queryString = undefined renderSimpleQuery False q
+  }
+
+trackerHTTP :: BEncode a => SimpleQuery -> Connection -> ResourceT IO a
+trackerHTTP q Connection {..} = do
+  request  <- setSimpleQuery q <$> setUri def announceURI
+  response <- httpLbs request { proxy = connProxy } manager
+  case BE.decode $ BL.toStrict $ responseBody response of
+    Left  msg  -> error "TODO"
+    Right info -> return info
 
 -- | Send a request to and receive a response from the tracker specified
--- in the announce list. This function throws 'IOException' if it could
--- not send the request, receive the response, or decode the response.
+-- in the announce list.
 --
-announce :: AnnounceQuery -> Connection -> IO (Result AnnounceInfo)
-announce req = do
-  let uri = undefined
-  resp <- BL.toStrict <$> simpleHttp uri
-  return $ BE.decode resp
-
-scrape :: ScrapeQuery -> Connection -> IO (Result Scrape)
-scrape = undefined
+announce :: AnnounceQuery -> Connection -> ResourceT IO AnnounceInfo
+announce q = trackerHTTP (renderAnnounceQuery q)
 
-{-
 -- | Tries to convert an /announce/ URL to a /scrape/ URL. If 'scrapeURL'
--- gives 'Nothing' then the tracker does not support scraping. The info
--- hash list is used to restrict the tracker's report to those particular
--- torrents. Note that scraping multiple torrents may not be supported,
--- even if the scrape convention is supported.
+-- gives 'Nothing' then the tracker does not support scraping.
 --
-scrapeURL :: URI -> [InfoHash] -> Maybe URI
-scrapeURL uri ihs = do
+scrapeURL :: URI -> Maybe URI
+scrapeURL uri = do
   newPath <- replace (BC.pack (uriPath uri))
-  let newURI = uri { uriPath = BC.unpack newPath }
-  return (L.foldl addHashToURI newURI ihs)
-  where
-    replace :: ByteString -> Maybe ByteString
+  return uri { uriPath = BC.unpack newPath }
+  where
     replace p
       | ps <- BC.splitWith (== '/') p
       , "announce" `B.isPrefixOf` L.last ps
@@ -77,30 +88,21 @@ scrapeURL uri ihs = do
        in Just (B.intercalate "/" (L.init ps ++ [newSuff]))
       | otherwise = Nothing
 
-
 -- | Request scrape info from the tracker for each torrent 'InfoHash'.
 -- However, if the info hash list is 'null', the tracker should list
 -- all available torrents.
--- Note that the 'URI' should be the /announce/ URI, not the /scrape/ URI.
 --
-scrapeHTTP :: HTTPTracker -- ^ Announce 'URI'.
-           -> [InfoHash]  -- ^ Torrents to be scraped.
-           -> IO Scrape   -- ^ 'ScrapeInfo' for each torrent.
-scrapeHTTP HTTPTracker {..} ihs
-  | Just uri <- scrapeURL announceURI ihs = do
-    rawResp  <- simpleHTTP (Request uri GET [] "")
-    respBody <- getResponseBody rawResp
-    case decode (BC.pack respBody) of
-      Left  e -> throwIO $ userError $ e ++ " in scrape response"
-      Right r -> return r
-
-  | otherwise = throwIO $ userError "tracker does not support scraping"
+scrape :: ScrapeQuery -> Connection -> ResourceT IO ScrapeInfo
+scrape q conn @ Connection {..} = do
+  case scrapeURL announceURI of
+    Nothing  -> error "tracker does not support scraping"
+    Just uri -> trackerHTTP (renderScrapeQuery q) conn { announceURI = uri }
 
 -- | More specific version of 'scrape', for a single torrent.
 --
-scrapeOne :: Tracker t => t -> InfoHash -> IO ScrapeInfo
-scrapeOne uri ih = scrape uri [ih] >>= maybe err return . M.lookup ih
-  where
-    err = throwIO $ userError "unable to find info hash in response dict"
-
--}
\ No newline at end of file
+scrapeOne :: InfoHash -> Connection -> ResourceT IO ScrapeEntry
+scrapeOne ih uri = do
+  xs <- scrape [ih] uri
+  case L.lookup ih xs of
+    Nothing -> error "unable to find info hash in response dict"
+    Just a  -> return a
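
A hedged usage sketch of the reworked API (the `withTracker` helper is illustrative, not part of the patch): `connect` acquires the shared `Manager`, the `ResourceT` actions run under `runResourceT`, and `bracket` guarantees `putConnection` releases the manager even if the RPC fails. Note that `scrapeURL` only rewrites the last path segment, so e.g. "/announce.php" becomes "/scrape.php".

    -- Hypothetical caller combining the pieces added in this patch.
    withTracker :: URI -> AnnounceQuery -> IO AnnounceInfo
    withTracker uri q =
        bracket (connect uri) putConnection (runResourceT . announce q)

    -- Scraping a single torrent reuses the same Connection:
    --   runResourceT $ scrapeOne infoHash conn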