Diffstat (limited to 'src/Network/BitTorrent/Tracker/RPC/HTTP.hs')
-rw-r--r-- | src/Network/BitTorrent/Tracker/RPC/HTTP.hs | 106
1 file changed, 106 insertions, 0 deletions
diff --git a/src/Network/BitTorrent/Tracker/RPC/HTTP.hs b/src/Network/BitTorrent/Tracker/RPC/HTTP.hs
new file mode 100644
index 00000000..0eef2b7e
--- /dev/null
+++ b/src/Network/BitTorrent/Tracker/RPC/HTTP.hs
@@ -0,0 +1,106 @@
-- |
-- Copyright   : (c) Sam Truzjan 2013
-- License     : BSD
-- Maintainer  : pxqr.sta@gmail.com
-- Stability   : experimental
-- Portability : non-portable
--
-- The tracker is an HTTP/HTTPS service used to discover peers for
-- a particular existing torrent and to keep statistics about the
-- swarm. This module also provides a way to easily request scrape
-- info for a particular list of torrents.
--
-- For more information see:
-- <https://wiki.theory.org/BitTorrentSpecification#Tracker_HTTP.2FHTTPS_Protocol>
--
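-- A minimal usage sketch (illustrative only and not part of the
-- original module; the tracker URL is a placeholder and the
-- 'AnnounceQuery' value is assumed to be built elsewhere with what
-- "Network.BitTorrent.Tracker.RPC.Message" provides):
--
-- > example :: AnnounceQuery -> IO (Result AnnounceInfo)
-- > example q =
-- >   case parseURI "http://tracker.example.org/announce" of
-- >     Nothing  -> fail "malformed tracker URI"
-- >     Just uri -> connect uri >>= announce q
--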
module Network.BitTorrent.Tracker.RPC.HTTP
       ( Connection
       , connect
       , announce
       , scrape
       ) where

import Control.Applicative
import Control.Exception
import Data.BEncode as BE
import Data.ByteString as B
import Data.ByteString.Char8 as BC
import Data.ByteString.Lazy as BL
import Data.List as L
import Data.Map as M
import Data.Monoid
import Network.URI
import Network.HTTP.Conduit

import Data.Torrent.InfoHash
import Network.BitTorrent.Tracker.RPC.Message

-- | A connection to a tracker, currently just its /announce/ URI.
data Connection = Connection
  { announceURI :: URI
  } deriving Show

-- | Prepare a 'Connection' for the given /announce/ URI. No network
-- traffic is performed at this point.
connect :: URI -> IO Connection
connect = return . Connection

-- | Send a request to and receive a response from the tracker
-- specified by the 'Connection'. This function throws 'IOException'
-- if it cannot send the request, receive the response, or decode the
-- response.
--
announce :: AnnounceQuery -> Connection -> IO (Result AnnounceInfo)
announce req conn = do
  let uri = undefined -- TODO: build the request URL from 'announceURI conn' and 'req'
  resp <- BL.toStrict <$> simpleHttp uri
  return $ BE.decode resp
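
-- One possible way to fill in the placeholder above (a sketch only;
-- 'renderAnnounceQuery' is a hypothetical helper that URL-encodes the
-- query parameters of an 'AnnounceQuery' and is not defined anywhere
-- in this commit):
--
-- > let uri = uriToString id (announceURI conn) ""
-- >             ++ "?" ++ renderAnnounceQuery req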

scrape :: ScrapeQuery -> Connection -> IO (Result Scrape)
scrape = undefined -- TODO: not implemented yet; see the commented-out draft below

{-
-- | Try to convert an /announce/ URL to a /scrape/ URL. If
-- 'scrapeURL' gives 'Nothing' then the tracker does not support
-- scraping. The info hash list is used to restrict the tracker's
-- report to those particular torrents. Note that scraping of
-- multiple torrents may not be supported, even if the scrape
-- convention itself is.
--
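-- Some conversions following the convention from the specification
-- linked in the module header (illustrative examples only, not taken
-- from this commit):
--
-- >  ~/announce       -> ~/scrape
-- >  ~/x/announce     -> ~/x/scrape
-- >  ~/announce.php   -> ~/scrape.php
-- >  ~/announce?x=2   -> ~/scrape?x=2
-- >  ~/a              -> (scrape not supported)
--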
scrapeURL :: URI -> [InfoHash] -> Maybe URI
scrapeURL uri ihs = do
    newPath <- replace (BC.pack (uriPath uri))
    let newURI = uri { uriPath = BC.unpack newPath }
    return (L.foldl addHashToURI newURI ihs)
  where
    replace :: ByteString -> Maybe ByteString
    replace p
      | ps <- BC.splitWith (== '/') p
      , "announce" `B.isPrefixOf` L.last ps
      = let newSuff = "scrape" <> B.drop (B.length "announce") (L.last ps)
        in Just (B.intercalate "/" (L.init ps ++ [newSuff]))
      | otherwise = Nothing

-- | For each 'InfoHash' in the list, request scrape info from the
-- tracker. If the info hash list is 'null', the tracker should list
-- all available torrents.
-- Note that the 'URI' should be an /announce/ URI, not a /scrape/ URI.
--
scrapeHTTP :: HTTPTracker -- ^ Announce 'URI'.
           -> [InfoHash]  -- ^ Torrents to be scraped.
           -> IO Scrape   -- ^ 'ScrapeInfo' for each torrent.
scrapeHTTP HTTPTracker {..} ihs
  | Just uri <- scrapeURL announceURI ihs = do
      rawResp  <- simpleHTTP (Request uri GET [] "")
      respBody <- getResponseBody rawResp
      case decode (BC.pack respBody) of
        Left  e -> throwIO $ userError $ e ++ " in scrape response"
        Right r -> return r

  | otherwise = throwIO $ userError "Tracker does not support scraping"

-- | A more specific version of 'scrape', for a single torrent.
--
scrapeOne :: Tracker t => t -> InfoHash -> IO ScrapeInfo
scrapeOne uri ih = scrape uri [ih] >>= maybe err return . M.lookup ih
  where
    err = throwIO $ userError "unable to find info hash in response dict"

-}
\ No newline at end of file