diff options
Diffstat (limited to 'src')
-rw-r--r-- | src/Data/Torrent/InfoHash.hs | 24 | ||||
-rw-r--r-- | src/Network/Torrent/Tracker.hs | 20 | ||||
-rw-r--r-- | src/Network/Torrent/Tracker/Scrape.hs | 60 |
3 files changed, 84 insertions, 20 deletions
diff --git a/src/Data/Torrent/InfoHash.hs b/src/Data/Torrent/InfoHash.hs index 448e9a5a..b2ca44ee 100644 --- a/src/Data/Torrent/InfoHash.hs +++ b/src/Data/Torrent/InfoHash.hs | |||
@@ -1,5 +1,6 @@ | |||
1 | module Data.Torrent.InfoHash | 1 | module Data.Torrent.InfoHash |
2 | ( InfoHash (getInfoHash) | 2 | ( InfoHash (getInfoHash) |
3 | , addHashToURI | ||
3 | 4 | ||
4 | -- ^ Construction | 5 | -- ^ Construction |
5 | , hash, hashlazy | 6 | , hash, hashlazy |
@@ -10,6 +11,8 @@ module Data.Torrent.InfoHash | |||
10 | 11 | ||
11 | import Control.Applicative | 12 | import Control.Applicative |
12 | import Data.Foldable | 13 | import Data.Foldable |
14 | import Data.List as L | ||
15 | import Data.Char | ||
13 | import Data.ByteString (ByteString) | 16 | import Data.ByteString (ByteString) |
14 | import qualified Data.ByteString as B | 17 | import qualified Data.ByteString as B |
15 | import qualified Data.ByteString.Char8 as BC | 18 | import qualified Data.ByteString.Char8 as BC |
@@ -18,6 +21,9 @@ import qualified Data.ByteString.Builder.Prim as B | |||
18 | import qualified Data.ByteString.Lazy as Lazy | 21 | import qualified Data.ByteString.Lazy as Lazy |
19 | import Data.Serialize | 22 | import Data.Serialize |
20 | import qualified Crypto.Hash.SHA1 as C | 23 | import qualified Crypto.Hash.SHA1 as C |
24 | import Network.URI | ||
25 | import Numeric | ||
26 | |||
21 | 27 | ||
22 | -- | Exactly 20 bytes long SHA1 hash. | 28 | -- | Exactly 20 bytes long SHA1 hash. |
23 | newtype InfoHash = InfoHash { getInfoHash :: ByteString } | 29 | newtype InfoHash = InfoHash { getInfoHash :: ByteString } |
@@ -39,3 +45,21 @@ hashlazy = InfoHash . C.hashlazy | |||
39 | ppHex :: InfoHash -> ByteString | 45 | ppHex :: InfoHash -> ByteString |
40 | ppHex = Lazy.toStrict . B.toLazyByteString . | 46 | ppHex = Lazy.toStrict . B.toLazyByteString . |
41 | foldMap (B.primFixed B.word8HexFixed) . B.unpack . getInfoHash | 47 | foldMap (B.primFixed B.word8HexFixed) . B.unpack . getInfoHash |
48 | |||
49 | addHashToURI :: URI -> InfoHash -> URI | ||
50 | addHashToURI uri s = uri { | ||
51 | uriQuery = uriQuery uri ++ mkPref (uriQuery uri) ++ | ||
52 | "info_hash=" ++ rfc1738Encode (BC.unpack (getInfoHash s)) | ||
53 | } | ||
54 | where | ||
55 | mkPref [] = "?" | ||
56 | mkPref ('?' : _) = "&" | ||
57 | mkPref _ = error "addHashToURI" | ||
58 | |||
59 | rfc1738Encode = L.concatMap (\c -> if unreservedS c then [c] else encodeHex c) | ||
60 | where | ||
61 | unreservedS = (`L.elem` chars) | ||
62 | chars = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_./" | ||
63 | encodeHex c = '%' : pHex c | ||
64 | pHex c = let p = (showHex . ord $ c) "" | ||
65 | in if L.length p == 1 then '0' : p else p | ||
diff --git a/src/Network/Torrent/Tracker.hs b/src/Network/Torrent/Tracker.hs index 5faa1dd9..ef2413f6 100644 --- a/src/Network/Torrent/Tracker.hs +++ b/src/Network/Torrent/Tracker.hs | |||
@@ -30,7 +30,6 @@ import Network.HTTP | |||
30 | import Network.URI | 30 | import Network.URI |
31 | import Network.Torrent.PeerID | 31 | import Network.Torrent.PeerID |
32 | 32 | ||
33 | import Numeric | ||
34 | 33 | ||
35 | data Peer = Peer { | 34 | data Peer = Peer { |
36 | peerID :: Maybe PeerID | 35 | peerID :: Maybe PeerID |
@@ -150,22 +149,9 @@ instance URLEncode TRequest where | |||
150 | where s :: String -> String; s = id; {-# INLINE s #-} | 149 | where s :: String -> String; s = id; {-# INLINE s #-} |
151 | 150 | ||
152 | encodeRequest :: TRequest -> URI | 151 | encodeRequest :: TRequest -> URI |
153 | encodeRequest req = URL.urlEncode req `addToURI` reqAnnounce req | 152 | encodeRequest req = URL.urlEncode req |
154 | `addHash` BC.unpack (getInfoHash (reqInfoHash req)) | 153 | `addToURI` reqAnnounce req |
155 | where | 154 | `addHashToURI` reqInfoHash req |
156 | addHash :: URI -> String -> URI | ||
157 | addHash uri s = uri { uriQuery = uriQuery uri ++ "&info_hash=" ++ rfc1738Encode s } | ||
158 | |||
159 | rfc1738Encode :: String -> String | ||
160 | rfc1738Encode = L.concatMap (\c -> if unreserved c then [c] else encode c) | ||
161 | where | ||
162 | unreserved = (`L.elem` chars) | ||
163 | chars = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_./" | ||
164 | encode :: Char -> String | ||
165 | encode c = '%' : pHex c | ||
166 | pHex c = | ||
167 | let p = (showHex . ord $ c) "" | ||
168 | in if L.length p == 1 then '0' : p else p | ||
169 | 155 | ||
170 | 156 | ||
171 | -- | Ports typically reserved for bittorrent. | 157 | -- | Ports typically reserved for bittorrent. |
diff --git a/src/Network/Torrent/Tracker/Scrape.hs b/src/Network/Torrent/Tracker/Scrape.hs index 2d1dec07..6bfb488f 100644 --- a/src/Network/Torrent/Tracker/Scrape.hs +++ b/src/Network/Torrent/Tracker/Scrape.hs | |||
@@ -1,8 +1,62 @@ | |||
1 | {-# LANGUAGE OverloadedStrings #-} | ||
1 | module Network.Torrent.Tracker.Scrape | 2 | module Network.Torrent.Tracker.Scrape |
2 | ( | 3 | ( ScrapeInfo(..), Scrape |
4 | , scrapeURL | ||
3 | ) where | 5 | ) where |
4 | 6 | ||
7 | import Control.Applicative | ||
8 | import Data.BEncode | ||
9 | import Data.ByteString (ByteString) | ||
10 | import qualified Data.ByteString as B | ||
11 | import qualified Data.ByteString.Char8 as BC | ||
12 | import Data.Map (Map) | ||
13 | import qualified Data.Map as M | ||
14 | import Data.Monoid | ||
15 | import Data.Torrent.InfoHash | ||
5 | import Network.URI | 16 | import Network.URI |
6 | 17 | ||
7 | scrapeURL :: URI -> Maybe URI | 18 | data ScrapeInfo = ScrapeInfo { |
8 | scrapeURL = undefined | 19 | siComplete :: Int -- ^ Number of seeders. |
20 | , siDownloaded :: Int | ||
21 | -- ^ Total number of times the tracker has registered a completion. | ||
22 | , siIncomplete :: Int -- ^ Number of leechers | ||
23 | , siName :: Maybe ByteString -- ^ | ||
24 | } deriving (Show, Eq) | ||
25 | |||
26 | type Scrape = Map InfoHash ScrapeInfo | ||
27 | |||
28 | instance BEncodable ScrapeInfo where | ||
29 | toBEncode si = fromAssocs | ||
30 | [ "complete" --> siComplete si | ||
31 | , "downloaded" --> siDownloaded si | ||
32 | , "incomplete" --> siIncomplete si | ||
33 | , "name" -->? siName si | ||
34 | ] | ||
35 | |||
36 | fromBEncode (BDict d) = | ||
37 | ScrapeInfo <$> d >-- "complete" | ||
38 | <*> d >-- "downloaded" | ||
39 | <*> d >-- "incomplete" | ||
40 | <*> d >--? "name" | ||
41 | fromBEncode _ = decodingError "ScrapeInfo" | ||
42 | |||
43 | -- TODO: encode info hash | ||
44 | -- | Trying to convert /announce/ URL to /scrape/ URL. If 'scrapeURL' | ||
45 | -- gives 'Nothing' then the tracker does not support scraping. The info hash | ||
46 | -- list is used to restrict the tracker's report to those particular | ||
47 | -- torrents. Note that scraping of multiple torrents may not be | ||
48 | -- supported. (Even if the scraping convention is supported) | ||
49 | -- | ||
50 | scrapeURL :: URI -> [InfoHash] -> Maybe URI | ||
51 | scrapeURL uri ihs = do | ||
52 | newPath <- replace (BC.pack (uriPath uri)) | ||
53 | let newURI = uri { uriPath = BC.unpack newPath } | ||
54 | return (foldl addHashToURI newURI ihs) | ||
55 | where | ||
56 | replace :: ByteString -> Maybe ByteString | ||
57 | replace p | ||
58 | | ps <- BC.splitWith (== '/') p | ||
59 | , "announce" `B.isPrefixOf` last ps | ||
60 | = let newSuff = "scrape" <> B.drop (B.length "announce") (last ps) | ||
61 | in Just (B.intercalate "/" (init ps ++ [newSuff])) | ||
62 | | otherwise = Nothing | ||