summaryrefslogtreecommitdiff
path: root/src
diff options
context:
space:
mode:
authorSam T <sta.cs.vsu@gmail.com>2013-04-08 05:16:56 +0400
committerSam T <sta.cs.vsu@gmail.com>2013-04-08 05:16:56 +0400
commite64862237d02ca4c627fa48fe94c33ce2c757e7e (patch)
tree967311584d2c82656d7ef673253b9a1f11e38637 /src
parent0e129341291f20eacc03beefa51376c2dc43301c (diff)
add scrapeURL implementation
Diffstat (limited to 'src')
-rw-r--r--src/Data/Torrent/InfoHash.hs24
-rw-r--r--src/Network/Torrent/Tracker.hs20
-rw-r--r--src/Network/Torrent/Tracker/Scrape.hs60
3 files changed, 84 insertions, 20 deletions
diff --git a/src/Data/Torrent/InfoHash.hs b/src/Data/Torrent/InfoHash.hs
index 448e9a5a..b2ca44ee 100644
--- a/src/Data/Torrent/InfoHash.hs
+++ b/src/Data/Torrent/InfoHash.hs
@@ -1,5 +1,6 @@
1module Data.Torrent.InfoHash 1module Data.Torrent.InfoHash
2 ( InfoHash (getInfoHash) 2 ( InfoHash (getInfoHash)
3 , addHashToURI
3 4
4 -- ^ Construction 5 -- ^ Construction
5 , hash, hashlazy 6 , hash, hashlazy
@@ -10,6 +11,8 @@ module Data.Torrent.InfoHash
10 11
11import Control.Applicative 12import Control.Applicative
12import Data.Foldable 13import Data.Foldable
14import Data.List as L
15import Data.Char
13import Data.ByteString (ByteString) 16import Data.ByteString (ByteString)
14import qualified Data.ByteString as B 17import qualified Data.ByteString as B
15import qualified Data.ByteString.Char8 as BC 18import qualified Data.ByteString.Char8 as BC
@@ -18,6 +21,9 @@ import qualified Data.ByteString.Builder.Prim as B
18import qualified Data.ByteString.Lazy as Lazy 21import qualified Data.ByteString.Lazy as Lazy
19import Data.Serialize 22import Data.Serialize
20import qualified Crypto.Hash.SHA1 as C 23import qualified Crypto.Hash.SHA1 as C
24import Network.URI
25import Numeric
26
21 27
22-- | Exactly 20 bytes long SHA1 hash. 28-- | Exactly 20 bytes long SHA1 hash.
23newtype InfoHash = InfoHash { getInfoHash :: ByteString } 29newtype InfoHash = InfoHash { getInfoHash :: ByteString }
@@ -39,3 +45,21 @@ hashlazy = InfoHash . C.hashlazy
39ppHex :: InfoHash -> ByteString 45ppHex :: InfoHash -> ByteString
40ppHex = Lazy.toStrict . B.toLazyByteString . 46ppHex = Lazy.toStrict . B.toLazyByteString .
41 foldMap (B.primFixed B.word8HexFixed) . B.unpack . getInfoHash 47 foldMap (B.primFixed B.word8HexFixed) . B.unpack . getInfoHash
48
49addHashToURI :: URI -> InfoHash -> URI
50addHashToURI uri s = uri {
51 uriQuery = uriQuery uri ++ mkPref (uriQuery uri) ++
52 "info_hash=" ++ rfc1738Encode (BC.unpack (getInfoHash s))
53 }
54 where
55 mkPref [] = "?"
56 mkPref ('?' : _) = "&"
57 mkPref _ = error "addHashToURI"
58
59 rfc1738Encode = L.concatMap (\c -> if unreservedS c then [c] else encodeHex c)
60 where
61 unreservedS = (`L.elem` chars)
62 chars = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_./"
63 encodeHex c = '%' : pHex c
64 pHex c = let p = (showHex . ord $ c) ""
65 in if L.length p == 1 then '0' : p else p
diff --git a/src/Network/Torrent/Tracker.hs b/src/Network/Torrent/Tracker.hs
index 5faa1dd9..ef2413f6 100644
--- a/src/Network/Torrent/Tracker.hs
+++ b/src/Network/Torrent/Tracker.hs
@@ -30,7 +30,6 @@ import Network.HTTP
30import Network.URI 30import Network.URI
31import Network.Torrent.PeerID 31import Network.Torrent.PeerID
32 32
33import Numeric
34 33
35data Peer = Peer { 34data Peer = Peer {
36 peerID :: Maybe PeerID 35 peerID :: Maybe PeerID
@@ -150,22 +149,9 @@ instance URLEncode TRequest where
150 where s :: String -> String; s = id; {-# INLINE s #-} 149 where s :: String -> String; s = id; {-# INLINE s #-}
151 150
152encodeRequest :: TRequest -> URI 151encodeRequest :: TRequest -> URI
153encodeRequest req = URL.urlEncode req `addToURI` reqAnnounce req 152encodeRequest req = URL.urlEncode req
154 `addHash` BC.unpack (getInfoHash (reqInfoHash req)) 153 `addToURI` reqAnnounce req
155 where 154 `addHashToURI` reqInfoHash req
156 addHash :: URI -> String -> URI
157 addHash uri s = uri { uriQuery = uriQuery uri ++ "&info_hash=" ++ rfc1738Encode s }
158
159 rfc1738Encode :: String -> String
160 rfc1738Encode = L.concatMap (\c -> if unreserved c then [c] else encode c)
161 where
162 unreserved = (`L.elem` chars)
163 chars = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_./"
164 encode :: Char -> String
165 encode c = '%' : pHex c
166 pHex c =
167 let p = (showHex . ord $ c) ""
168 in if L.length p == 1 then '0' : p else p
169 155
170 156
171-- | Ports typically reserved for bittorrent. 157-- | Ports typically reserved for bittorrent.
diff --git a/src/Network/Torrent/Tracker/Scrape.hs b/src/Network/Torrent/Tracker/Scrape.hs
index 2d1dec07..6bfb488f 100644
--- a/src/Network/Torrent/Tracker/Scrape.hs
+++ b/src/Network/Torrent/Tracker/Scrape.hs
@@ -1,8 +1,62 @@
1{-# LANGUAGE OverloadedStrings #-}
1module Network.Torrent.Tracker.Scrape 2module Network.Torrent.Tracker.Scrape
2 ( 3 ( ScrapeInfo(..), Scrape
4 , scrapeURL
3 ) where 5 ) where
4 6
7import Control.Applicative
8import Data.BEncode
9import Data.ByteString (ByteString)
10import qualified Data.ByteString as B
11import qualified Data.ByteString.Char8 as BC
12import Data.Map (Map)
13import qualified Data.Map as M
14import Data.Monoid
15import Data.Torrent.InfoHash
5import Network.URI 16import Network.URI
6 17
7scrapeURL :: URI -> Maybe URI 18data ScrapeInfo = ScrapeInfo {
8scrapeURL = undefined 19 siComplete :: Int -- ^ Number of seeders.
20 , siDownloaded :: Int
21 -- ^ Total number of times the tracker has registered a completion.
22 , siIncomplete :: Int -- ^ Number of leechers
23 , siName :: Maybe ByteString -- ^
24 } deriving (Show, Eq)
25
26type Scrape = Map InfoHash ScrapeInfo
27
28instance BEncodable ScrapeInfo where
29 toBEncode si = fromAssocs
30 [ "complete" --> siComplete si
31 , "downloaded" --> siDownloaded si
32 , "incomplete" --> siIncomplete si
33 , "name" -->? siName si
34 ]
35
36 fromBEncode (BDict d) =
37 ScrapeInfo <$> d >-- "complete"
38 <*> d >-- "downloaded"
39 <*> d >-- "incomplete"
40 <*> d >--? "name"
41 fromBEncode _ = decodingError "ScrapeInfo"
42
43-- TODO: encode info hash
44-- | Trying to convert an /announce/ URL to a /scrape/ URL. If 'scrapeURL'
45-- gives 'Nothing' then the tracker does not support scraping. The info hash
46-- list is used to restrict the tracker's report to those particular
47-- torrents. Note that scraping of multiple torrents may not be
48-- supported. (Even if the scraping convention is supported.)
49--
50scrapeURL :: URI -> [InfoHash] -> Maybe URI
51scrapeURL uri ihs = do
52 newPath <- replace (BC.pack (uriPath uri))
53 let newURI = uri { uriPath = BC.unpack newPath }
54 return (foldl addHashToURI newURI ihs)
55 where
56 replace :: ByteString -> Maybe ByteString
57 replace p
58 | ps <- BC.splitWith (== '/') p
59 , "announce" `B.isPrefixOf` last ps
60 = let newSuff = "scrape" <> B.drop (B.length "announce") (last ps)
61 in Just (B.intercalate "/" (init ps ++ [newSuff]))
62 | otherwise = Nothing