author    Matt Arnold <matt@thegnuguru.org>  2023-06-06 15:15:22 -0400
committer Matt Arnold <matt@thegnuguru.org>  2023-06-06 15:15:22 -0400
commit    1cd5b2f2e16c5ea2a9392a4c09c467cbf3b8be6a (patch)
tree      900bf11d20f6055867e0abaf13f7c1c70322eec5
parent    479bd45b11c8a274e2fc00ee3dfbde1f23bc8e5e (diff)

make more unixy (HEAD -> main)
 .gitignore    |   3 ++-
 Dockerfile    |   2 ++
 fetchers.go   | 205 ++++++++
 handlers.go   | 223 ++++++++
 main.go       | 444 +-------
 middleware.go |  24 ++
 6 files changed, 465 insertions(+), 436 deletions(-)
diff --git a/.gitignore b/.gitignore
index ba8376c..24e3295 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,2 +1,3 @@
+*.log
 poseidon
-*.bak
\ No newline at end of file
+*.bak
diff --git a/Dockerfile b/Dockerfile
index fdfcabc..7e932dd 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -12,7 +12,9 @@ FROM alpine:latest
 RUN apk add --update --no-cache curl openssl && rm -rf /var/cache/apk/*
 WORKDIR /app
 COPY . ./
+RUN rm -f /app/access.log
 COPY --from=builder /app/poseidon /app/poseidon
 RUN chmod +x /app/entrypoint.sh
+
 EXPOSE 3000
 ENTRYPOINT /app/entrypoint.sh
diff --git a/fetchers.go b/fetchers.go
new file mode 100644
index 0000000..4ec8bdd
--- /dev/null
+++ b/fetchers.go
@@ -0,0 +1,205 @@
+package main
+
+import (
+	"bytes"
+	"compress/gzip"
+	"errors"
+	"fmt"
+	"io"
+	"log"
+	"net/http"
+	"net/url"
+	"strings"
+	"time"
+
+	"github.com/flosch/pongo2/v6"
+	readability "github.com/go-shiori/go-readability"
+	"piusbird.space/poseidon/nuparser"
+)
+
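+// gmiFetch proxies a gemini:// URL: it fetches the raw page via gmiGet,
+// rewrites its links to point back through the proxy, extracts an
+// article with readability, and renders it through the header template.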
+func gmiFetch(fetchurl string) (*http.Response, error) {
+	resp := http.Response{
+		Body: io.NopCloser(bytes.NewBufferString("Gemini Content")),
+	}
+	tpl, err := pongo2.FromString(Header)
+	if err != nil {
+		return nil, err
+	}
+	rawhtml, err := gmiGet(fetchurl, 0)
+	if err != nil {
+		return nil, err
+	}
+	uu, err := url.Parse(fetchurl)
+	if err != nil {
+		return nil, err
+	}
+	tmpbuf := strings.NewReader(rawhtml)
+	filteredContent, err := RewriteLinks(tmpbuf, homeURL)
+	inbuf := strings.NewReader(filteredContent)
+	if err != nil {
+		log.Println("Failed filter pass on " + fetchurl)
+		inbuf = strings.NewReader(rawhtml)
+	}
+
+	article, err := readability.FromReader(inbuf, uu)
+	if err != nil {
+		return nil, err
+	}
+	out, err := tpl.Execute(pongo2.Context{"article": article, "url": fetchurl})
+	if err != nil {
+		return nil, err
+	}
+	resp.Body = io.NopCloser(strings.NewReader(out))
+	return &resp, nil
+
+}
+
+// FIXME: This code is basically a pile of dung.
+// Templates render where they shouldn't, miniweb should be deprecated, etc.
+// Should also move this into its own file.
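+//
+// fetch retrieves fetchurl with the selected User-Agent, transparently
+// decompresses gzip bodies, extracts an article with readability or
+// nuparser depending on parser_select, and renders the header template.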
+func fetch(fetchurl string, user_agent string, parser_select bool, original *http.Request) (*http.Response, error) {
+
+	tpl, err := pongo2.FromString(Header)
+	if err != nil {
+		return nil, err
+
+	}
+	tr := &http.Transport{
+		MaxIdleConns:       10,
+		IdleConnTimeout:    30 * time.Second,
+		DisableCompression: false,
+	}
+	u, err := url.Parse(original.RequestURI)
+	if err != nil {
+		log.Println(err)
+		return nil, err
+	}
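+	// Build a "lightswitch" URL for the rendered page: the same request
+	// with the ?engine flag toggled, so the user can switch parsers.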
+	newQueryString := u.Query()
+	origQuery, _ := url.ParseQuery(original.URL.RawQuery)
+	if _, ok := origQuery["engine"]; !ok {
+		newQueryString.Set("engine", "1")
+	} else {
+		newQueryString.Del("engine")
+	}
+	u.RawQuery = newQueryString.Encode()
+	lightswitch := u.String()
+
+	client := &http.Client{}
+	client.Transport = tr
+
+	req, err := http.NewRequest("GET", fetchurl, nil)
+	if err != nil {
+		log.Println(err)
+		return nil, err
+	}
+	req.Header.Set("User-Agent", default_agent)
+	if user_agent != "" {
+		req.Header.Set("User-Agent", user_agent)
+	}
+
+	req.Header.Set("Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8")
+	req.Header.Set("Accept-Language", "en-US,en;q=0.9")
+	req.Header.Set("Accept-Encoding", "gzip")
+	resp, err := client.Do(req)
+	if err != nil {
+		return nil, err
+	}
+	defer resp.Body.Close()
+
+	var tmp bytes.Buffer
+	if strings.EqualFold(resp.Header.Get("Content-Encoding"), "gzip") {
+		log.Println("decompressing gzip response body")
+
+		gz, err := gzip.NewReader(resp.Body)
+		if err != nil {
+			return nil, err
+		}
+
+		contentSize := resp.ContentLength
+
+		if contentSize > maxBodySize {
+			return nil, errors.New("response body too large")
+		}
+
+		decompBuffMax := maxBodySize * 2
+
+		// The running total must live outside the loop; declaring it
+		// inside would reset it each pass and defeat the size guard.
+		var bytesRead int64
+		for {
+			n, err := io.CopyN(&tmp, gz, 4096)
+			bytesRead += n
+			if errors.Is(err, io.EOF) {
+				break
+			}
+			if err != nil {
+				return nil, err
+			}
+			if bytesRead > decompBuffMax {
+				return nil, errors.New("decompressed body exceeds size limit")
+			}
+		}
+
+		err = resp.Body.Close()
+		if err != nil {
+			return nil, err
+		}
+		resp.Body = io.NopCloser(&tmp)
+
+	}
+	var tmp2 bytes.Buffer
+	_, err = io.Copy(&tmp2, resp.Body)
+	if err != nil {
+		return nil, err
+	}
+
+	publishUrl, err := url.Parse(fetchurl)
+	if err != nil {
+		return resp, err
+	}
+
+	var article GenaricArticle
+
+	if parser_select {
+		raw_article, err := readability.FromReader(&tmp2, publishUrl)
+		if err != nil {
+			return nil, err
+		}
+		article = GenaricArticle{}
+		article.Byline = raw_article.Byline
+		article.Content = raw_article.Content
+		article.Title = raw_article.Title
+		article.Length = raw_article.Length
+		article.Image = raw_article.Image
+		article.Text = raw_article.TextContent
+	} else {
+		raw_article, err := nuparser.FromReader(&tmp2)
+		if err != nil {
+			return nil, err
+		}
+		article = GenaricArticle{}
+		article.Byline = raw_article.Byline
+		article.Content = raw_article.Content
+		article.Title = raw_article.Title
+		article.Length = raw_article.Length
+		article.Image = raw_article.Image
+	}
+
+	tmp_content := strings.NewReader(article.Content)
+	backupContent := strings.Clone(article.Content)
+	filteredContent, err := RewriteLinks(tmp_content, homeURL)
+
+	article.Content = filteredContent
+	if err != nil {
+		log.Println("failed filter pass " + fetchurl)
+		article.Content = backupContent
+	}
+
+	out, err := tpl.Execute(pongo2.Context{"article": article, "url": fetchurl, "switchurl": lightswitch})
+	if err != nil {
+		return nil, err
+	}
+	if strings.HasPrefix(original.Header.Get("User-Agent"), "curl") {
+		prettyBody := fmt.Sprintf("%s By %s\n %s\n ", article.Title, article.Byline, article.Text)
+		resp.Body = io.NopCloser(strings.NewReader(prettyBody))
+		return resp, err
+	}
+	resp.Body = io.NopCloser(strings.NewReader(out))
+
+	return resp, err
+
+}
diff --git a/handlers.go b/handlers.go
new file mode 100644
index 0000000..2c611a3
--- /dev/null
+++ b/handlers.go
@@ -0,0 +1,223 @@
+package main
+
+import (
+	"errors"
+	"io"
+	"log"
+	"net/http"
+	"net/url"
+	"strings"
+
+	"github.com/flosch/pongo2/v6"
+)
+
+var homeURL string = "http://localhost:3000"
+
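+// postFormHandler processes the index form: it validates the requested
+// User-Agent, records the agent and parser choice in a cookie, and
+// redirects to the proxied target URL.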
+func postFormHandler(w http.ResponseWriter, r *http.Request) {
+	if r.Method != http.MethodPost {
+		http.Error(w, "Method not allowed "+r.Method, http.StatusMethodNotAllowed)
+		return
+	}
+	err := r.ParseForm()
+	if err != nil {
+		http.Error(w, err.Error(), http.StatusInternalServerError)
+		return
+	}
+	log.Println(r.Form)
+	target_url := r.Form.Get("target_url")
+	rd := r.Form["readability"]
+	log.Println(rd)
+	ua := r.Form.Get("target_ua")
+	if !validUserAgent(ua) {
+		http.Error(w, "Agent not allowed "+ua, http.StatusForbidden)
+		return
+	}
+	vb := ArcParser
+	if len(rd) != 0 {
+		vb = NUParser
+	}
+	ckMstr := OurCookie{ua, vb}
+
+	encoded_ua, err := encodeCookie(ckMstr)
+	if err != nil {
+		log.Println(err)
+		http.Error(w, err.Error(), http.StatusInternalServerError)
+		return
+
+	}
+
+	final := r.URL.Hostname() + "/" + target_url
+	cookie := http.Cookie{
+		Name:     "blueProxyUserAgent",
+		Value:    encoded_ua,
+		Path:     "/",
+		MaxAge:   3600,
+		HttpOnly: true,
+		Secure:   false,
+		SameSite: http.SameSiteLaxMode,
+	}
+	http.SetCookie(w, &cookie)
+	log.Println(final)
+	req, err := http.NewRequest("GET", final, nil)
+	if err != nil {
+		log.Println(err)
+		http.Error(w, err.Error(), http.StatusInternalServerError)
+		return
+
+	}
+	req.Header.Add("X-Target-User-Agent", ua)
+	w.Header().Set("X-Target-User-Agent", ua)
+	http.Redirect(w, req, final, http.StatusFound)
+
+}
+
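+// indexHandler renders the landing page for "/" and treats any other
+// path as a proxy request for the URL encoded in it.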
+func indexHandler(w http.ResponseWriter, r *http.Request) {
+	fakeCookie := http.Cookie{
+		Name:     "blueProxyUserAgent",
+		Path:     "/",
+		MaxAge:   3600,
+		HttpOnly: true,
+		Secure:   false,
+		SameSite: http.SameSiteLaxMode,
+	}
+	if r.Method == http.MethodPost {
+		http.Error(w, "I am not an owl", http.StatusTeapot)
+		return
+	}
+	var tpl = pongo2.Must(pongo2.FromFile("index.html"))
+
+	if r.URL.Path == "/" {
+		err := tpl.ExecuteWriter(pongo2.Context{"useragents": UserAgents, "version": version}, w)
+
+		if err != nil {
+			http.Error(w, err.Error(), http.StatusInternalServerError)
+		}
+		return
+	}
+	queryParams, _ := url.ParseQuery(r.URL.RawQuery)
+	homeURL = "http://" + r.Host
+	log.Println(homeURL)
+
+	requesterUserAgent := r.Header.Get("User-Agent")
+
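+	// The target URL is packed into the request path as "/scheme:/host/...";
+	// split off the scheme and rejoin with "//" to rebuild the full URL.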
+	urlparts := strings.SplitN(r.URL.Path[1:], "/", 2)
+	if len(urlparts) < 2 {
+		http.Error(w, "malformed proxy path", http.StatusBadRequest)
+		return
+	}
+
+	remurl := urlparts[0] + "//" + urlparts[1]
+	encoded_ua, err := encodeCookie(defaultCookie)
+	if err != nil {
+		log.Println(err)
+		http.Error(w, err.Error(), http.StatusInternalServerError)
+		return
+	}
+	fakeCookie.Value = encoded_ua
+	if strings.HasPrefix(requesterUserAgent, "curl") {
+		_, err = validateURL(remurl)
+
+		if err != nil {
+			http.Error(w, err.Error()+" "+remurl, http.StatusTeapot)
+			return
+		}
+		ur, _ := url.Parse(remurl)
+		if ur.Scheme == "gemini" {
+			http.Error(w, "Gemini not supported through curl", http.StatusBadGateway)
+			return
+		}
+		a, err := fetch(remurl, default_agent, bool(ArcParser), r)
+		if err != nil {
+			http.Error(w, err.Error(), http.StatusInternalServerError)
+			return
+		}
+
+		io.Copy(w, a.Body)
+		return
+
+	}
+
+	ur, err := url.Parse(remurl)
+	if err != nil {
+		http.Error(w, err.Error(), http.StatusInternalServerError)
+		return
+	}
+
+	log.Println("Honk!")
+	log.Println(ur.String())
+	if ur.Scheme == "gemini" {
+		// Reattach the query string, which the path-based rebuild drops.
+		if r.URL.RawQuery != "" {
+			remurl += "?" + r.URL.RawQuery
+		}
+		resp, err := gmiFetch(remurl)
+		if err != nil {
+			http.Error(w, err.Error(), http.StatusBadRequest)
+			return
+		}
+		defer resp.Body.Close()
+		_, err = io.Copy(w, resp.Body)
+		if err != nil {
+			http.Error(w, err.Error(), http.StatusInternalServerError)
+		}
+		return
+	}
+
+	_, err = validateURL(remurl)
+	if err != nil {
+		http.Error(w, err.Error()+" "+remurl, http.StatusTeapot)
+		return
+	}
+	var cookie *http.Cookie
+	cookie, err = r.Cookie("blueProxyUserAgent")
+	if err != nil {
+		switch {
+		case errors.Is(err, http.ErrNoCookie):
+			cookie = &fakeCookie
+			http.SetCookie(w, cookie)
+			http.Redirect(w, r, r.RequestURI, http.StatusSeeOther)
+
+		default:
+			log.Println(err)
+			http.Error(w, "server error", http.StatusInternalServerError)
+		}
+		return
+	}
+	decagent, err := decodeCookie(cookie.Value)
+	if err != nil {
+		http.Error(w, err.Error(), http.StatusInternalServerError)
+		return
+	}
+
+	if r.Header.Get("X-Forwarded-For") != "" {
+		log.Printf("%s: %s", r.Header.Get("X-Forwarded-For"), remurl)
+	} else {
+		log.Printf("%v: %s", r.RemoteAddr, remurl)
+	}
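+	// A ?engine query flag inverts the parser choice stored in the cookie,
+	// letting the "switch engine" link toggle between readability and
+	// nuparser for a single page view.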
+	var parser_select bool
+	if _, ok := queryParams["engine"]; !ok {
+		parser_select = !bool(decagent.Parser)
+	} else {
+		parser_select = bool(decagent.Parser)
+	}
+
+	resp, err := fetch(remurl, decagent.UserAgent, parser_select, r)
+	if err != nil {
+		http.Error(w, err.Error(), http.StatusInternalServerError)
+		return
+	}
+	defer resp.Body.Close()
+	_, err = io.Copy(w, resp.Body)
+	if err != nil {
+		http.Error(w, err.Error(), http.StatusInternalServerError)
+		return
+	}
+
+}
diff --git a/main.go b/main.go
index acdd635..2e3062d 100644
--- a/main.go
+++ b/main.go
@@ -1,455 +1,24 @@
 package main
 
 import (
-	"bytes"
-	"compress/gzip"
-	"encoding/base64"
-	"encoding/json"
-	"errors"
 	"fmt"
-	"io"
 	"log"
 	"net/http"
-	"net/url"
 	"os"
-	"strings"
 	"time"
 
 	"net/http/pprof"
-
-	"github.com/flosch/pongo2/v6"
-	readability "github.com/go-shiori/go-readability"
-
-	"piusbird.space/poseidon/nuparser"
 )
 
-var homeURL string = "http://localhost:3000"
-
-func encodeCookie(c OurCookie) (string, error) {
-	first, err := json.Marshal(c)
-	if err != nil {
-		return "", err
-	}
-	output := base64.URLEncoding.EncodeToString(first)
-	return output, nil
-}
-
-func decodeCookie(cookieValue string) (OurCookie, error) {
-	decodedJson, err := base64.URLEncoding.DecodeString(cookieValue)
-	var oc OurCookie
-	if err != nil {
-		return oc, err
-	}
-	err = json.Unmarshal([]byte(decodedJson), &oc)
-	if err != nil {
-		return oc, err
-	}
-	return oc, nil
-}
-func postFormHandler(w http.ResponseWriter, r *http.Request) {
-	if r.Method != http.MethodPost {
-
-		http.Error(w, "Method not allowed "+r.Method, http.StatusInternalServerError)
-		return
-	}
-	err := r.ParseForm()
-	if err != nil {
-		http.Error(w, err.Error(), http.StatusInternalServerError)
-		return
-	}
-	log.Println(r.Form)
-	target_url := r.Form.Get("target_url")
-	rd := r.Form["readability"]
-	log.Println(rd)
-	ua := r.Form.Get("target_ua")
-	if !validUserAgent(ua) {
-		http.Error(w, "Agent not allowed "+ua, http.StatusForbidden)
-	}
-	vb := ArcParser
-	if len(rd) != 0 {
-		vb = NUParser
-	}
-	ckMstr := OurCookie{ua, vb}
-
-	encoded_ua, err := encodeCookie(ckMstr)
-	if err != nil {
-		log.Println(err)
-		http.Error(w, err.Error(), http.StatusInternalServerError)
-		return
-
-	}
-
-	final := r.URL.Hostname() + "/" + target_url
-	cookie := http.Cookie{
-		Name:     "blueProxyUserAgent",
-		Value:    encoded_ua,
-		Path:     "/",
-		MaxAge:   3600,
-		HttpOnly: true,
-		Secure:   false,
-		SameSite: http.SameSiteLaxMode,
-	}
-	http.SetCookie(w, &cookie)
-	log.Println(final)
-	req, err := http.NewRequest("GET", final, nil)
-	if err != nil {
-		log.Println(err)
-		http.Error(w, err.Error(), http.StatusInternalServerError)
-		return
-
-	}
-	req.Header.Add("X-Target-User-Agent", ua)
-	w.Header().Set("X-Target-User-Agent", ua)
-	http.Redirect(w, req, final, http.StatusFound)
-
-}
-func gmiFetch(fetchurl string) (*http.Response, error) {
-	resp := http.Response{
-		Body: io.NopCloser(bytes.NewBufferString("Gemini Contenet")),
-	}
-	tpl, err := pongo2.FromString(Header)
-	if err != nil {
-		return nil, err
-	}
-	rawhtml, err := gmiGet(fetchurl, 0)
-	if err != nil {
-		return nil, err
-	}
-	uu, err := url.Parse(fetchurl)
-	if err != nil {
-		return nil, err
-	}
-	tmpbuf := strings.NewReader(rawhtml)
-	filteredContent, err := RewriteLinks(tmpbuf, homeURL)
-	inbuf := strings.NewReader(filteredContent)
-	if err != nil {
-		log.Println("Failed filter pass on " + fetchurl)
-		inbuf = strings.NewReader(rawhtml)
-	}
-
-	article, err := readability.FromReader(inbuf, uu)
-	if err != nil {
-		return nil, err
-	}
-	out, err := tpl.Execute(pongo2.Context{"article": article, "url": fetchurl})
-	if err != nil {
-		return nil, err
-	}
-	resp.Body = io.NopCloser(strings.NewReader(out))
-	return &resp, nil
-
-}
-
-// FIXME: This code is basically a pile of dung
-// Templates render where they shouldn't, miniweb should be deprecated etc, etc
-// Shpuld also move this into it's own file
-func fetch(fetchurl string, user_agent string, parser_select bool, original *http.Request) (*http.Response, error) {
-
-	tpl, err := pongo2.FromString(Header)
-	if err != nil {
-		return nil, err
-
-	}
-	tr := &http.Transport{
-		MaxIdleConns:       10,
-		IdleConnTimeout:    30 * time.Second,
-		DisableCompression: false,
-	}
-	u, err := url.Parse(original.RequestURI)
-	if err != nil {
-		log.Println(err)
-		return nil, err
-	}
-	newQueryString := u.Query()
-	origQuery, _ := url.ParseQuery(original.URL.RawQuery)
-	if _, ok := origQuery["engine"]; !ok {
-		newQueryString.Set("engine", "1")
-	} else {
-		newQueryString.Del("engine")
-	}
-	u.RawQuery = newQueryString.Encode()
-	lightswitch := u.String()
-
-	client := &http.Client{}
-	client.Transport = tr
-
-	req, err := http.NewRequest("GET", fetchurl, nil)
-	if err != nil {
-		log.Println(err)
-		return nil, err
-	}
-	req.Header.Set("User-Agent", default_agent)
-	if user_agent != "" {
-		req.Header.Set("User-Agent", user_agent)
-	}
-
-	req.Header.Set("Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8")
-	req.Header.Set("Accept-Language", "en-US,en;q=0.9")
-	req.Header.Set("Accept-Encoding", "gzip")
-	resp, err := client.Do(req)
-	if err != nil {
-		return nil, err
-	}
-	defer resp.Body.Close()
-
-	var tmp bytes.Buffer
-	if strings.EqualFold(resp.Header.Get("Content-Encoding"), "gzip") {
-		log.Println("Yes we gziped")
-
-		gz, _ := gzip.NewReader(resp.Body)
-
-		contentSize := resp.ContentLength
-
-		if contentSize > maxBodySize {
-			return nil, errors.New("response body to large")
-		}
-
-		decompBuffMax := maxBodySize * 2
-		log.Println("dezipping")
-
-		for {
-			var bytesRead int64 = 0
-			n, err := io.CopyN(&tmp, gz, 4096)
-			if errors.Is(err, io.EOF) {
-				break
-			}
-			bytesRead += n
-			if bytesRead > decompBuffMax {
-				return nil, errors.New("decompression failed")
-			}
-
-		}
-
-		err = resp.Body.Close()
-		if err != nil {
-			return nil, err
-		}
-		resp.Body = io.NopCloser(&tmp)
-
-	}
-	var tmp2 bytes.Buffer
-	_, err = io.Copy(&tmp2, resp.Body)
-	if err != nil {
-		return nil, err
-	}
-
-	publishUrl, err := url.Parse(fetchurl)
-	if err != nil {
-		return resp, err
-	}
-
-	var article GenaricArticle
-
-	if parser_select {
-		raw_article, err := readability.FromReader(&tmp2, publishUrl)
-		if err != nil {
-			return nil, err
-		}
-		article = GenaricArticle{}
-		article.Byline = raw_article.Byline
-		article.Content = raw_article.Content
-		article.Title = raw_article.Title
-		article.Length = raw_article.Length
-		article.Image = raw_article.Image
-		article.Text = raw_article.TextContent
-	} else {
-		raw_article, err := nuparser.FromReader(&tmp2)
-		if err != nil {
-			return nil, err
-		}
-		article = GenaricArticle{}
-		article.Byline = raw_article.Byline
-		article.Content = raw_article.Content
-		article.Title = raw_article.Title
-		article.Length = raw_article.Length
-		article.Image = raw_article.Image
-	}
-
-	tmp_content := strings.NewReader(article.Content)
-	backupContent := strings.Clone(article.Content)
-	filteredContent, err := RewriteLinks(tmp_content, homeURL)
-
-	article.Content = filteredContent
-	if err != nil {
-		log.Println("failed filter pass " + fetchurl)
-		article.Content = backupContent
-	}
-
-	out, err := tpl.Execute(pongo2.Context{"article": article, "url": fetchurl, "switchurl": lightswitch})
-	if err != nil {
-		return nil, err
-	}
-	if strings.HasPrefix(original.Header.Get("User-Agent"), "curl") {
-		prettyBody := fmt.Sprintf("%s By %s\n %s\n ", article.Title, article.Byline, article.Text)
-		resp.Body = io.NopCloser(strings.NewReader(prettyBody))
-		return resp, err
-	}
-	resp.Body = io.NopCloser(strings.NewReader(out))
-
-	return resp, err
-
-}
-
-func indexHandler(w http.ResponseWriter, r *http.Request) {
-	fakeCookie := http.Cookie{
-		Name:     "blueProxyUserAgent",
-		Path:     "/",
-		MaxAge:   3600,
-		HttpOnly: true,
-		Secure:   false,
-		SameSite: http.SameSiteLaxMode,
-	}
-	if r.Method == http.MethodPost {
-		http.Error(w, "I am not an owl", http.StatusTeapot)
-		return
-	}
-	var tpl = pongo2.Must(pongo2.FromFile("index.html"))
-
-	if r.URL.Path == "/" {
-		err := tpl.ExecuteWriter(pongo2.Context{"useragents": UserAgents, "version": version}, w)
-
-		if err != nil {
-			http.Error(w, err.Error(), http.StatusInternalServerError)
-		}
-		return
-	}
-	queryParams, _ := url.ParseQuery(r.URL.RawQuery)
-	var err error
-	homeURL = "http://" + r.Host
-	log.Println(homeURL)
-	if err != nil {
-		http.Error(w, "Lost my soul", http.StatusInternalServerError)
-		return
-	}
-
-	requesterUserAgent := r.Header.Get("User-Agent")
-
-	urlparts := strings.SplitN(r.URL.Path[1:], "/", 2)
-	if len(urlparts) < 2 {
-		return
-	}
-
-	remurl := urlparts[0] + "//" + urlparts[1]
-	encoded_ua, err := encodeCookie(defaultCookie)
-	fakeCookie.Value = encoded_ua
-	if strings.HasPrefix(requesterUserAgent, "curl") {
-		_, err = validateURL(remurl)
-
-		if err != nil {
-			http.Error(w, err.Error()+" "+remurl, http.StatusTeapot)
-			return
-		}
-		ur, _ := url.Parse(remurl)
-		if ur.Scheme == "gemini" {
-			http.Error(w, "Gemini not supported through curl", http.StatusBadGateway)
-			return
-		}
-		a, err := fetch(remurl, default_agent, bool(ArcParser), r)
-		if err != nil {
-			http.Error(w, err.Error(), http.StatusInternalServerError)
-			return
-		}
-
-		io.Copy(w, a.Body)
-		return
-
-	}
-
-	if err != nil {
-		log.Println(err)
-		http.Error(w, err.Error(), http.StatusInternalServerError)
-		return
-
-	}
-	ur, err := url.Parse(remurl)
-	if err != nil {
-		http.Error(w, err.Error(), http.StatusInternalServerError)
-		return
-	}
-
-	log.Println("Honk!")
-	log.Println(ur.String())
-	if ur.Scheme == "gemini" {
-		remurl += r.URL.RawQuery
-		resp, err := gmiFetch(remurl)
-		if err != nil {
-			http.Error(w, err.Error(), http.StatusBadRequest)
-			return
-		}
-		defer resp.Body.Close()
-		_, err = io.Copy(w, resp.Body)
-		if err != nil {
-			http.Error(w, err.Error(), http.StatusInternalServerError)
-		}
-		return
-	}
-
-	_, err = validateURL(remurl)
-	if err != nil {
-		http.Error(w, err.Error()+" "+remurl, http.StatusTeapot)
-		return
-	}
-	var cookie *http.Cookie
-	cookie, err = r.Cookie("blueProxyUserAgent")
-	if err != nil {
-		switch {
-		case errors.Is(err, http.ErrNoCookie):
-			cookie = &fakeCookie
-			http.SetCookie(w, cookie)
-			http.Redirect(w, r, r.RequestURI, http.StatusSeeOther)
-
-		default:
-			log.Println(err)
-			http.Error(w, "server error", http.StatusInternalServerError)
-		}
-		return
-	}
-	decagent, err := decodeCookie(cookie.Value)
-	if err != nil {
-		http.Error(w, err.Error(), http.StatusInternalServerError)
-		return
-	}
-
-	if err != nil {
-		http.Error(w, err.Error(), http.StatusInternalServerError)
-		return
-	}
-
-	if r.Header.Get("X-Forwarded-For") != "" {
-		log.Printf("%s: %s", r.Header.Get("X-Forwarded-For"), remurl)
-	} else {
-		log.Printf("%v: %s", r.RemoteAddr, remurl)
-	}
-	var parser_select bool
-	if _, ok := queryParams["engine"]; !ok {
-		parser_select = !bool(decagent.Parser)
-	} else {
-		parser_select = bool(decagent.Parser)
-	}
-
-	resp, err := fetch(remurl, decagent.UserAgent, parser_select, r)
-	if err != nil {
-		http.Error(w, err.Error(), http.StatusInternalServerError)
-		return
-	}
-	defer resp.Body.Close()
-	_, err = io.Copy(w, resp.Body)
-	if err != nil {
-		http.Error(w, err.Error(), http.StatusInternalServerError)
-		return
-	}
-
-}
-
-func main() {
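+// main_wrap holds the real program body and returns a process exit code;
+// main simply hands that to os.Exit. Keeping the logic out of main lets
+// deferred cleanups (like closing the log file) run before exit.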
+func main_wrap() int {
 	srv := &http.Server{
 		ReadTimeout:  5 * time.Second,
 		WriteTimeout: 10 * time.Second,
 	}
-	logfile, err := os.OpenFile("access.log", os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644)
+	logfile, err := os.OpenFile("access.log", os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0600)
 	if err != nil {
 		log.Println("Error opening log")
-		panic(err)
+		return 1
 	}
 	defer logfile.Close()
 
@@ -479,6 +48,11 @@ func main() {
 
 	err = srv.ListenAndServe()
 	if err != nil {
-		panic(err)
+		fmt.Fprintln(os.Stderr, err)
+		return 1
 	}
+	return 0
+}
+func main() {
+	os.Exit(main_wrap())
 }
diff --git a/middleware.go b/middleware.go
index 73725de..06cf812 100644
--- a/middleware.go
+++ b/middleware.go
@@ -1,6 +1,8 @@
 package main
 
 import (
+	"encoding/base64"
+	"encoding/json"
 	"net/http"
 	"os"
 	"time"
@@ -28,3 +30,25 @@ func LoggingWrapper(log *os.File, handler http.HandlerFunc) http.HandlerFunc {
 		log.WriteString(r.RemoteAddr + " " + t.Format(time.UnixDate) + " " + r.RequestURI + "\n")
 	}
 }
+
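+// encodeCookie serializes an OurCookie as JSON and base64url-encodes it
+// so it can be stored safely in a cookie value.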
+func encodeCookie(c OurCookie) (string, error) {
+	first, err := json.Marshal(c)
+	if err != nil {
+		return "", err
+	}
+	output := base64.URLEncoding.EncodeToString(first)
+	return output, nil
+}
+
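+// decodeCookie reverses encodeCookie: base64url-decode, then unmarshal
+// the JSON back into an OurCookie.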
+func decodeCookie(cookieValue string) (OurCookie, error) {
+	decodedJson, err := base64.URLEncoding.DecodeString(cookieValue)
+	var oc OurCookie
+	if err != nil {
+		return oc, err
+	}
+	err = json.Unmarshal(decodedJson, &oc)
+	if err != nil {
+		return oc, err
+	}
+	return oc, nil
+}