author    Petter Rodhelind <petter.rodhelind@gmail.com>    2021-02-21 22:55:58 +0100
committer Petter Rodhelind <petter.rodhelind@gmail.com>    2021-02-21 22:55:58 +0100
commit    bb1e6b3f9fa33fe018291ccde6fbfd7a48b459cc (patch)
tree      fd01effd432925ff0ca7af3859add961c97d3fa3
parent    7a56cec6b6122d1e7b80e4283f259346c7eb3547 (diff)
Implement concurrently safe caching of requests.
-rw-r--r-- main.go | 74
1 file changed, 55 insertions, 19 deletions
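
The patch below adds a mutex-guarded map to cache fetched channels. For reference, here is a minimal, self-contained sketch of that pattern (illustrative only, not part of the patch; the get/set helper names and the "somegroup" key are placeholders), showing the map being read and written only while the lock is held:

package main

import (
	"fmt"
	"sync"
	"time"
)

// channel is a stand-in for the feed type cached in main.go.
type channel struct {
	Name string
	Time time.Time
}

// safeCache guards a map of cached channels with a mutex.
type safeCache struct {
	sync.Mutex
	v map[string]*channel
}

// get returns the cached channel for name, reading the map under the lock.
func (s *safeCache) get(name string) (*channel, bool) {
	s.Lock()
	defer s.Unlock()
	c, ok := s.v[name]
	return c, ok
}

// set stores c under its name, writing the map under the lock.
func (s *safeCache) set(c *channel) {
	s.Lock()
	defer s.Unlock()
	s.v[c.Name] = c
}

func main() {
	cache := safeCache{v: make(map[string]*channel)}
	cache.set(&channel{Name: "somegroup", Time: time.Now()}) // placeholder group name
	if c, ok := cache.get("somegroup"); ok {
		fmt.Println("cached:", c.Name, c.Time.Format(time.RFC1123))
	}
}
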
diff --git a/main.go b/main.go
index fc243fa..87d40fa 100644
--- a/main.go
+++ b/main.go
@@ -2,17 +2,40 @@ package main
import (
"fmt"
+ "log"
"net/http"
"strings"
+ "sync"
"time"
)
-var outputMode string
+var (
+ // outputMode is either HTML or RSS
+ outputMode string
+
+ // cache holds results of previous network calls for re-use within the set timeframe
+ cache safeCache
+
+ // cacheTimeout sets how long, in seconds, a channel will be cached
+ cacheTimeout float64
+)
+
+func init() {
+ outputMode = "html"
+ cache = safeCache{v: make(map[string]*channel)}
+ cacheTimeout = 60 * 15 // seconds
+}
+
+type safeCache struct {
+ sync.Mutex
+ v map[string]*channel
+}
type channel struct {
Title string
Link string
Name string
+ Time time.Time
Description string
Items []*post
}
@@ -65,7 +88,7 @@ func (p *post) String() string {
var s string
// time format: Mon Jan 2 15:04:05 -0700 MST 2006
- s = strings.Replace(template, "{{time}}", p.Time.Format("Mon, 2 Jan 2006 15:04:05 MST"), 2)
+ s = strings.Replace(template, "{{time}}", p.Time.Format("Mon, 2 Jan 2006 15:04:05"), 2)
s = strings.Replace(s, "{{content}}", p.Content, 1)
var imgs string
for i := range p.Images {
@@ -79,34 +102,40 @@ func (p *post) String() string {
type handler struct{}
func (h handler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
+ if r.URL.Path == "/favicon.ico" {
+ return
+ }
group := r.URL.Path[1:]
if strings.HasSuffix(group, ".rss") {
group = strings.TrimSuffix(group, ".rss")
outputMode = "rss"
}
- c, err := fetch(group)
- if err != nil {
- http.Error(w, fmt.Sprintf("error: %s", err), 400)
- return
- }
-
- if c == nil || len(c.Items) < 1 {
- http.Error(w, fmt.Sprintf("%s", "group not found"), 400)
- return
+ c, ok := cache.v[group]
+ if !ok || time.Now().Sub(c.Time).Seconds() > cacheTimeout {
+ var err error
+ c, err = fetch(group)
+ if err != nil {
+ http.Error(w, fmt.Sprintf("error: %s", err), 400)
+ return
+ }
+
+ if c == nil || len(c.Items) < 1 {
+ http.Error(w, fmt.Sprintf("%s", "group not found"), 400)
+ return
+ }
+
+ c.Name = group
+ c.Time = time.Now()
+
+ cache.Lock()
+ cache.v[c.Name] = c
+ cache.Unlock()
}
- c.Name = group
-
fmt.Fprintf(w, "%s\n", c.String())
}
-func main() {
- outputMode = "html"
- fmt.Println("Serving: http://localhost:1212")
- http.ListenAndServe(":1212", handler{})
-}
-
func fetch(group string) (c *channel, err error) {
if group == "" {
return
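
The ServeHTTP changes above compare time.Now().Sub(c.Time).Seconds() against a float64 number of seconds. A small sketch (not part of the patch, names illustrative) of the same staleness check written with time.Since and a time.Duration timeout, which avoids the unit conversion:

package main

import (
	"fmt"
	"time"
)

// cacheTimeout expressed as a Duration instead of float64 seconds.
const cacheTimeout = 15 * time.Minute

// stale reports whether a cache entry created at t has expired.
func stale(t time.Time) bool {
	return time.Since(t) > cacheTimeout
}

func main() {
	created := time.Now().Add(-20 * time.Minute) // pretend the entry is 20 minutes old
	fmt.Println("stale:", stale(created))        // true: older than the 15-minute timeout
}
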
@@ -114,6 +143,8 @@ func fetch(group string) (c *channel, err error) {
url := "https://www.facebook.com/pg/" + group + "/posts/"
+ log.Println("Fetching: ", url)
+
resp, err := http.Get(url)
if err != nil {
return
@@ -129,3 +160,8 @@ func fetch(group string) (c *channel, err error) {
return
}
+
+func main() {
+ log.Println("Serving: http://localhost:1212")
+ http.ListenAndServe(":1212", handler{})
+}
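
The relocated main ignores the error returned by http.ListenAndServe. A common pattern (not part of this commit) is to wrap the call in log.Fatal so startup failures, such as the port already being in use, are reported; the empty handler below is a stand-in for the real one in main.go:

package main

import (
	"log"
	"net/http"
)

// handler stands in for the handler type defined in main.go.
type handler struct{}

func (h handler) ServeHTTP(w http.ResponseWriter, r *http.Request) {}

func main() {
	log.Println("Serving: http://localhost:1212")
	// ListenAndServe only returns on error, so log.Fatal surfaces it.
	log.Fatal(http.ListenAndServe(":1212", handler{}))
}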