From 0c29d2652956bd07327295025d5277676d48b718 Mon Sep 17 00:00:00 2001
From: Denis Zheleztsov
Date: Tue, 30 Mar 2021 19:47:25 +0300
Subject: [PATCH] [ssr]: Read only is working

---
 node/api.go                      |   4 +
 node/elastic.go                  | 130 ++++++++++++++++++++++++++-
 node/ssr.go                      | 146 ++++++++++++++++++++++++++++++-
 templates/common/header.html     |  15 ++--
 templates/common/style.html      |   3 +
 templates/components/echoes.html |   4 +-
 templates/components/post.html   |  12 +--
 templates/meta/forum.html        |  20 +++--
 templates/views/echo.html        |  40 +++++++++
 templates/views/message.html     |  19 ++++
 templates/views/root.html        |   4 +-
 templates/views/search.html      |  17 ++++
 templates/views/thread.html      |  17 ++++
 13 files changed, 406 insertions(+), 25 deletions(-)
 create mode 100644 templates/views/echo.html
 create mode 100644 templates/views/message.html
 create mode 100644 templates/views/search.html
 create mode 100644 templates/views/thread.html

diff --git a/node/api.go b/node/api.go
index acfdd2d..c4bb716 100644
--- a/node/api.go
+++ b/node/api.go
@@ -233,6 +233,10 @@ func Serve(listen string, es ESConf) {
 	ssr := newSSR("./templates", es)
 	r.HandleFunc("/", ssr.ssrRootHandler)
 	r.HandleFunc("/forum", ssr.ssrForumHandler)
+	r.HandleFunc("/echo/{echo:[a-z0-9-_.]+}/page/{page:[0-9]+}", ssr.echoViewHandler)
+	r.HandleFunc("/thread/{topicid:[a-z0-9-]+}", ssr.threadViewHandler)
+	r.HandleFunc("/msg/{msgid:[a-zA-Z0-9]{20}}", ssr.singleMessageHandler)
+	r.HandleFunc("/find", ssr.searchHandler).Methods(http.MethodGet)

 	http.Handle("/", r)

diff --git a/node/elastic.go b/node/elastic.go
index 936e3da..c070a37 100644
--- a/node/elastic.go
+++ b/node/elastic.go
@@ -178,6 +178,42 @@ func (es ESConf) GetLimitedEchoMessageHashes(echo string, offset int, limit int)
 	return hashes
 }

+func (es ESConf) DoSearch(query string) []i2es.ESDoc {
+	q := `{"sort": [
+    {"date":{ "order": "desc" }},{ "_score":{ "order": "desc" }}],
+    "query": {"query_string" : {"fields": ["message", "subg"], "query":` + query + `}}, "size": 100}`
+
+	req, err := http.NewRequest("POST", es.searchURI(), bytes.NewBuffer([]byte(q)))
+	if err != nil {
+		log.Error(err.Error())
+		return nil
+	}
+	req.Header.Add("Content-Type", "application/json")
+
+	client := &http.Client{}
+	resp, err := client.Do(req)
+	if err != nil {
+		log.Error(err.Error())
+		return nil
+	}
+
+	defer resp.Body.Close()
+
+	var esr ESSearchResp
+	err = json.NewDecoder(resp.Body).Decode(&esr)
+	if err != nil {
+		log.Error(err.Error())
+		return nil
+	}
+
+	var posts []i2es.ESDoc
+	for _, hit := range esr.Hits.Hits {
+		posts = append(posts, hit.Source)
+	}
+
+	return posts
+}
+
 func (es ESConf) GetUMMessages(msgs string) []string {
 	var encodedMessages []string

@@ -372,8 +408,97 @@ type ThreadBucket struct {
 	Post Hits
 }

-func (es ESConf) GetThreads(echoes ...string) (posts []i2es.ESDoc) {
-	query := `{"sort":[{"date":{"order":"desc"}}],"aggs":{"topics":{"terms":{"field":"topicid.keyword","size":100},"aggs":{"post":{"top_hits":{"size":1,"sort":[{"date":{"order":"desc"}}],"_source":{"include":["subg","author","date","echo","topicid","address"]}}}}}},"query":{"bool":{"must":[{"range":{"date":{"from":"now-30d","to":"now-0d"}}},{"constant_score":{"filter":{"terms":{"echo.keyword":["idec.talks","pipe.2032","linux.14","develop.16","dynamic.local","std.club","std.hugeping","oldpc.51t.ru","difrex.blog","ii.test.14"]}}}}]}}}`
+var defaultEchoes = []string{`"idec.talks"`, `"pipe.2032"`, `"linux.14"`, `"develop.16"`, `"dynamic.local"`, `"std.club"`, `"std.hugeping"`, `"difrex.blog"`, `"ii.test.14"`}
+
+func (es ESConf) GetTopic(topicID string) (posts []i2es.ESDoc) {
+	query := []byte(strings.Join([]string{
+		`{"sort": [{"date": {"order": "asc"}},
+        {"_score": {"order": "desc" }}], "size":1000,"query": {"term": {"topicid.keyword": "`, topicID, `"}}}`}, ""))
+
+	req, err := http.NewRequest("POST", es.searchURI(), bytes.NewReader([]byte(query)))
+	if err != nil {
+		log.Error(err)
+		return
+	}
+
+	client := &http.Client{}
+	resp, err := client.Do(req)
+	if err != nil {
+		log.Error(err)
+		return
+	}
+	defer resp.Body.Close()
+
+	var esr ESSearchResp
+	err = json.NewDecoder(resp.Body).Decode(&esr)
+	if err != nil {
+		log.Error(err.Error())
+		return nil
+	}
+
+	for _, hit := range esr.Hits.Hits {
+		hit.Source.Message = strings.Trim(hit.Source.Message, "\n")
+		hit.Source.Date = parseTime(hit.Source.Date)
+		posts = append(posts, hit.Source)
+	}
+
+	return
+}
+
+func (es ESConf) GetMessage(msgID string) (posts []i2es.ESDoc) {
+	query := []byte(strings.Join([]string{
+		`{"sort": [{"date": {"order": "asc"}},
+        {"_score": {"order": "desc" }}], "size":1000,"query": {"term": {"msgid.keyword": "`, msgID, `"}}}`}, ""))
+
+	req, err := http.NewRequest("POST", es.searchURI(), bytes.NewReader([]byte(query)))
+	if err != nil {
+		log.Error(err)
+		return
+	}
+
+	client := &http.Client{}
+	resp, err := client.Do(req)
+	if err != nil {
+		log.Error(err)
+		return
+	}
+	defer resp.Body.Close()
+
+	var esr ESSearchResp
+	err = json.NewDecoder(resp.Body).Decode(&esr)
+	if err != nil {
+		log.Error(err.Error())
+		return nil
+	}
+
+	for _, hit := range esr.Hits.Hits {
+		hit.Source.Message = strings.Trim(hit.Source.Message, "\n")
+		hit.Source.Date = parseTime(hit.Source.Date)
+		posts = append(posts, hit.Source)
+	}
+
+	return
+}
+
+func (es ESConf) GetThreads(pageNum int, echoes ...string) (posts []i2es.ESDoc) {
+	ech := defaultEchoes
+	if len(echoes) > 0 {
+		ech = []string{}
+		for _, echo := range echoes {
+			ech = append(ech, fmt.Sprintf(`"%s"`, echo))
+		}
+	}
+	rangeStr := `"from":"now-30d","to":"now-0d"`
+	if pageNum > 1 {
+		to := 30*pageNum - 30
+		from := 30 * pageNum
+		rangeStr = fmt.Sprintf(`"from":"now-%dd","to":"now-%dd"`, from, to)
+	}
+	log.Debug(rangeStr)
+
+	query := `{"sort":[{"date":{"order":"desc"}}],"aggs":{"topics":{"terms":{"field":"topicid.keyword","size":100},"aggs":{"post":{"top_hits":{"size":1,"sort":[{"date":{"order":"desc"}}],"_source":{"include": ["subg","author","date","echo","topicid","address"]}}}}}},"query":{"bool":{"must":[{"range":{"date":{` + rangeStr + `}}},{"constant_score":{"filter":{"terms":{"echo.keyword": [` +
+		strings.Join(ech, ",") +
+		`]}}}}]}}}`
 	req, err := http.NewRequest("POST", es.searchURI(), bytes.NewReader([]byte(query)))
 	if err != nil {
 		log.Error(err)
 		return
 	}
@@ -393,6 +518,7 @@ func (es ESConf) GetThreads(echoes ...string) (posts []i2es.ESDoc) {
 		log.Error(err)
 		return
 	}
+
 	for _, bucket := range data.Aggregations.Topics.Buckets {
 		for _, post := range bucket.Post.Hits.Hits {
 			posts = append(posts, post.Source)
diff --git a/node/ssr.go b/node/ssr.go
index a2b1d4d..76e8246 100644
--- a/node/ssr.go
+++ b/node/ssr.go
@@ -1,9 +1,14 @@
 package node

 import (
+	"encoding/json"
 	"net/http"
+	"time"
+
+	"strconv"

 	"gitea.difrex.ru/Umbrella/fetcher/i2es"
+	"github.com/gorilla/mux"
 	log "github.com/sirupsen/logrus"
 )

@@ -22,14 +27,32 @@ func newSSR(templatesDir string, es ESConf) *ssr {
 type PageData struct {
 	Echoes      []echo
 	CurrentPage string
+	PageNum     int
 	Posts       []i2es.ESDoc
 }

-func (s *ssr) newPageData(page string, posts []i2es.ESDoc) *PageData {
+func (p *PageData) GetDate(date string) string {
+	d, err := strconv.ParseInt(date, 0, 64)
+	if err != nil {
+		return err.Error()
+	}
+	return time.Unix(d, 0).UTC().Format("02 Jan 06 15:04 MST")
+}
+
+func (p *PageData) Inc() int {
+	return p.PageNum + 1
+}
+
+func (p *PageData) Dec() int {
+	return p.PageNum - 1
+}
+
+func (s *ssr) newPageData(page string, posts []i2es.ESDoc, num int) *PageData {
 	return &PageData{
 		Echoes:      s.es.GetEchoesList(),
 		Posts:       posts,
 		CurrentPage: page,
+		PageNum:     num,
 	}
 }

@@ -40,7 +63,7 @@ func (s *ssr) ssrRootHandler(w http.ResponseWriter, r *http.Request) {
 		return
 	}

-	if err := tpl.Execute(w, s.newPageData("", s.es.GetLatestPosts(50))); err != nil {
+	if err := tpl.Execute(w, s.newPageData("feed", s.es.GetLatestPosts(50), 1)); err != nil {
 		log.Error(err)
 	}
 }
@@ -52,7 +75,124 @@ func (s *ssr) ssrForumHandler(w http.ResponseWriter, r *http.Request) {
 		return
 	}

-	if err := tpl.Execute(w, s.newPageData("", s.es.GetThreads())); err != nil {
+	vars := mux.Vars(r)
+
+	var num int
+	if _, ok := vars["page"]; ok {
+		num = getPageNum(mux.Vars(r)["page"])
+	}
+
+	if err := tpl.Execute(w, s.newPageData("forum", s.es.GetThreads(num), num)); err != nil {
 		log.Error(err)
 	}
 }
+
+func (s *ssr) threadViewHandler(w http.ResponseWriter, r *http.Request) {
+	tpl, err := s.getTemplate("thread")
+	if err != nil {
+		log.Error(err)
+		return
+	}
+
+	topicid, ok := mux.Vars(r)["topicid"]
+	if !ok {
+		log.Warn("empty topicid")
+		w.WriteHeader(http.StatusBadRequest)
+		w.Write([]byte("error: empty topicid"))
+		return
+	}
+
+	posts := s.es.GetTopic(topicid)
+	thread := "nil"
+	if len(posts) > 0 {
+		thread = posts[0].Subg
+	}
+
+	if err := tpl.Execute(w, s.newPageData(thread, posts, 1)); err != nil {
+		log.Error(err)
+	}
+}
+
+func getPageNum(page string) int {
+	i, err := strconv.ParseInt(page, 0, 64)
+	if err != nil {
+		log.Error(err)
+		return 1
+	}
+	if i < 1 {
+		return 1
+	}
+	return int(i)
+}
+
+func (s *ssr) echoViewHandler(w http.ResponseWriter, r *http.Request) {
+	tpl, err := s.getTemplate("echo")
+	if err != nil {
+		log.Error(err)
+		return
+	}
+
+	vars := mux.Vars(r)
+	echo, ok := vars["echo"]
+	if !ok {
+		log.Warn("empty echo")
+		w.WriteHeader(http.StatusBadRequest)
+		w.Write([]byte("error: empty echo"))
+		return
+	}
+
+	page := 1
+	if _, ok := vars["page"]; ok {
+		page = getPageNum(vars["page"])
+	}
+
+	if err := tpl.Execute(w, s.newPageData(echo, s.es.GetThreads(page, echo), page)); err != nil {
+		log.Error(err)
+	}
+}
+
+func (s *ssr) singleMessageHandler(w http.ResponseWriter, r *http.Request) {
+	tpl, err := s.getTemplate("message")
+	if err != nil {
+		log.Error(err)
+		return
+	}
+
+	msgid, ok := mux.Vars(r)["msgid"]
+	if !ok {
+		log.Warn("empty msgid")
+		w.WriteHeader(http.StatusBadRequest)
+		w.Write([]byte("error: empty msgid"))
+		return
+	}
+
+	if err := tpl.Execute(w, s.newPageData(msgid, s.es.GetMessage(msgid), 1)); err != nil {
+		log.Error(err)
+	}
+}
+
+func (s *ssr) searchHandler(w http.ResponseWriter, r *http.Request) {
+	tpl, err := s.getTemplate("search")
+	if err != nil {
+		log.Error(err)
+		return
+	}
+
+	q := r.URL.Query().Get("query")
+	if q != "" {
+		m, err := json.Marshal(q)
+		if err != nil {
+			log.Error(err)
+			m = []byte("")
+		}
+
+		posts := s.es.DoSearch(string(m))
+		for i := range posts {
+			posts[i].Date = parseTime(posts[i].Date)
+		}
+
+		if err := tpl.Execute(w, s.newPageData("search", posts, 1)); err != nil {
+			log.Error(err)
+		}
+	}
+}
diff --git a/templates/common/header.html b/templates/common/header.html
index 6cdbb29..20c1bd9 100644
--- a/templates/common/header.html
+++ b/templates/common/header.html
@@ -1,14 +1,14 @@
 {{ define "header" }}
-  {{ if .CurrentPage }}staic | {{ .CurrentPage }}{{ else }}dynamic{{ end }}
+  {{ if .CurrentPage }}static | {{ .CurrentPage }}{{ else }}static{{ end }}
-