2017-02-12 13:19:02 +03:00
|
|
|
package node
|
|
|
|
|
|
|
|
import (
|
|
|
|
"bytes"
|
2018-11-04 10:21:49 +03:00
|
|
|
"encoding/json"
|
2021-04-02 17:11:12 +03:00
|
|
|
"io"
|
2017-02-12 13:19:02 +03:00
|
|
|
"io/ioutil"
|
|
|
|
"net/http"
|
|
|
|
"strconv"
|
|
|
|
"strings"
|
2021-03-25 18:11:00 +03:00
|
|
|
"time"
|
2018-11-04 10:21:49 +03:00
|
|
|
|
|
|
|
"fmt"
|
|
|
|
|
2018-11-04 12:05:23 +03:00
|
|
|
"encoding/base64"
|
|
|
|
|
2018-11-04 11:15:46 +03:00
|
|
|
"gitea.difrex.ru/Umbrella/fetcher/i2es"
|
2018-11-12 15:06:39 +03:00
|
|
|
log "github.com/sirupsen/logrus"
|
2017-02-12 13:19:02 +03:00
|
|
|
)
|
|
|
|
|
|
|
|
const (
	// echoAgg is the name of the "unique echo" aggregation used in
	// Elasticsearch requests/responses elsewhere in this package.
	echoAgg = "echo_uniq"
)
|
|
|
|
|
2017-02-13 14:56:35 +03:00
|
|
|
// MakePlainTextMessage ...
|
2018-11-04 11:15:46 +03:00
|
|
|
func MakePlainTextMessage(hit i2es.ESDoc) []byte {
|
|
|
|
tags := "ii/ok"
|
|
|
|
if hit.Repto != "" {
|
|
|
|
tags += fmt.Sprintf("/repto/%s", hit.Repto)
|
|
|
|
}
|
|
|
|
m := []string{
|
|
|
|
tags,
|
|
|
|
hit.Echo,
|
|
|
|
hit.Date,
|
|
|
|
hit.Author,
|
|
|
|
hit.Address,
|
|
|
|
hit.To,
|
|
|
|
hit.Subg,
|
|
|
|
hit.Message,
|
|
|
|
}
|
2017-02-13 14:56:35 +03:00
|
|
|
|
2018-11-04 11:15:46 +03:00
|
|
|
return []byte(strings.Join(m, "\n"))
|
2017-02-13 14:56:35 +03:00
|
|
|
}
|
|
|
|
|
|
|
|
// GetPlainTextMessage ...
|
|
|
|
func (es ESConf) GetPlainTextMessage(msgid string) []byte {
|
2018-11-04 11:15:46 +03:00
|
|
|
var searchURI string
|
|
|
|
if es.Index != "" && es.Type != "" {
|
|
|
|
searchURI = strings.Join([]string{es.Host, es.Index, es.Type, "_search"}, "/")
|
|
|
|
} else {
|
|
|
|
searchURI = strings.Join([]string{es.Host, "search"}, "/")
|
|
|
|
}
|
2017-02-13 14:56:35 +03:00
|
|
|
|
|
|
|
searchQ := []byte(strings.Join([]string{
|
|
|
|
`{"query": {"match": {"_id": "`, msgid, `"}}}`}, ""))
|
|
|
|
|
|
|
|
req, err := http.NewRequest("POST", searchURI, bytes.NewBuffer(searchQ))
|
2018-11-05 18:24:46 +03:00
|
|
|
if err != nil {
|
|
|
|
log.Error(err.Error())
|
|
|
|
return []byte("")
|
|
|
|
}
|
|
|
|
|
|
|
|
req.Header.Add("Content-Type", "application/json")
|
|
|
|
|
2017-02-13 14:56:35 +03:00
|
|
|
client := &http.Client{}
|
|
|
|
resp, err := client.Do(req)
|
2018-11-04 10:21:49 +03:00
|
|
|
if err != nil {
|
|
|
|
log.Error(err.Error())
|
2018-11-05 18:24:46 +03:00
|
|
|
return []byte("")
|
2018-11-04 10:21:49 +03:00
|
|
|
}
|
2017-02-13 14:56:35 +03:00
|
|
|
|
|
|
|
defer resp.Body.Close()
|
2018-11-04 11:15:46 +03:00
|
|
|
var esr ESSearchResp
|
|
|
|
err = json.NewDecoder(resp.Body).Decode(&esr)
|
2017-02-13 14:56:35 +03:00
|
|
|
if err != nil {
|
2018-11-04 11:15:46 +03:00
|
|
|
log.Error(err.Error())
|
|
|
|
return []byte("")
|
2017-02-13 14:56:35 +03:00
|
|
|
}
|
|
|
|
|
2018-11-04 11:15:46 +03:00
|
|
|
if len(esr.Hits.Hits) > 0 {
|
|
|
|
return MakePlainTextMessage(esr.Hits.Hits[0].Source)
|
2017-02-13 14:56:35 +03:00
|
|
|
}
|
|
|
|
|
2018-11-04 11:15:46 +03:00
|
|
|
return []byte("")
|
2017-02-13 14:56:35 +03:00
|
|
|
}
|
|
|
|
|
2017-02-13 14:31:18 +03:00
|
|
|
// GetEchoMessageHashes ...
|
|
|
|
func (es ESConf) GetEchoMessageHashes(echo string) []string {
|
|
|
|
var hashes []string
|
2018-11-04 11:15:46 +03:00
|
|
|
var searchURI string
|
|
|
|
if es.Index != "" && es.Type != "" {
|
|
|
|
searchURI = strings.Join([]string{es.Host, es.Index, es.Type, "_search"}, "/")
|
|
|
|
} else {
|
|
|
|
searchURI = strings.Join([]string{es.Host, "search"}, "/")
|
|
|
|
}
|
2017-02-13 14:31:18 +03:00
|
|
|
|
|
|
|
searchQ := []byte(strings.Join([]string{
|
|
|
|
`{"sort": [
|
|
|
|
{"date":{ "order": "desc" }},{ "_score":{ "order": "desc" }}],
|
2021-04-02 17:11:12 +03:00
|
|
|
"query": {"query_string" : {"fields": ["echo.keyword"], "query":"`, echo, `"}}, "size": 500}`}, ""))
|
2017-02-13 14:31:18 +03:00
|
|
|
|
|
|
|
req, err := http.NewRequest("POST", searchURI, bytes.NewBuffer(searchQ))
|
2018-11-05 18:24:46 +03:00
|
|
|
if err != nil {
|
|
|
|
log.Error(err.Error())
|
|
|
|
return hashes
|
|
|
|
}
|
|
|
|
req.Header.Add("Content-Type", "application/json")
|
|
|
|
|
2017-02-13 14:31:18 +03:00
|
|
|
client := &http.Client{}
|
|
|
|
resp, err := client.Do(req)
|
2018-11-04 10:21:49 +03:00
|
|
|
if err != nil {
|
|
|
|
log.Error(err.Error())
|
2018-11-04 11:15:46 +03:00
|
|
|
return hashes
|
2018-11-04 10:21:49 +03:00
|
|
|
}
|
2017-02-13 14:31:18 +03:00
|
|
|
defer resp.Body.Close()
|
|
|
|
|
2021-04-09 22:04:49 +03:00
|
|
|
content, err := ioutil.ReadAll(resp.Body)
|
|
|
|
if err != nil {
|
|
|
|
log.Error(err.Error())
|
|
|
|
hashes = append(hashes, "error: Internal error")
|
|
|
|
return hashes
|
|
|
|
}
|
2018-11-05 18:24:46 +03:00
|
|
|
|
2018-11-04 11:15:46 +03:00
|
|
|
var esr ESSearchResp
|
2021-04-09 22:04:49 +03:00
|
|
|
err = json.Unmarshal(content, &esr)
|
2017-02-13 14:31:18 +03:00
|
|
|
if err != nil {
|
2018-11-04 11:15:46 +03:00
|
|
|
log.Error(err.Error())
|
|
|
|
hashes = append(hashes, "error: Internal error")
|
2017-02-13 14:31:18 +03:00
|
|
|
return hashes
|
|
|
|
}
|
|
|
|
|
2018-11-04 11:15:46 +03:00
|
|
|
for _, hit := range esr.Hits.Hits {
|
|
|
|
hashes = append(hashes, hit.Source.MsgID)
|
2017-02-13 14:31:18 +03:00
|
|
|
}
|
|
|
|
|
|
|
|
return hashes
|
|
|
|
}
|
|
|
|
|
2017-02-14 00:05:12 +03:00
|
|
|
// GetLimitedEchoMessageHashes returns up to `limit` message IDs for the
// given echo. The sort direction depends on the sign of offset: date
// descending for offset <= 0, ascending otherwise.
//
// NOTE(review): the offset VALUE is never sent to Elasticsearch — only
// its sign selects the sort order. If true pagination was intended,
// a "from" clause is missing; confirm against callers.
func (es ESConf) GetLimitedEchoMessageHashes(echo string, offset int, limit int) []string {
	var hashes []string

	// Check offset: its sign selects ascending vs. descending order.
	var order string
	if offset <= 0 {
		order = "desc"
	} else {
		order = "asc"
	}

	l := strconv.Itoa(limit)

	searchQ := []byte(strings.Join([]string{
		`{"sort": [
{"date":{ "order": "`, order, `" }},{ "_score":{ "order": "`, order, `" }}],
"query": {"query_string" : {"fields": ["msgid.keyword", "echo.keyword"], "query":"`, echo, `"}}, "size":`, l, `}`}, ""))

	req, err := http.NewRequest("POST", es.searchURI(), bytes.NewBuffer(searchQ))
	if err != nil {
		log.Error(err.Error())
		return hashes
	}
	req.Header.Add("Content-Type", "application/json")

	client := &http.Client{}
	resp, err := client.Do(req)
	if err != nil {
		log.Error(err.Error())
		return hashes
	}
	defer resp.Body.Close()

	var esr ESSearchResp
	err = json.NewDecoder(resp.Body).Decode(&esr)
	if err != nil {
		log.Error(err.Error())
		return hashes
	}
	for _, hit := range esr.Hits.Hits {
		hashes = append(hashes, hit.Source.MsgID)
	}

	return hashes
}
|
|
|
|
|
2021-03-30 19:47:25 +03:00
|
|
|
// DoSearch runs a full-text search over the "message" and "subg" fields
// and returns up to 100 matching documents, newest first. Nil is
// returned on any request/decode failure.
//
// NOTE(review): query is spliced into the JSON body verbatim, so the
// caller must supply an already-quoted/escaped JSON string value;
// unescaped quotes would break (or inject into) the request — confirm
// how callers build this argument.
func (es ESConf) DoSearch(query string) []i2es.ESDoc {
	q := `{"sort": [
{"date":{ "order": "desc" }},{ "_score":{ "order": "desc" }}],
"query": {"query_string" : {"fields": ["message", "subg"], "query":` + query + `}}, "size": 100}`

	req, err := http.NewRequest("POST", es.searchURI(), bytes.NewBuffer([]byte(q)))
	if err != nil {
		log.Error(err.Error())
		return nil
	}
	req.Header.Add("Content-Type", "application/json")

	client := &http.Client{}
	resp, err := client.Do(req)
	if err != nil {
		log.Error(err.Error())
		return nil
	}
	defer resp.Body.Close()

	var esr ESSearchResp
	err = json.NewDecoder(resp.Body).Decode(&esr)
	if err != nil {
		log.Error(err.Error())
		return nil
	}

	// Collect the raw documents from the search hits.
	var posts []i2es.ESDoc
	for _, hit := range esr.Hits.Hits {
		posts = append(posts, hit.Source)
	}

	return posts
}
|
|
|
|
|
2018-11-04 12:05:23 +03:00
|
|
|
// GetUMMessages implements the /u/m scheme: msgs is a slash-separated
// list of message IDs. Each found message is returned as
// "msgid:<base64 of the plain-text bundle>".
func (es ESConf) GetUMMessages(msgs string) []string {
	var encodedMessages []string

	// First get messages list
	messages := strings.Split(msgs, "/")
	var searchURI string
	if es.Index != "" && es.Type != "" {
		searchURI = strings.Join([]string{es.Host, es.Index, es.Type, "_search"}, "/")
	} else {
		searchURI = strings.Join([]string{es.Host, "search"}, "/")
	}
	// Match any of the requested IDs against msgid.keyword.
	query := []byte(`
{
	"query": {
		"query_string" : {
			"fields": ["msgid.keyword"],
			"query":"` + strings.Join(messages, " OR ") + `"
		}
	},
	"sort": [{"date":{ "order": "desc" }},
		{ "_score":{ "order": "desc" }}
	]
}`)
	req, err := http.NewRequest("POST", searchURI, bytes.NewBuffer(query))
	if err != nil {
		log.Error(err.Error())
		return encodedMessages
	}
	req.Header.Add("Content-Type", "application/json")

	client := &http.Client{}
	resp, err := client.Do(req)
	if err != nil {
		log.Error(err.Error())
		return encodedMessages
	}
	defer resp.Body.Close()

	var esr ESSearchResp
	err = json.NewDecoder(resp.Body).Decode(&esr)
	if err != nil {
		log.Error(err.Error())
		return encodedMessages
	}

	for _, hit := range esr.Hits.Hits {
		// "msgid:base64(plaintext)" — the bundle format fetchers expect.
		m := fmt.Sprintf("%s:%s",
			hit.Source.MsgID,
			base64.StdEncoding.EncodeToString(MakePlainTextMessage(hit.Source)))
		encodedMessages = append(encodedMessages, m)
	}

	return encodedMessages
}
|
|
|
|
|
2017-02-14 00:05:12 +03:00
|
|
|
// GetUEchoMessageHashes ...
|
|
|
|
func (es ESConf) GetUEchoMessageHashes(echoes string) []string {
|
|
|
|
var echohashes []string
|
|
|
|
// First get echoes list
|
|
|
|
el := strings.Split(echoes, "/")
|
|
|
|
|
|
|
|
// Check offset and limit
|
|
|
|
var offset int
|
|
|
|
var limit int
|
|
|
|
withOL := false
|
|
|
|
if strings.Contains(el[len(el)-1], ":") {
|
|
|
|
oflim := strings.Split(el[len(el)-1], ":")
|
|
|
|
o, err := strconv.Atoi(oflim[0])
|
|
|
|
l, err := strconv.Atoi(oflim[1])
|
|
|
|
if err != nil {
|
2021-03-25 18:25:40 +03:00
|
|
|
log.Error(err)
|
2017-02-14 00:05:12 +03:00
|
|
|
} else {
|
|
|
|
offset = o
|
|
|
|
limit = l
|
|
|
|
withOL = true
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
eh := make(map[string][]string)
|
|
|
|
var curEcho string
|
|
|
|
for i, echo := range el {
|
|
|
|
if echo == "" {
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
|
|
|
|
if !strings.Contains(echo, ":") {
|
|
|
|
curEcho = echo
|
|
|
|
}
|
|
|
|
|
|
|
|
if withOL {
|
|
|
|
recEcho := es.GetLimitedEchoMessageHashes(curEcho, offset, limit)
|
|
|
|
eh[curEcho] = make([]string, len(curEcho))
|
|
|
|
eh[curEcho] = append(eh[curEcho], recEcho...)
|
|
|
|
|
|
|
|
} else {
|
|
|
|
recEcho := es.GetEchoMessageHashes(curEcho)
|
|
|
|
eh[curEcho] = make([]string, len(recEcho))
|
|
|
|
eh[curEcho] = append(eh[curEcho], recEcho...)
|
|
|
|
}
|
|
|
|
if i == len(el) {
|
|
|
|
break
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// Make standard output:
|
|
|
|
// echo.name
|
|
|
|
// Some20SimbolsHash333
|
|
|
|
for k, v := range eh {
|
|
|
|
echohashes = append(echohashes, k)
|
2021-04-09 22:04:49 +03:00
|
|
|
if k == "" || k == "\n" {
|
2017-02-14 00:05:12 +03:00
|
|
|
continue
|
|
|
|
}
|
|
|
|
for _, e := range v {
|
|
|
|
if e == "" {
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
echohashes = append(echohashes, e)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2021-04-09 22:04:49 +03:00
|
|
|
return addNewLineToLastWord(echohashes)
|
2017-02-14 00:05:12 +03:00
|
|
|
}
|
|
|
|
|
2018-11-04 12:05:23 +03:00
|
|
|
// GetXC implements /x/c: echoes is a slash-separated list of echo
// names, and the result is one "echoname:doc_count" entry per echo
// found in the index.
func (es ESConf) GetXC(echoes string) []string {
	var searchURI string
	var counts []string
	if es.Index != "" && es.Type != "" {
		searchURI = strings.Join([]string{es.Host, es.Index, es.Type, "_search"}, "/")
	} else {
		searchURI = strings.Join([]string{es.Host, "search"}, "/")
	}

	// "size": 0 — only the aggregations are needed, not the hits.
	query := []byte(`
{
	"query": {
		"query_string" : {
			"fields": ["echo.keyword"],
			"query": "` + strings.Join(strings.Split(echoes, "/"), " OR ") + `"
		}
	},
	"size": 0,
	"aggs": {
		"uniqueEcho": {
			"cardinality": {
				"field": "echo.keyword"
			}
		},
		"echo": {
			"terms": {
				"field": "echo.keyword",
				"size": 1000
			}
		}
	}
}
`)
	req, err := http.NewRequest("POST", searchURI, bytes.NewBuffer(query))
	if err != nil {
		log.Error(err.Error())
		return counts
	}
	req.Header.Add("Content-Type", "application/json")

	client := &http.Client{}
	resp, err := client.Do(req)
	if err != nil {
		log.Error(err.Error())
		return counts
	}
	defer resp.Body.Close()

	var esr EchoAggregations
	err = json.NewDecoder(resp.Body).Decode(&esr)
	if err != nil {
		log.Error(err.Error())
		return counts
	}
	log.Infof("%+v", esr)

	// Each terms bucket carries an echo name and its document count.
	for _, hit := range esr.EchoAgg["echo"].Buckets {
		counts = append(counts, fmt.Sprintf("%s:%d", hit.Key, hit.DocCount))
	}
	return counts
}
|
|
|
|
|
2021-03-26 11:11:14 +03:00
|
|
|
// ThreadBucket is one bucket of the "topics" terms aggregation: the
// thread id (Key), the number of posts in the thread (DocCount), and
// the top-hits sub-aggregation holding the selected post(s) (Post).
type ThreadBucket struct {
	DocCount int64  `json:"doc_count"`
	Key      string `json:"key"`
	Post     Hits
}
|
|
|
|
|
2021-03-30 19:47:25 +03:00
|
|
|
// defaultEchoes is the echo set used when a caller does not name any
// echoes. Values are pre-quoted for direct embedding into JSON queries.
var defaultEchoes = []string{`"idec.talks"`, `"pipe.2032"`, `"linux.14"`, `"develop.16"`, `"dynamic.local"`, `"std.club"`, `"std.hugeping"`, `"difrex.blog"`, `"ii.test.14"`}
|
|
|
|
|
|
|
|
// GetTopic returns every post of the thread identified by topicID
// (term match on topicid.keyword), oldest first, up to 1000 documents,
// with trimmed message bodies and human-readable dates.
func (es ESConf) GetTopic(topicID string) (posts []i2es.ESDoc) {
	query := []byte(strings.Join([]string{
		`{"sort": [{"date": {"order": "asc"}},
{"_score": {"order": "desc" }}], "size":1000,"query": {"term": {"topicid.keyword": "`, topicID, `"}}}`}, ""))

	req, err := http.NewRequest("POST", es.searchURI(), bytes.NewReader([]byte(query)))
	if err != nil {
		log.Error(err)
		return
	}
	req.Header.Add("Content-Type", "application/json")

	client := &http.Client{}
	resp, err := client.Do(req)
	if err != nil {
		log.Error(err)
		return
	}
	defer resp.Body.Close()

	var esr ESSearchResp
	err = json.NewDecoder(resp.Body).Decode(&esr)
	if err != nil {
		log.Error(err.Error())
		return nil
	}

	for _, hit := range esr.Hits.Hits {
		// Normalize the stored document for presentation.
		hit.Source.Message = strings.Trim(hit.Source.Message, "\n")
		hit.Source.Date = parseTime(hit.Source.Date)
		posts = append(posts, hit.Source)
	}

	return
}
|
|
|
|
|
|
|
|
func (es ESConf) GetMessage(msgID string) (posts []i2es.ESDoc) {
|
|
|
|
query := []byte(strings.Join([]string{
|
|
|
|
`{"sort": [{"date": {"order": "asc"}},
|
|
|
|
{"_score": {"order": "desc" }}], "size":1000,"query": {"term": {"msgid.keyword": "`, msgID, `"}}}`}, ""))
|
|
|
|
|
|
|
|
req, err := http.NewRequest("POST", es.searchURI(), bytes.NewReader([]byte(query)))
|
|
|
|
if err != nil {
|
|
|
|
log.Error(err)
|
|
|
|
return
|
|
|
|
}
|
2021-04-02 17:11:12 +03:00
|
|
|
req.Header.Add("Content-Type", "application/json")
|
2021-03-30 19:47:25 +03:00
|
|
|
|
|
|
|
client := &http.Client{}
|
|
|
|
resp, err := client.Do(req)
|
|
|
|
if err != nil {
|
|
|
|
log.Error(err)
|
|
|
|
return
|
|
|
|
}
|
|
|
|
defer resp.Body.Close()
|
|
|
|
|
|
|
|
var esr ESSearchResp
|
|
|
|
err = json.NewDecoder(resp.Body).Decode(&esr)
|
|
|
|
if err != nil {
|
|
|
|
log.Error(err.Error())
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
|
|
|
for _, hit := range esr.Hits.Hits {
|
|
|
|
hit.Source.Message = strings.Trim(hit.Source.Message, "\n")
|
|
|
|
hit.Source.Date = parseTime(hit.Source.Date)
|
|
|
|
posts = append(posts, hit.Source)
|
|
|
|
}
|
|
|
|
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
// GetThreads returns the newest post of each topic (up to 100 topics)
// within a 30-day window selected by pageNum: page 1 covers the last
// 30 days, page 2 the 30 days before that, and so on.
func (es ESConf) GetThreads(pageNum int, echoes ...string) (posts []i2es.ESDoc) {
	// Use the default echo set unless specific echoes were requested;
	// names are wrapped in quotes for direct JSON embedding.
	ech := defaultEchoes
	if len(echoes) > 0 {
		ech = []string{}
		for _, echo := range echoes {
			ech = append(ech, fmt.Sprintf(`"%s"`, echo))
		}
	}
	// Sliding 30-day date window.
	rangeStr := `"from":"now-30d","to":"now-0d"`
	if pageNum > 1 {
		to := 30*pageNum - 30
		from := 30 * pageNum
		rangeStr = fmt.Sprintf(`"from":"now-%dd","to":"now-%dd"`, from, to)
	}

	log.Debug(rangeStr)

	// Terms aggregation on topicid.keyword keeping only the newest post
	// per topic, with a trimmed _source field list.
	query := `{"sort":[{"date":{"order":"desc"}}],"aggs":{"topics":{"terms":{"field":"topicid.keyword","size":100},"aggs":{"post":{"top_hits":{"size":1,"sort":[{"date":{"order":"desc"}}],"_source":{"include": ["subg","author","date","echo","topicid","address", "repto"]}}}}}},"query":{"bool":{"must":[{"range":{"date":{` + rangeStr + `}}},{"constant_score":{"filter":{"terms":{"echo.keyword": [` +
		strings.Join(ech, ",") +
		`]}}}}]}}}`
	log.Debug("Run: ", query)

	req, err := http.NewRequest("POST", es.searchURI(), bytes.NewReader([]byte(query)))
	if err != nil {
		log.Error(err)
		return
	}
	req.Header.Add("Content-Type", "application/json")

	client := &http.Client{}
	resp, err := client.Do(req)
	if err != nil {
		log.Error(err)
		return
	}
	defer resp.Body.Close()

	var data ESAggsResp
	if err := json.NewDecoder(resp.Body).Decode(&data); err != nil {
		log.Error(err)
		return
	}

	for _, bucket := range data.Aggregations.Topics.Buckets {
		// Empty topicid
		if bucket.Key == "" {
			continue
		}
		for _, post := range bucket.Post.Hits.Hits {
			posts = append(posts, post.Source)
		}
	}

	return
}
|
|
|
|
|
|
|
|
func (es ESConf) GetThreadsYear(pageNum int, echoes ...string) (posts []i2es.ESDoc) {
|
|
|
|
ech := defaultEchoes
|
|
|
|
if len(echoes) > 0 {
|
|
|
|
ech = []string{}
|
|
|
|
for _, echo := range echoes {
|
|
|
|
ech = append(ech, fmt.Sprintf(`"%s"`, echo))
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
query := `{"sort":[{"date":{"order":"desc"}}],"aggs":{"topics":{"terms":{"field":"topicid.keyword","size":500},"aggs":{"post":{"top_hits":{"size":1,"sort":[{"date":{"order":"desc"}}],"_source":{"include": ["subg","author","date","echo","topicid","address", "repto"]}}}}}},"query":{"bool":{"must":[{"range":{"date":{"from": "now-365d", "to": "now"}}}, {"constant_score":{"filter":{"terms":{"echo.keyword": [` +
|
|
|
|
strings.Join(ech, ",") +
|
|
|
|
`]}}}}]}}}`
|
|
|
|
log.Debug("Run: ", query)
|
|
|
|
|
|
|
|
req, err := http.NewRequest("POST", es.searchURI(), bytes.NewReader([]byte(query)))
|
|
|
|
if err != nil {
|
|
|
|
log.Error(err)
|
|
|
|
return
|
|
|
|
}
|
|
|
|
req.Header.Add("Content-Type", "application/json")
|
|
|
|
|
|
|
|
client := &http.Client{}
|
|
|
|
resp, err := client.Do(req)
|
|
|
|
if err != nil {
|
|
|
|
log.Error(err)
|
|
|
|
return
|
|
|
|
}
|
|
|
|
defer resp.Body.Close()
|
|
|
|
|
|
|
|
if resp.StatusCode > 200 {
|
|
|
|
d, _ := io.ReadAll(resp.Body)
|
|
|
|
log.Debug(string(d))
|
|
|
|
}
|
|
|
|
|
|
|
|
var data ESAggsResp
|
|
|
|
if err := json.NewDecoder(resp.Body).Decode(&data); err != nil {
|
|
|
|
log.Error(err)
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
for _, bucket := range data.Aggregations.Topics.Buckets {
|
|
|
|
// Empty topicid
|
|
|
|
if bucket.Key == "" {
|
|
|
|
continue
|
|
|
|
}
|
2021-03-26 11:11:14 +03:00
|
|
|
for _, post := range bucket.Post.Hits.Hits {
|
|
|
|
posts = append(posts, post.Source)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2021-03-25 18:11:00 +03:00
|
|
|
// GetLatestPosts returns the sum most recent posts across all echoes,
// newest first, with formatted dates and trimmed message bodies.
// Nil is returned on any request/decode failure.
func (es ESConf) GetLatestPosts(sum int) []i2es.ESDoc {
	log.Debug(sum)
	query := fmt.Sprintf(`{"sort": [{"date": {"order": "desc"}}, {"_score": {"order": "desc" }}], "size": %d}`, sum)
	log.Debugf("Do %s request", query)

	req, err := http.NewRequest("POST", es.searchURI(), bytes.NewBuffer([]byte(query)))
	if err != nil {
		log.Error(err.Error())
		return nil
	}
	req.Header.Add("Content-Type", "application/json")

	client := &http.Client{}
	resp, err := client.Do(req)
	if err != nil {
		log.Error(err.Error())
		return nil
	}
	defer resp.Body.Close()

	var esr ESSearchResp
	err = json.NewDecoder(resp.Body).Decode(&esr)
	if err != nil {
		log.Error(err.Error())
		return nil
	}

	// Normalize each document for presentation.
	var posts []i2es.ESDoc
	for _, hit := range esr.Hits.Hits {
		hit.Source.Date = parseTime(hit.Source.Date)
		hit.Source.Message = strings.Trim(hit.Source.Message, "\n")
		posts = append(posts, hit.Source)
	}

	return posts
}
|
|
|
|
|
|
|
|
// parseTime converts a unix-epoch string (seconds) into its UnixDate
// textual form; it yields "" when t is not a valid decimal integer.
func parseTime(t string) string {
	seconds, err := strconv.ParseInt(t, 10, 64)
	if err != nil {
		return ""
	}
	return time.Unix(seconds, 0).Format(time.UnixDate)
}
|
|
|
|
|
|
|
|
// echo is one entry of the echo list: the echo name and the number of
// documents (posts) stored in it.
type echo struct {
	Name string
	Docs int64
}
|
|
|
|
|
|
|
|
// GetEchoesList asks Elasticsearch for all echoes via a terms
// aggregation on echo.keyword (up to 1000 buckets) and returns each
// echo together with its document count. Nil is returned on failure.
func (es ESConf) GetEchoesList() []echo {
	// "size": 0 — only the aggregations are needed, not the hits.
	searchQ := []byte(`{
	"size": 0,
	"aggs": {
		"uniqueEcho": {
			"cardinality": {
				"field": "echo.keyword"
			}
		},
		"echo": {
			"terms": {
				"field": "echo.keyword",
				"size": 1000
			}
		}
	}
}`)
	log.Debugf("Do %s request", searchQ)
	req, err := http.NewRequest("POST", es.searchURI(), bytes.NewBuffer(searchQ))
	if err != nil {
		log.Error(err.Error())
		return nil
	}
	req.Header.Add("Content-Type", "application/json")

	client := &http.Client{}
	resp, err := client.Do(req)
	if err != nil {
		log.Error(err.Error())
		return nil
	}

	defer resp.Body.Close()

	var esr EchoAggregations
	err = json.NewDecoder(resp.Body).Decode(&esr)
	if err != nil {
		log.Error(err.Error())
		return nil
	}

	// One bucket per echo: name + post count.
	var echoes []echo
	for _, bucket := range esr.EchoAgg["echo"].Buckets {
		echoes = append(echoes, echo{bucket.Key, int64(bucket.DocCount)})
	}

	return echoes
}
|
|
|
|
|
|
|
|
// GetListTXT ...
|
|
|
|
func (es ESConf) GetListTXT() []byte {
|
|
|
|
var listTXT []string
|
|
|
|
echoes := es.GetEchoesList()
|
|
|
|
for _, echo := range echoes {
|
|
|
|
listTXT = append(listTXT, fmt.Sprintf("%s:%d:(TODO) description support", echo.Name, echo.Docs))
|
2017-02-12 13:19:02 +03:00
|
|
|
}
|
2021-03-25 18:11:00 +03:00
|
|
|
// Add new line to be more compatible with fetchers
|
|
|
|
listTXT[len(listTXT)-1] = listTXT[len(listTXT)-1] + "\n"
|
2017-02-13 10:24:21 +03:00
|
|
|
|
2021-03-25 18:11:00 +03:00
|
|
|
return []byte(strings.Join(listTXT, "\n"))
|
2017-02-12 13:19:02 +03:00
|
|
|
}
|