package node
import (
	"bytes"
	// "encoding/json"
	"io/ioutil"
	"log"
	"net/http"
	"strconv"
	"strings"

	"gitea.difrex.ru/Umbrella/fetcher/i2es"
	"github.com/Jeffail/gabs"
)
const (
	// echoAgg is the name of the Elasticsearch terms aggregation that
	// GetListTXT uses to collect the unique echo area names.
	echoAgg = "echo_uniq"
)
// ESConf re-declares i2es.ESConf as a local type so that the
// node-side Elasticsearch query methods below can be attached to it.
type ESConf i2es.ESConf
// Bucket is a single entry of the Elasticsearch terms aggregation:
// one echo area name together with its document count.
type Bucket struct {
	Key      string `json:"key"`       // echo area name (aggregation term)
	DocCount int    `json:"doc_count"` // number of documents for the term
}
// MakePlainTextMessage renders one Elasticsearch hit as an ii/ok
// plain-text message: the fixed "ii/ok" header, then echo, date,
// author, "null", to, subg, an empty line, and the message body,
// joined with newlines. Missing or non-string fields render as empty
// strings instead of panicking on a failed type assertion.
func MakePlainTextMessage(hit interface{}) string {
	h, _ := hit.(map[string]interface{})
	s, _ := h["_source"].(map[string]interface{})

	// field returns s[key] as a string, or "" when absent or mistyped.
	field := func(key string) string {
		v, _ := s[key].(string)
		return v
	}

	// NOTE(review): "subg" looks like a possible typo for "subj", but it
	// must match the ES document schema — confirm before renaming.
	m := []string{
		"ii/ok",
		field("echo"),
		field("date"),
		field("author"),
		"null",
		field("to"),
		field("subg"),
		"",
		field("message"),
	}

	return strings.Join(m, "\n")
}
|
|
|
|
|
|
|
|
// GetPlainTextMessage ...
|
|
|
|
func (es ESConf) GetPlainTextMessage(msgid string) []byte {
|
|
|
|
var message []byte
|
|
|
|
|
|
|
|
searchURI := strings.Join([]string{es.Host, es.Index, es.Type, "_search"}, "/")
|
|
|
|
searchQ := []byte(strings.Join([]string{
|
|
|
|
`{"query": {"match": {"_id": "`, msgid, `"}}}`}, ""))
|
|
|
|
|
|
|
|
req, err := http.NewRequest("POST", searchURI, bytes.NewBuffer(searchQ))
|
|
|
|
client := &http.Client{}
|
|
|
|
resp, err := client.Do(req)
|
|
|
|
|
|
|
|
defer resp.Body.Close()
|
|
|
|
|
|
|
|
body, err := ioutil.ReadAll(resp.Body)
|
|
|
|
if err != nil {
|
|
|
|
return message
|
|
|
|
}
|
|
|
|
|
|
|
|
esresp, err := gabs.ParseJSON(body)
|
|
|
|
if err != nil {
|
|
|
|
panic(err)
|
|
|
|
}
|
|
|
|
|
|
|
|
hits, _ := esresp.Path("hits.hits").Data().([]interface{})
|
|
|
|
|
|
|
|
return []byte(MakePlainTextMessage(hits[0]))
|
|
|
|
}
|
|
|
|
|
2017-02-13 14:31:18 +03:00
|
|
|
// GetEchoMessageHashes ...
|
|
|
|
func (es ESConf) GetEchoMessageHashes(echo string) []string {
|
|
|
|
var hashes []string
|
|
|
|
|
|
|
|
searchURI := strings.Join([]string{es.Host, es.Index, es.Type, "_search"}, "/")
|
|
|
|
searchQ := []byte(strings.Join([]string{
|
|
|
|
`{"sort": [
|
|
|
|
{"date":{ "order": "desc" }},{ "_score":{ "order": "desc" }}],
|
|
|
|
"query": {"query_string" : {"fields": ["msgid", "echo"], "query":"`, echo, `"}}, "size": 500}`}, ""))
|
|
|
|
|
|
|
|
req, err := http.NewRequest("POST", searchURI, bytes.NewBuffer(searchQ))
|
|
|
|
client := &http.Client{}
|
|
|
|
resp, err := client.Do(req)
|
|
|
|
|
|
|
|
defer resp.Body.Close()
|
|
|
|
|
|
|
|
body, err := ioutil.ReadAll(resp.Body)
|
|
|
|
if err != nil {
|
|
|
|
return hashes
|
|
|
|
}
|
|
|
|
|
|
|
|
esresp, err := gabs.ParseJSON(body)
|
|
|
|
if err != nil {
|
|
|
|
panic(err)
|
|
|
|
}
|
|
|
|
|
|
|
|
hits, _ := esresp.Path("hits.hits").Data().([]interface{})
|
|
|
|
for _, hit := range hits {
|
|
|
|
h := make(map[string]interface{})
|
|
|
|
h = hit.(map[string]interface{})
|
|
|
|
source := make(map[string]interface{})
|
|
|
|
source = h["_source"].(map[string]interface{})
|
|
|
|
hashes = append(hashes, source["msgid"].(string))
|
|
|
|
}
|
|
|
|
|
|
|
|
return hashes
|
|
|
|
}
|
|
|
|
|
2017-02-12 13:19:02 +03:00
|
|
|
// GetListTXT ...
|
2017-02-13 10:24:21 +03:00
|
|
|
func (es ESConf) GetListTXT() []byte {
|
2017-02-12 13:19:02 +03:00
|
|
|
searchURI := strings.Join([]string{es.Host, es.Index, es.Type, "_search"}, "/")
|
|
|
|
searchQ := []byte(`{"aggs": {"echo_uniq": { "terms": { "field": "echo","size": 1000}}}}`)
|
2017-02-13 10:24:21 +03:00
|
|
|
log.Print("Search URI: ", searchURI)
|
2017-02-12 13:19:02 +03:00
|
|
|
|
|
|
|
req, err := http.NewRequest("POST", searchURI, bytes.NewBuffer(searchQ))
|
|
|
|
client := &http.Client{}
|
|
|
|
resp, err := client.Do(req)
|
|
|
|
|
2017-02-13 10:24:21 +03:00
|
|
|
defer resp.Body.Close()
|
2017-02-12 13:19:02 +03:00
|
|
|
|
|
|
|
body, err := ioutil.ReadAll(resp.Body)
|
|
|
|
if err != nil {
|
2017-02-13 10:24:21 +03:00
|
|
|
return []byte("")
|
2017-02-12 13:19:02 +03:00
|
|
|
}
|
|
|
|
|
|
|
|
esresp, err := gabs.ParseJSON(body)
|
|
|
|
if err != nil {
|
|
|
|
panic(err)
|
|
|
|
}
|
2017-02-13 10:24:21 +03:00
|
|
|
|
|
|
|
var uniq map[string]interface{}
|
|
|
|
uniq, _ = esresp.Path(strings.Join([]string{"aggregations", echoAgg}, ".")).Data().(map[string]interface{})
|
2017-02-12 13:19:02 +03:00
|
|
|
|
|
|
|
var echoes []string
|
2017-02-13 10:24:21 +03:00
|
|
|
for _, bucket := range uniq["buckets"].([]interface{}) {
|
|
|
|
b := make(map[string]interface{})
|
|
|
|
b = bucket.(map[string]interface{})
|
|
|
|
count := int(b["doc_count"].(float64))
|
|
|
|
c := strconv.Itoa(count)
|
|
|
|
echostr := strings.Join([]string{b["key"].(string), ":", c, ":"}, "")
|
2017-02-12 13:19:02 +03:00
|
|
|
echoes = append(echoes, echostr)
|
|
|
|
}
|
|
|
|
|
2017-02-13 10:24:21 +03:00
|
|
|
log.Print("Getting ", len(echoes), " echoes")
|
|
|
|
|
|
|
|
return []byte(strings.Join(echoes, "\n"))
|
2017-02-12 13:19:02 +03:00
|
|
|
}
|