2023-11-05 11:57:52 +08:00
|
|
|
package services
|
|
|
|
|
|
|
|
import (
|
|
|
|
"context"
|
2023-12-18 00:58:20 +08:00
|
|
|
"encoding/csv"
|
|
|
|
"encoding/hex"
|
2023-11-16 00:21:54 +08:00
|
|
|
"encoding/json"
|
2023-11-05 11:57:52 +08:00
|
|
|
"errors"
|
|
|
|
"fmt"
|
|
|
|
"github.com/redis/go-redis/v9"
|
2023-12-18 00:58:20 +08:00
|
|
|
"github.com/wailsapp/wails/v2/pkg/runtime"
|
2023-11-05 11:57:52 +08:00
|
|
|
"net/url"
|
2023-12-18 00:58:20 +08:00
|
|
|
"os"
|
2023-11-17 01:24:04 +08:00
|
|
|
"slices"
|
2023-11-05 11:57:52 +08:00
|
|
|
"sort"
|
|
|
|
"strconv"
|
|
|
|
"strings"
|
|
|
|
"sync"
|
|
|
|
"sync/atomic"
|
|
|
|
"time"
|
|
|
|
"tinyrdm/backend/consts"
|
|
|
|
"tinyrdm/backend/types"
|
|
|
|
"tinyrdm/backend/utils/coll"
|
2024-01-27 14:36:14 +08:00
|
|
|
convutil "tinyrdm/backend/utils/convert"
|
2023-11-05 11:57:52 +08:00
|
|
|
redis2 "tinyrdm/backend/utils/redis"
|
|
|
|
sliceutil "tinyrdm/backend/utils/slice"
|
|
|
|
strutil "tinyrdm/backend/utils/string"
|
|
|
|
)
|
|
|
|
|
|
|
|
// slowLogItem is the JSON-serializable form of one redis SLOWLOG entry
// as delivered to the frontend.
type slowLogItem struct {
	Timestamp int64  `json:"timestamp"` // when the command was executed
	Client    string `json:"client"`    // client name that issued the command
	Addr      string `json:"addr"`      // client network address
	Cmd       string `json:"cmd"`       // the executed command text
	Cost      int64  `json:"cost"`      // execution cost (presumably microseconds as reported by SLOWLOG — confirm)
}
|
|
|
|
|
2023-11-08 23:45:33 +08:00
|
|
|
// entryCursor tracks the incremental-load position inside a single key's
// entries (hash fields, list items, stream entries, ...), so repeated
// detail requests can resume where the previous one stopped.
type entryCursor struct {
	DB      int    // database index the cursor belongs to
	Type    string // entry type; the setters visible in this file always leave it ""
	Key     string // key whose entries are being scanned
	Pattern string // match pattern the scan was started with
	Cursor  uint64 // SCAN-style cursor position
	XLast   string // last stream pos
}
|
|
|
|
|
2023-11-05 11:57:52 +08:00
|
|
|
// connectionItem bundles an opened redis client with its cancellable
// context and per-database scan state.
type connectionItem struct {
	client      redis.UniversalClient // the live client for this server
	ctx         context.Context       // context every command for this connection runs under
	cancelFunc  context.CancelFunc    // cancels ctx; invoked before the client is closed
	cursor      map[int]uint64        // current cursor of databases
	entryCursor map[int]entryCursor   // current entry cursor of databases
	stepSize    int64                 // number of keys loaded per scan step
	db          int                   // current database index
}
|
|
|
|
|
|
|
|
// browserService implements the data-browser backend: it caches one redis
// connection per server name and exposes key scanning and inspection
// operations to the frontend.
type browserService struct {
	ctx        context.Context            // application context, assigned by Start
	connMap    map[string]*connectionItem // opened connections keyed by server name
	cmdHistory []cmdHistoryItem           // command log appended by the client hook
	mutex      sync.Mutex                 // guards connMap mutation (see getRedisClient)
}
|
|
|
|
|
|
|
|
// browser is the package-level singleton returned by Browser().
var browser *browserService

// onceBrowser guards one-time initialization of the singleton.
var onceBrowser sync.Once
|
|
|
|
|
|
|
|
func Browser() *browserService {
|
|
|
|
if browser == nil {
|
|
|
|
onceBrowser.Do(func() {
|
|
|
|
browser = &browserService{
|
2023-12-06 18:25:51 +08:00
|
|
|
connMap: map[string]*connectionItem{},
|
2023-11-05 11:57:52 +08:00
|
|
|
}
|
|
|
|
})
|
|
|
|
}
|
|
|
|
return browser
|
|
|
|
}
|
|
|
|
|
|
|
|
// Start stores the application context. It must be called before any other
// method of the service, since b.ctx is used to derive per-connection
// contexts and to run connection-setup commands.
func (b *browserService) Start(ctx context.Context) {
	b.ctx = ctx
}
|
|
|
|
|
|
|
|
// Stop cancels and closes every cached connection, then resets the
// connection map. Intended for application shutdown.
func (b *browserService) Stop() {
	for _, item := range b.connMap {
		if item.client != nil {
			// cancel the connection's context first so in-flight commands abort
			if item.cancelFunc != nil {
				item.cancelFunc()
			}
			item.client.Close()
		}
	}
	// NOTE(review): connMap is guarded by b.mutex in getRedisClient but not
	// here — presumably Stop only runs after all requests finished; confirm.
	b.connMap = map[string]*connectionItem{}
}
|
|
|
|
|
|
|
|
// OpenConnection open redis server connection
//
// Opens (or reuses) the connection identified by name, fixes up the last
// selected database against the connection's DB filter, then builds the
// database list returned to the frontend:
//   - cluster: a single pseudo "db0" whose MaxKeys is the key count summed
//     over all master nodes;
//   - standalone: one entry per database, enriched with keyspace statistics
//     parsed from "INFO keyspace".
//
// On success resp.Data contains "db" (database list), "view" (preferred key
// view) and "lastDB" (the connection's stored last database index).
func (b *browserService) OpenConnection(name string) (resp types.JSResp) {
	// get connection config
	selConn := Connection().getConnection(name)

	// correct last database index so it is not excluded by the DB filter
	// NOTE(review): both branches index DBFilterList[0] — this panics when the
	// filter list is empty; presumably the UI guarantees a non-empty list when
	// a filter type is set — confirm.
	lastDB := selConn.LastDB
	if selConn.DBFilterType == "show" && !sliceutil.Contains(selConn.DBFilterList, lastDB) {
		lastDB = selConn.DBFilterList[0]
	} else if selConn.DBFilterType == "hide" && sliceutil.Contains(selConn.DBFilterList, lastDB) {
		lastDB = selConn.DBFilterList[0]
	}
	if lastDB != selConn.LastDB {
		Connection().SaveLastDB(name, lastDB)
	}

	item, err := b.getRedisClient(name, lastDB)
	if err != nil {
		resp.Msg = err.Error()
		return
	}

	client, ctx := item.client, item.ctx
	var totaldb int
	if selConn.DBFilterType == "" || selConn.DBFilterType == "none" {
		// get total databases; best-effort — the inner errs are deliberately
		// shadowed so a failed CONFIG GET just leaves totaldb at 0
		if config, err := client.ConfigGet(ctx, "databases").Result(); err == nil {
			if total, err := strconv.Atoi(config["databases"]); err == nil {
				totaldb = total
			}
		}
	}

	// parse all db, response content like below
	var dbs []types.ConnectionDB
	var clusterKeyCount int64
	cluster, isCluster := client.(*redis.ClusterClient)
	if isCluster {
		// sum DBSIZE over all masters; atomic because ForEachMaster may run
		// the callback concurrently
		var keyCount atomic.Int64
		err = cluster.ForEachMaster(ctx, func(ctx context.Context, cli *redis.Client) error {
			if size, serr := cli.DBSize(ctx).Result(); serr != nil {
				return serr
			} else {
				keyCount.Add(size)
			}
			return nil
		})
		if err != nil {
			resp.Msg = "get db size error:" + err.Error()
			return
		}
		clusterKeyCount = keyCount.Load()

		// only one database in cluster mode
		dbs = []types.ConnectionDB{
			{
				Name:    "db0",
				Index:   0,
				MaxKeys: int(clusterKeyCount),
			},
		}
	} else {
		// get database info
		var res string
		res, err = client.Info(ctx, "keyspace").Result()
		if err != nil {
			resp.Msg = "get server info fail:" + err.Error()
			return
		}
		info := b.parseInfo(res)

		if totaldb <= 0 {
			// cannot retrieve the database count by "CONFIG GET databases", try to get max index from keyspace
			keyspace := info["Keyspace"]
			var db, maxDB int
			for dbName := range keyspace {
				if db, err = strconv.Atoi(strings.TrimLeft(dbName, "db")); err == nil {
					if maxDB < db {
						maxDB = db
					}
				}
			}
			totaldb = maxDB + 1
		}

		// queryDB builds the ConnectionDB entry for one database index,
		// attaching keyspace stats when the database is non-empty
		queryDB := func(idx int) types.ConnectionDB {
			dbName := "db" + strconv.Itoa(idx)
			dbInfoStr := info["Keyspace"][dbName]
			var alias string
			if selConn.Alias != nil {
				alias = selConn.Alias[idx]
			}
			if len(dbInfoStr) > 0 {
				dbInfo := b.parseDBItemInfo(dbInfoStr)
				return types.ConnectionDB{
					Name:    dbName,
					Alias:   alias,
					Index:   idx,
					MaxKeys: dbInfo["keys"],
					Expires: dbInfo["expires"],
					AvgTTL:  dbInfo["avg_ttl"],
				}
			} else {
				return types.ConnectionDB{
					Name:  dbName,
					Alias: alias,
					Index: idx,
				}
			}
		}

		// apply the configured DB filter when assembling the list
		switch selConn.DBFilterType {
		case "show":
			filterList := sliceutil.Unique(selConn.DBFilterList)
			for _, idx := range filterList {
				dbs = append(dbs, queryDB(idx))
			}
		case "hide":
			hiddenList := coll.NewSet(selConn.DBFilterList...)
			for idx := 0; idx < totaldb; idx++ {
				if !hiddenList.Contains(idx) {
					dbs = append(dbs, queryDB(idx))
				}
			}
		default:
			for idx := 0; idx < totaldb; idx++ {
				dbs = append(dbs, queryDB(idx))
			}
		}
	}

	resp.Success = true
	resp.Data = map[string]any{
		"db":     dbs,
		"view":   selConn.KeyView,
		"lastDB": selConn.LastDB,
	}
	return
}
|
|
|
|
|
|
|
|
// CloseConnection close redis server connection
|
|
|
|
func (b *browserService) CloseConnection(name string) (resp types.JSResp) {
|
|
|
|
item, ok := b.connMap[name]
|
|
|
|
if ok {
|
|
|
|
delete(b.connMap, name)
|
|
|
|
if item.client != nil {
|
2024-01-02 00:34:48 +08:00
|
|
|
if item.cancelFunc != nil {
|
|
|
|
item.cancelFunc()
|
|
|
|
}
|
2023-11-05 11:57:52 +08:00
|
|
|
item.client.Close()
|
|
|
|
}
|
|
|
|
}
|
|
|
|
resp.Success = true
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2023-12-19 20:10:01 +08:00
|
|
|
func (b *browserService) createRedisClient(selConn types.ConnectionConfig) (client redis.UniversalClient, err error) {
|
|
|
|
hook := redis2.NewHook(selConn.Name, func(cmd string, cost int64) {
|
|
|
|
now := time.Now()
|
|
|
|
//last := strings.LastIndex(cmd, ":")
|
|
|
|
//if last != -1 {
|
|
|
|
// cmd = cmd[:last]
|
|
|
|
//}
|
|
|
|
b.cmdHistory = append(b.cmdHistory, cmdHistoryItem{
|
|
|
|
Timestamp: now.UnixMilli(),
|
|
|
|
Server: selConn.Name,
|
|
|
|
Cmd: cmd,
|
|
|
|
Cost: cost,
|
2023-11-05 11:57:52 +08:00
|
|
|
})
|
2023-12-19 20:10:01 +08:00
|
|
|
})
|
|
|
|
|
|
|
|
client, err = Connection().createRedisClient(selConn)
|
|
|
|
if err != nil {
|
|
|
|
err = fmt.Errorf("create conenction error: %s", err.Error())
|
|
|
|
return
|
|
|
|
}
|
2023-11-05 11:57:52 +08:00
|
|
|
|
2023-12-19 20:10:01 +08:00
|
|
|
_ = client.Do(b.ctx, "CLIENT", "SETNAME", url.QueryEscape(selConn.Name)).Err()
|
|
|
|
// add hook to each node in cluster mode
|
|
|
|
if cluster, ok := client.(*redis.ClusterClient); ok {
|
|
|
|
err = cluster.ForEachShard(b.ctx, func(ctx context.Context, cli *redis.Client) error {
|
|
|
|
cli.AddHook(hook)
|
|
|
|
return nil
|
|
|
|
})
|
2023-11-05 11:57:52 +08:00
|
|
|
if err != nil {
|
2023-12-19 20:10:01 +08:00
|
|
|
err = fmt.Errorf("get cluster nodes error: %s", err.Error())
|
2023-11-05 11:57:52 +08:00
|
|
|
return
|
|
|
|
}
|
2023-12-19 20:10:01 +08:00
|
|
|
} else {
|
|
|
|
client.AddHook(hook)
|
|
|
|
}
|
2023-11-21 18:13:26 +08:00
|
|
|
|
2023-12-19 20:10:01 +08:00
|
|
|
if _, err = client.Ping(b.ctx).Result(); err != nil && !errors.Is(err, redis.Nil) {
|
|
|
|
err = errors.New("can not connect to redis server:" + err.Error())
|
|
|
|
return
|
|
|
|
}
|
|
|
|
return
|
|
|
|
}
|
2023-11-05 11:57:52 +08:00
|
|
|
|
2023-12-19 20:10:01 +08:00
|
|
|
// get a redis client from local cache or create a new open
// if db >= 0, will also switch to db index
//
// The whole lookup/recreate sequence runs under b.mutex so concurrent
// callers never race on connMap. A cached item is reused as-is when it is
// already on the requested database (or when db < 0, meaning "any
// database"); otherwise the old connection is cancelled, closed and a new
// one is created pointing at db.
func (b *browserService) getRedisClient(server string, db int) (item *connectionItem, err error) {
	b.mutex.Lock()
	defer b.mutex.Unlock()

	var ok bool
	var client redis.UniversalClient
	if item, ok = b.connMap[server]; ok {
		if item.db == db || db < 0 {
			// return without switch database directly
			return
		}

		// close previous connection if database is not the same
		if item.cancelFunc != nil {
			item.cancelFunc()
		}
		item.client.Close()
		delete(b.connMap, server)
		// NOTE(review): if createRedisClient below fails, the named return
		// "item" still points at this closed connection while err is non-nil;
		// callers must check err before using item — confirm all callers do.
	}

	// recreate new connection after switch database
	selConn := Connection().getConnection(server)
	if selConn == nil {
		err = fmt.Errorf("no match connection \"%s\"", server)
		return
	}
	// copy the config so overriding LastDB does not mutate the stored config
	var connConfig = selConn.ConnectionConfig
	connConfig.LastDB = db
	client, err = b.createRedisClient(connConfig)
	if err != nil {
		return
	}
	// derive a per-connection context so the connection can be cancelled
	// independently of the application context
	ctx, cancelFunc := context.WithCancel(b.ctx)
	item = &connectionItem{
		client:      client,
		ctx:         ctx,
		cancelFunc:  cancelFunc,
		cursor:      map[int]uint64{},
		entryCursor: map[int]entryCursor{},
		stepSize:    int64(selConn.LoadSize),
		db:          db,
	}
	if item.stepSize <= 0 {
		// fall back to the default page size when the config has none
		item.stepSize = consts.DEFAULT_LOAD_SIZE
	}
	b.connMap[server] = item
	return
}
|
|
|
|
|
2023-11-05 13:00:03 +08:00
|
|
|
// load current database size
|
|
|
|
func (b *browserService) loadDBSize(ctx context.Context, client redis.UniversalClient) int64 {
|
2023-12-04 16:58:36 +08:00
|
|
|
keyCount, _ := client.DBSize(ctx).Result()
|
|
|
|
return keyCount
|
2023-11-05 13:00:03 +08:00
|
|
|
}
|
|
|
|
|
2023-11-05 11:57:52 +08:00
|
|
|
// save current scan cursor
|
2023-12-19 20:10:01 +08:00
|
|
|
func (b *browserService) setClientCursor(server string, db int, cursor uint64) {
|
|
|
|
if _, ok := b.connMap[server]; ok {
|
2023-11-05 11:57:52 +08:00
|
|
|
if cursor == 0 {
|
2023-12-19 20:10:01 +08:00
|
|
|
delete(b.connMap[server].cursor, db)
|
2023-11-05 11:57:52 +08:00
|
|
|
} else {
|
2023-12-19 20:10:01 +08:00
|
|
|
b.connMap[server].cursor[db] = cursor
|
2023-11-05 11:57:52 +08:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// parse command response content which use "redis info"
|
|
|
|
// # Keyspace\r\ndb0:keys=2,expires=1,avg_ttl=1877111749\r\ndb1:keys=33,expires=0,avg_ttl=0\r\ndb3:keys=17,expires=0,avg_ttl=0\r\ndb5:keys=3,expires=0,avg_ttl=0\r\n
|
|
|
|
func (b *browserService) parseInfo(info string) map[string]map[string]string {
|
|
|
|
parsedInfo := map[string]map[string]string{}
|
|
|
|
lines := strings.Split(info, "\r\n")
|
|
|
|
if len(lines) > 0 {
|
|
|
|
var subInfo map[string]string
|
|
|
|
for _, line := range lines {
|
|
|
|
if strings.HasPrefix(line, "#") {
|
|
|
|
subInfo = map[string]string{}
|
|
|
|
parsedInfo[strings.TrimSpace(strings.TrimLeft(line, "#"))] = subInfo
|
|
|
|
} else {
|
|
|
|
items := strings.SplitN(line, ":", 2)
|
|
|
|
if len(items) < 2 {
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
subInfo[items[0]] = items[1]
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return parsedInfo
|
|
|
|
}
|
|
|
|
|
|
|
|
// parse db item value, content format like below
|
|
|
|
// keys=2,expires=1,avg_ttl=1877111749
|
|
|
|
func (b *browserService) parseDBItemInfo(info string) map[string]int {
|
|
|
|
ret := map[string]int{}
|
|
|
|
items := strings.Split(info, ",")
|
|
|
|
for _, item := range items {
|
|
|
|
kv := strings.SplitN(item, "=", 2)
|
|
|
|
if len(kv) > 1 {
|
|
|
|
ret[kv[0]], _ = strconv.Atoi(kv[1])
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return ret
|
|
|
|
}
|
|
|
|
|
|
|
|
// ServerInfo get server info
|
|
|
|
func (b *browserService) ServerInfo(name string) (resp types.JSResp) {
|
2023-12-06 18:25:51 +08:00
|
|
|
item, err := b.getRedisClient(name, -1)
|
2023-11-05 11:57:52 +08:00
|
|
|
if err != nil {
|
|
|
|
resp.Msg = err.Error()
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
client, ctx := item.client, item.ctx
|
|
|
|
// get database info
|
|
|
|
res, err := client.Info(ctx).Result()
|
|
|
|
if err != nil {
|
|
|
|
resp.Msg = "get server info fail:" + err.Error()
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
resp.Success = true
|
|
|
|
resp.Data = b.parseInfo(res)
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
// OpenDatabase open select database, and list all keys
|
|
|
|
// @param path contain connection name and db name
|
2023-12-04 16:58:36 +08:00
|
|
|
func (b *browserService) OpenDatabase(server string, db int) (resp types.JSResp) {
|
|
|
|
b.setClientCursor(server, db, 0)
|
2023-12-01 17:59:04 +08:00
|
|
|
|
2023-12-04 16:58:36 +08:00
|
|
|
item, err := b.getRedisClient(server, db)
|
2023-12-01 17:59:04 +08:00
|
|
|
if err != nil {
|
|
|
|
resp.Msg = err.Error()
|
|
|
|
return
|
|
|
|
}
|
|
|
|
client, ctx := item.client, item.ctx
|
|
|
|
maxKeys := b.loadDBSize(ctx, client)
|
|
|
|
|
|
|
|
resp.Success = true
|
|
|
|
resp.Data = map[string]any{
|
|
|
|
"maxKeys": maxKeys,
|
|
|
|
}
|
|
|
|
return
|
2023-11-05 11:57:52 +08:00
|
|
|
}
|
|
|
|
|
|
|
|
// scan keys
// @return loaded keys
// @return next cursor
// @return scan error
//
// Scans keys matching "match" (optionally restricted to "keyType" via
// SCAN TYPE), starting at "cursor". When count > 0 the scan stops once more
// than count keys have been accumulated; count <= 0 scans to exhaustion.
// Keys are encoded via strutil.EncodeRedisKey before being returned.
func (b *browserService) scanKeys(ctx context.Context, client redis.UniversalClient, match, keyType string, cursor uint64, count int64) ([]any, uint64, error) {
	var err error
	filterType := len(keyType) > 0
	scanSize := int64(Preferences().GetScanSize())
	// define sub scan function; note it captures the outer "cursor" and "err"
	scan := func(ctx context.Context, cli redis.UniversalClient, appendFunc func(k []any)) error {
		var loadedKey []string
		var scanCount int64
		for {
			if filterType {
				loadedKey, cursor, err = cli.ScanType(ctx, cursor, match, scanSize, keyType).Result()
			} else {
				loadedKey, cursor, err = cli.Scan(ctx, cursor, match, scanSize).Result()
			}
			if err != nil {
				return err
			} else {
				ks := sliceutil.Map(loadedKey, func(i int) any {
					return strutil.EncodeRedisKey(loadedKey[i])
				})
				scanCount += int64(len(ks))
				appendFunc(ks)
			}

			if (count > 0 && scanCount > count) || cursor == 0 {
				break
			}
		}
		return nil
	}

	keys := make([]any, 0)
	if cluster, ok := client.(*redis.ClusterClient); ok {
		// cluster mode: scan every master; appends are serialized by mutex.
		// NOTE(review): the closure-captured "cursor" is shared across all
		// masters (and possibly across concurrent callbacks), so per-node
		// cursors clobber each other — see FIXME below.
		var mutex sync.Mutex
		err = cluster.ForEachMaster(ctx, func(ctx context.Context, cli *redis.Client) error {
			// FIXME: BUG? can not fully load in cluster mode? maybe remove the shared "cursor"
			return scan(ctx, cli, func(k []any) {
				mutex.Lock()
				keys = append(keys, k...)
				mutex.Unlock()
			})
		})
	} else {
		err = scan(ctx, client, func(k []any) {
			keys = append(keys, k...)
		})
	}
	if err != nil {
		return keys, cursor, err
	}
	return keys, cursor, nil
}
|
|
|
|
|
|
|
|
// LoadNextKeys load next key from saved cursor
|
2024-01-05 18:24:38 +08:00
|
|
|
func (b *browserService) LoadNextKeys(server string, db int, match, keyType string) (resp types.JSResp) {
|
|
|
|
item, err := b.getRedisClient(server, db)
|
2023-11-05 11:57:52 +08:00
|
|
|
if err != nil {
|
|
|
|
resp.Msg = err.Error()
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
client, ctx, count := item.client, item.ctx, item.stepSize
|
|
|
|
cursor := item.cursor[db]
|
|
|
|
keys, cursor, err := b.scanKeys(ctx, client, match, keyType, cursor, count)
|
|
|
|
if err != nil {
|
|
|
|
resp.Msg = err.Error()
|
|
|
|
return
|
|
|
|
}
|
2024-01-05 18:24:38 +08:00
|
|
|
b.setClientCursor(server, db, cursor)
|
2023-11-05 13:00:03 +08:00
|
|
|
maxKeys := b.loadDBSize(ctx, client)
|
2023-11-05 11:57:52 +08:00
|
|
|
|
|
|
|
resp.Success = true
|
|
|
|
resp.Data = map[string]any{
|
2023-11-05 13:00:03 +08:00
|
|
|
"keys": keys,
|
|
|
|
"end": cursor == 0,
|
|
|
|
"maxKeys": maxKeys,
|
2023-11-05 11:57:52 +08:00
|
|
|
}
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2023-12-26 01:13:21 +08:00
|
|
|
// LoadNextAllKeys load next all keys
|
2024-01-05 18:24:38 +08:00
|
|
|
func (b *browserService) LoadNextAllKeys(server string, db int, match, keyType string) (resp types.JSResp) {
|
|
|
|
item, err := b.getRedisClient(server, db)
|
2023-11-05 11:57:52 +08:00
|
|
|
if err != nil {
|
|
|
|
resp.Msg = err.Error()
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
client, ctx := item.client, item.ctx
|
|
|
|
cursor := item.cursor[db]
|
|
|
|
keys, _, err := b.scanKeys(ctx, client, match, keyType, cursor, 0)
|
|
|
|
if err != nil {
|
|
|
|
resp.Msg = err.Error()
|
|
|
|
return
|
|
|
|
}
|
2024-01-05 18:24:38 +08:00
|
|
|
b.setClientCursor(server, db, 0)
|
2023-11-10 11:52:54 +08:00
|
|
|
maxKeys := b.loadDBSize(ctx, client)
|
2023-11-05 11:57:52 +08:00
|
|
|
|
|
|
|
resp.Success = true
|
|
|
|
resp.Data = map[string]any{
|
2023-11-10 11:52:54 +08:00
|
|
|
"keys": keys,
|
|
|
|
"maxKeys": maxKeys,
|
2023-11-05 11:57:52 +08:00
|
|
|
}
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2023-12-26 01:13:21 +08:00
|
|
|
// LoadAllKeys load all keys
|
2024-01-05 18:24:38 +08:00
|
|
|
func (b *browserService) LoadAllKeys(server string, db int, match, keyType string) (resp types.JSResp) {
|
|
|
|
item, err := b.getRedisClient(server, db)
|
2023-12-26 01:13:21 +08:00
|
|
|
if err != nil {
|
|
|
|
resp.Msg = err.Error()
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
client, ctx := item.client, item.ctx
|
|
|
|
keys, _, err := b.scanKeys(ctx, client, match, keyType, 0, 0)
|
|
|
|
if err != nil {
|
|
|
|
resp.Msg = err.Error()
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
resp.Success = true
|
|
|
|
resp.Data = map[string]any{
|
|
|
|
"keys": keys,
|
|
|
|
}
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2023-12-04 15:43:25 +08:00
|
|
|
func (b *browserService) GetKeyType(param types.KeySummaryParam) (resp types.JSResp) {
|
|
|
|
item, err := b.getRedisClient(param.Server, param.DB)
|
|
|
|
if err != nil {
|
|
|
|
resp.Msg = err.Error()
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
client, ctx := item.client, item.ctx
|
|
|
|
key := strutil.DecodeRedisKey(param.Key)
|
|
|
|
var keyType string
|
|
|
|
keyType, err = client.Type(ctx, key).Result()
|
|
|
|
if err != nil {
|
|
|
|
resp.Msg = err.Error()
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
if keyType == "none" {
|
|
|
|
resp.Msg = "key not exists"
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
var data types.KeySummary
|
|
|
|
data.Type = strings.ToLower(keyType)
|
|
|
|
|
|
|
|
resp.Success = true
|
|
|
|
resp.Data = data
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2023-11-08 23:45:33 +08:00
|
|
|
// GetKeySummary get key summary info
//
// Gathers type, TTL, memory usage and element length for one key. TYPE and
// TTL are pipelined; MEMORY USAGE is best-effort (a failure yields size 0).
// TTL is normalized to -1 for keys without expiration or on TTL errors.
// Length is resolved per type (STRLEN/LLEN/HLEN/SCARD/ZCARD/XLEN).
func (b *browserService) GetKeySummary(param types.KeySummaryParam) (resp types.JSResp) {
	item, err := b.getRedisClient(param.Server, param.DB)
	if err != nil {
		resp.Msg = err.Error()
		return
	}

	client, ctx := item.client, item.ctx
	key := strutil.DecodeRedisKey(param.Key)

	// pipeline TYPE and TTL to save a round trip
	pipe := client.Pipeline()
	typeVal := pipe.Type(ctx, key)
	ttlVal := pipe.TTL(ctx, key)
	_, err = pipe.Exec(ctx)
	if err != nil {
		resp.Msg = err.Error()
		return
	}

	if typeVal.Err() != nil {
		resp.Msg = typeVal.Err().Error()
		return
	}
	// best-effort memory usage; error deliberately ignored (size stays 0)
	size, _ := client.MemoryUsage(ctx, key, 0).Result()
	data := types.KeySummary{
		Type: strings.ToLower(typeVal.Val()),
		Size: size,
	}
	if data.Type == "none" {
		resp.Msg = "key not exists"
		return
	}

	// normalize TTL: -1 means "no expiration" (also used on TTL errors)
	if ttlVal.Err() != nil {
		data.TTL = -1
	} else {
		if ttlVal.Val() < 0 {
			data.TTL = -1
		} else {
			data.TTL = int64(ttlVal.Val().Seconds())
		}
	}

	// element length depends on the key's type
	switch data.Type {
	case "string":
		data.Length, err = client.StrLen(ctx, key).Result()
	case "list":
		data.Length, err = client.LLen(ctx, key).Result()
	case "hash":
		data.Length, err = client.HLen(ctx, key).Result()
	case "set":
		data.Length, err = client.SCard(ctx, key).Result()
	case "zset":
		data.Length, err = client.ZCard(ctx, key).Result()
	case "stream":
		data.Length, err = client.XLen(ctx, key).Result()
	default:
		err = errors.New("unknown key type")
	}

	if err != nil {
		resp.Msg = err.Error()
		return
	}

	resp.Success = true
	resp.Data = data
	return
}
|
|
|
|
|
|
|
|
// GetKeyDetail get key detail
|
|
|
|
func (b *browserService) GetKeyDetail(param types.KeyDetailParam) (resp types.JSResp) {
|
|
|
|
item, err := b.getRedisClient(param.Server, param.DB)
|
|
|
|
if err != nil {
|
|
|
|
resp.Msg = err.Error()
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
client, ctx, entryCors := item.client, item.ctx, item.entryCursor
|
|
|
|
key := strutil.DecodeRedisKey(param.Key)
|
|
|
|
var keyType string
|
|
|
|
keyType, err = client.Type(ctx, key).Result()
|
|
|
|
if err != nil {
|
|
|
|
resp.Msg = err.Error()
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
if keyType == "none" {
|
|
|
|
resp.Msg = "key not exists"
|
|
|
|
return
|
|
|
|
}
|
2023-11-14 17:15:02 +08:00
|
|
|
var doConvert bool
|
|
|
|
if (len(param.Decode) > 0 && param.Decode != types.DECODE_NONE) ||
|
|
|
|
(len(param.Format) > 0 && param.Format != types.FORMAT_RAW) {
|
|
|
|
doConvert = true
|
|
|
|
}
|
2023-11-08 23:45:33 +08:00
|
|
|
|
|
|
|
var data types.KeyDetail
|
2024-01-06 22:13:26 +08:00
|
|
|
data.KeyType = strings.ToLower(keyType)
|
2023-11-08 23:45:33 +08:00
|
|
|
//var cursor uint64
|
|
|
|
matchPattern := param.MatchPattern
|
|
|
|
if len(matchPattern) <= 0 {
|
|
|
|
matchPattern = "*"
|
|
|
|
}
|
|
|
|
|
|
|
|
// define get entry cursor function
|
2023-11-20 16:23:27 +08:00
|
|
|
getEntryCursor := func() (uint64, string, bool) {
|
2023-11-08 23:45:33 +08:00
|
|
|
if entry, ok := entryCors[param.DB]; !ok || entry.Key != key || entry.Pattern != matchPattern {
|
|
|
|
// not the same key or match pattern, reset cursor
|
|
|
|
entry = entryCursor{
|
|
|
|
DB: param.DB,
|
|
|
|
Key: key,
|
|
|
|
Pattern: matchPattern,
|
|
|
|
Cursor: 0,
|
|
|
|
}
|
|
|
|
entryCors[param.DB] = entry
|
2023-11-20 16:23:27 +08:00
|
|
|
return 0, "", true
|
2023-11-08 23:45:33 +08:00
|
|
|
} else {
|
2023-11-20 16:23:27 +08:00
|
|
|
return entry.Cursor, entry.XLast, false
|
2023-11-08 23:45:33 +08:00
|
|
|
}
|
|
|
|
}
|
|
|
|
// define set entry cursor function
|
|
|
|
setEntryCursor := func(cursor uint64) {
|
|
|
|
entryCors[param.DB] = entryCursor{
|
|
|
|
DB: param.DB,
|
|
|
|
Type: "",
|
|
|
|
Key: key,
|
|
|
|
Pattern: matchPattern,
|
|
|
|
Cursor: cursor,
|
|
|
|
}
|
|
|
|
}
|
|
|
|
// define set last stream pos function
|
|
|
|
setEntryXLast := func(last string) {
|
|
|
|
entryCors[param.DB] = entryCursor{
|
|
|
|
DB: param.DB,
|
|
|
|
Type: "",
|
|
|
|
Key: key,
|
|
|
|
Pattern: matchPattern,
|
|
|
|
XLast: last,
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2024-02-03 15:06:23 +08:00
|
|
|
decoder := Preferences().GetDecoder()
|
|
|
|
|
2024-01-06 22:13:26 +08:00
|
|
|
switch data.KeyType {
|
2023-11-08 23:45:33 +08:00
|
|
|
case "string":
|
|
|
|
var str string
|
|
|
|
str, err = client.Get(ctx, key).Result()
|
2023-11-13 15:28:13 +08:00
|
|
|
data.Value = strutil.EncodeRedisKey(str)
|
2023-11-13 22:31:18 +08:00
|
|
|
//data.Value, data.Decode, data.Format = strutil.ConvertTo(str, param.Decode, param.Format)
|
2023-11-08 23:45:33 +08:00
|
|
|
|
|
|
|
case "list":
|
2023-11-20 16:23:27 +08:00
|
|
|
loadListHandle := func() ([]types.ListEntryItem, bool, bool, error) {
|
2023-11-14 14:49:16 +08:00
|
|
|
var loadVal []string
|
2023-11-08 23:45:33 +08:00
|
|
|
var cursor uint64
|
2023-11-20 16:23:27 +08:00
|
|
|
var reset bool
|
2023-11-14 17:15:02 +08:00
|
|
|
var subErr error
|
2023-11-20 16:23:27 +08:00
|
|
|
doFilter := matchPattern != "*"
|
2023-11-21 17:06:26 +08:00
|
|
|
if param.Full || doFilter {
|
2023-11-08 23:45:33 +08:00
|
|
|
// load all
|
2023-11-20 16:23:27 +08:00
|
|
|
cursor, reset = 0, true
|
2023-11-14 17:15:02 +08:00
|
|
|
loadVal, subErr = client.LRange(ctx, key, 0, -1).Result()
|
2023-11-08 23:45:33 +08:00
|
|
|
} else {
|
2023-11-16 00:21:54 +08:00
|
|
|
if param.Reset {
|
2023-11-20 16:23:27 +08:00
|
|
|
cursor, reset = 0, true
|
2023-11-16 00:21:54 +08:00
|
|
|
} else {
|
2023-11-20 16:23:27 +08:00
|
|
|
cursor, _, reset = getEntryCursor()
|
2023-11-16 00:21:54 +08:00
|
|
|
}
|
2023-11-08 23:45:33 +08:00
|
|
|
scanSize := int64(Preferences().GetScanSize())
|
2023-11-14 17:15:02 +08:00
|
|
|
loadVal, subErr = client.LRange(ctx, key, int64(cursor), int64(cursor)+scanSize-1).Result()
|
2023-11-08 23:45:33 +08:00
|
|
|
cursor = cursor + uint64(scanSize)
|
2023-11-14 14:49:16 +08:00
|
|
|
if len(loadVal) < int(scanSize) {
|
2023-11-08 23:45:33 +08:00
|
|
|
cursor = 0
|
|
|
|
}
|
|
|
|
}
|
|
|
|
setEntryCursor(cursor)
|
2023-11-14 14:49:16 +08:00
|
|
|
|
2023-11-20 16:23:27 +08:00
|
|
|
items := make([]types.ListEntryItem, 0, len(loadVal))
|
|
|
|
for _, val := range loadVal {
|
|
|
|
if doFilter && !strings.Contains(val, param.MatchPattern) {
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
items = append(items, types.ListEntryItem{
|
|
|
|
Value: val,
|
|
|
|
})
|
2023-11-14 14:49:16 +08:00
|
|
|
if doConvert {
|
2024-02-03 15:06:23 +08:00
|
|
|
if dv, _, _ := convutil.ConvertTo(val, param.Decode, param.Format, decoder); dv != val {
|
2023-11-20 16:23:27 +08:00
|
|
|
items[len(items)-1].DisplayValue = dv
|
2023-11-14 14:49:16 +08:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
2023-11-14 17:15:02 +08:00
|
|
|
if subErr != nil {
|
2023-11-20 16:23:27 +08:00
|
|
|
return items, reset, false, subErr
|
2023-11-08 23:45:33 +08:00
|
|
|
}
|
2023-11-20 16:23:27 +08:00
|
|
|
return items, reset, cursor == 0, nil
|
2023-11-08 23:45:33 +08:00
|
|
|
}
|
|
|
|
|
2023-11-20 16:23:27 +08:00
|
|
|
data.Value, data.Reset, data.End, err = loadListHandle()
|
|
|
|
data.Match, data.Decode, data.Format = param.MatchPattern, param.Decode, param.Format
|
2023-11-14 14:49:16 +08:00
|
|
|
if err != nil {
|
|
|
|
resp.Msg = err.Error()
|
|
|
|
return
|
|
|
|
}
|
2023-11-08 23:45:33 +08:00
|
|
|
|
|
|
|
case "hash":
|
2023-11-20 18:38:23 +08:00
|
|
|
if !strings.HasPrefix(matchPattern, "*") {
|
|
|
|
matchPattern = "*" + matchPattern
|
|
|
|
}
|
|
|
|
if !strings.HasSuffix(matchPattern, "*") {
|
|
|
|
matchPattern = matchPattern + "*"
|
|
|
|
}
|
2023-11-20 16:23:27 +08:00
|
|
|
loadHashHandle := func() ([]types.HashEntryItem, bool, bool, error) {
|
2023-11-15 23:41:53 +08:00
|
|
|
var items []types.HashEntryItem
|
2023-11-08 23:45:33 +08:00
|
|
|
var loadedVal []string
|
|
|
|
var cursor uint64
|
2023-11-20 16:23:27 +08:00
|
|
|
var reset bool
|
2023-11-14 17:15:02 +08:00
|
|
|
var subErr error
|
2023-11-15 23:41:53 +08:00
|
|
|
scanSize := int64(Preferences().GetScanSize())
|
2023-11-20 18:38:23 +08:00
|
|
|
if param.Full || matchPattern != "*" {
|
2023-11-08 23:45:33 +08:00
|
|
|
// load all
|
2023-11-20 16:23:27 +08:00
|
|
|
cursor, reset = 0, true
|
2023-12-25 16:22:29 +08:00
|
|
|
items = []types.HashEntryItem{}
|
2023-11-08 23:45:33 +08:00
|
|
|
for {
|
2023-11-20 18:38:23 +08:00
|
|
|
loadedVal, cursor, subErr = client.HScan(ctx, key, cursor, matchPattern, scanSize).Result()
|
2023-11-14 17:15:02 +08:00
|
|
|
if subErr != nil {
|
2023-11-20 16:23:27 +08:00
|
|
|
return nil, reset, false, subErr
|
2023-11-08 23:45:33 +08:00
|
|
|
}
|
|
|
|
for i := 0; i < len(loadedVal); i += 2 {
|
2023-11-13 22:31:18 +08:00
|
|
|
items = append(items, types.HashEntryItem{
|
2023-11-14 14:49:16 +08:00
|
|
|
Key: loadedVal[i],
|
|
|
|
Value: strutil.EncodeRedisKey(loadedVal[i+1]),
|
2023-11-13 22:31:18 +08:00
|
|
|
})
|
2023-11-14 14:49:16 +08:00
|
|
|
if doConvert {
|
2024-02-03 15:06:23 +08:00
|
|
|
if dv, _, _ := convutil.ConvertTo(loadedVal[i+1], param.Decode, param.Format, decoder); dv != loadedVal[i+1] {
|
2023-11-15 23:41:53 +08:00
|
|
|
items[len(items)-1].DisplayValue = dv
|
2023-11-14 14:49:16 +08:00
|
|
|
}
|
|
|
|
}
|
2023-11-08 23:45:33 +08:00
|
|
|
}
|
|
|
|
if cursor == 0 {
|
|
|
|
break
|
|
|
|
}
|
|
|
|
}
|
|
|
|
} else {
|
2023-11-16 00:21:54 +08:00
|
|
|
if param.Reset {
|
2023-11-20 16:23:27 +08:00
|
|
|
cursor, reset = 0, true
|
2023-11-16 00:21:54 +08:00
|
|
|
} else {
|
2023-11-20 16:23:27 +08:00
|
|
|
cursor, _, reset = getEntryCursor()
|
2023-11-16 00:21:54 +08:00
|
|
|
}
|
2023-11-14 17:15:02 +08:00
|
|
|
loadedVal, cursor, subErr = client.HScan(ctx, key, cursor, matchPattern, scanSize).Result()
|
|
|
|
if subErr != nil {
|
2023-11-20 16:23:27 +08:00
|
|
|
return nil, reset, false, subErr
|
2023-11-08 23:45:33 +08:00
|
|
|
}
|
2023-11-15 23:41:53 +08:00
|
|
|
loadedLen := len(loadedVal)
|
|
|
|
items = make([]types.HashEntryItem, loadedLen/2)
|
|
|
|
for i := 0; i < loadedLen; i += 2 {
|
|
|
|
items[i/2].Key = loadedVal[i]
|
|
|
|
items[i/2].Value = strutil.EncodeRedisKey(loadedVal[i+1])
|
2023-11-14 14:49:16 +08:00
|
|
|
if doConvert {
|
2024-02-03 15:06:23 +08:00
|
|
|
if dv, _, _ := convutil.ConvertTo(loadedVal[i+1], param.Decode, param.Format, decoder); dv != loadedVal[i+1] {
|
2023-11-14 14:49:16 +08:00
|
|
|
items[i/2].DisplayValue = dv
|
|
|
|
}
|
|
|
|
}
|
2023-11-08 23:45:33 +08:00
|
|
|
}
|
|
|
|
}
|
|
|
|
setEntryCursor(cursor)
|
2023-11-20 16:23:27 +08:00
|
|
|
return items, reset, cursor == 0, nil
|
2023-11-08 23:45:33 +08:00
|
|
|
}
|
|
|
|
|
2023-11-20 16:23:27 +08:00
|
|
|
data.Value, data.Reset, data.End, err = loadHashHandle()
|
|
|
|
data.Match, data.Decode, data.Format = param.MatchPattern, param.Decode, param.Format
|
2023-11-08 23:45:33 +08:00
|
|
|
if err != nil {
|
|
|
|
resp.Msg = err.Error()
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
case "set":
|
2023-11-21 16:44:33 +08:00
|
|
|
if !strings.HasPrefix(matchPattern, "*") {
|
|
|
|
matchPattern = "*" + matchPattern
|
|
|
|
}
|
|
|
|
if !strings.HasSuffix(matchPattern, "*") {
|
|
|
|
matchPattern = matchPattern + "*"
|
|
|
|
}
|
2023-11-20 16:23:27 +08:00
|
|
|
loadSetHandle := func() ([]types.SetEntryItem, bool, bool, error) {
|
2023-11-14 17:15:02 +08:00
|
|
|
var items []types.SetEntryItem
|
2023-11-08 23:45:33 +08:00
|
|
|
var cursor uint64
|
2023-11-20 16:23:27 +08:00
|
|
|
var reset bool
|
2023-11-14 17:15:02 +08:00
|
|
|
var subErr error
|
2023-11-08 23:45:33 +08:00
|
|
|
var loadedKey []string
|
2023-11-15 23:41:53 +08:00
|
|
|
scanSize := int64(Preferences().GetScanSize())
|
2023-11-21 16:44:33 +08:00
|
|
|
if param.Full || matchPattern != "*" {
|
2023-11-08 23:45:33 +08:00
|
|
|
// load all
|
2023-11-20 16:23:27 +08:00
|
|
|
cursor, reset = 0, true
|
2023-12-25 16:22:29 +08:00
|
|
|
items = []types.SetEntryItem{}
|
2023-11-08 23:45:33 +08:00
|
|
|
for {
|
2023-11-21 16:44:33 +08:00
|
|
|
loadedKey, cursor, subErr = client.SScan(ctx, key, cursor, matchPattern, scanSize).Result()
|
2023-11-14 17:15:02 +08:00
|
|
|
if subErr != nil {
|
2023-11-20 16:23:27 +08:00
|
|
|
return items, reset, false, subErr
|
2023-11-08 23:45:33 +08:00
|
|
|
}
|
2023-11-15 23:41:53 +08:00
|
|
|
for _, val := range loadedKey {
|
|
|
|
items = append(items, types.SetEntryItem{
|
|
|
|
Value: val,
|
|
|
|
})
|
|
|
|
if doConvert {
|
2024-02-03 15:06:23 +08:00
|
|
|
if dv, _, _ := convutil.ConvertTo(val, param.Decode, param.Format, decoder); dv != val {
|
2023-11-15 23:41:53 +08:00
|
|
|
items[len(items)-1].DisplayValue = dv
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
2023-11-08 23:45:33 +08:00
|
|
|
if cursor == 0 {
|
|
|
|
break
|
|
|
|
}
|
|
|
|
}
|
|
|
|
} else {
|
2023-11-16 00:21:54 +08:00
|
|
|
if param.Reset {
|
2023-11-20 16:23:27 +08:00
|
|
|
cursor, reset = 0, true
|
2023-11-16 00:21:54 +08:00
|
|
|
} else {
|
2023-11-20 16:23:27 +08:00
|
|
|
cursor, _, reset = getEntryCursor()
|
2023-11-16 00:21:54 +08:00
|
|
|
}
|
2023-11-21 16:44:33 +08:00
|
|
|
loadedKey, cursor, subErr = client.SScan(ctx, key, cursor, matchPattern, scanSize).Result()
|
2023-11-15 23:41:53 +08:00
|
|
|
items = make([]types.SetEntryItem, len(loadedKey))
|
|
|
|
for i, val := range loadedKey {
|
|
|
|
items[i].Value = val
|
|
|
|
if doConvert {
|
2024-02-03 15:06:23 +08:00
|
|
|
if dv, _, _ := convutil.ConvertTo(val, param.Decode, param.Format, decoder); dv != val {
|
2023-11-15 23:41:53 +08:00
|
|
|
items[i].DisplayValue = dv
|
|
|
|
}
|
2023-11-14 17:15:02 +08:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
2023-11-15 23:41:53 +08:00
|
|
|
setEntryCursor(cursor)
|
2023-11-20 16:23:27 +08:00
|
|
|
return items, reset, cursor == 0, nil
|
2023-11-08 23:45:33 +08:00
|
|
|
}
|
|
|
|
|
2023-11-20 16:23:27 +08:00
|
|
|
data.Value, data.Reset, data.End, err = loadSetHandle()
|
|
|
|
data.Match, data.Decode, data.Format = param.MatchPattern, param.Decode, param.Format
|
2023-11-08 23:45:33 +08:00
|
|
|
if err != nil {
|
|
|
|
resp.Msg = err.Error()
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
case "zset":
|
2023-11-21 16:44:33 +08:00
|
|
|
if !strings.HasPrefix(matchPattern, "*") {
|
|
|
|
matchPattern = "*" + matchPattern
|
|
|
|
}
|
|
|
|
if !strings.HasSuffix(matchPattern, "*") {
|
|
|
|
matchPattern = matchPattern + "*"
|
|
|
|
}
|
2023-11-20 16:23:27 +08:00
|
|
|
loadZSetHandle := func() ([]types.ZSetEntryItem, bool, bool, error) {
|
2023-11-15 23:41:53 +08:00
|
|
|
var items []types.ZSetEntryItem
|
2023-11-20 16:23:27 +08:00
|
|
|
var reset bool
|
2023-11-08 23:45:33 +08:00
|
|
|
var cursor uint64
|
|
|
|
scanSize := int64(Preferences().GetScanSize())
|
2023-11-24 00:22:40 +08:00
|
|
|
doFilter := matchPattern != "*"
|
|
|
|
if param.Full || doFilter {
|
2023-11-08 23:45:33 +08:00
|
|
|
// load all
|
2023-11-24 00:22:40 +08:00
|
|
|
var loadedVal []string
|
2023-11-20 16:23:27 +08:00
|
|
|
cursor, reset = 0, true
|
2023-12-25 16:22:29 +08:00
|
|
|
items = []types.ZSetEntryItem{}
|
2023-11-08 23:45:33 +08:00
|
|
|
for {
|
2023-11-21 16:44:33 +08:00
|
|
|
loadedVal, cursor, err = client.ZScan(ctx, key, cursor, matchPattern, scanSize).Result()
|
2023-11-08 23:45:33 +08:00
|
|
|
if err != nil {
|
2023-11-20 16:23:27 +08:00
|
|
|
return items, reset, false, err
|
2023-11-08 23:45:33 +08:00
|
|
|
}
|
|
|
|
var score float64
|
|
|
|
for i := 0; i < len(loadedVal); i += 2 {
|
|
|
|
if score, err = strconv.ParseFloat(loadedVal[i+1], 64); err == nil {
|
2023-11-15 23:41:53 +08:00
|
|
|
items = append(items, types.ZSetEntryItem{
|
2023-11-08 23:45:33 +08:00
|
|
|
Value: loadedVal[i],
|
|
|
|
Score: score,
|
|
|
|
})
|
2023-11-15 23:41:53 +08:00
|
|
|
if doConvert {
|
2024-02-03 15:06:23 +08:00
|
|
|
if dv, _, _ := convutil.ConvertTo(loadedVal[i], param.Decode, param.Format, decoder); dv != loadedVal[i] {
|
2023-11-15 23:41:53 +08:00
|
|
|
items[len(items)-1].DisplayValue = dv
|
|
|
|
}
|
|
|
|
}
|
2023-11-08 23:45:33 +08:00
|
|
|
}
|
|
|
|
}
|
|
|
|
if cursor == 0 {
|
|
|
|
break
|
|
|
|
}
|
|
|
|
}
|
|
|
|
} else {
|
2023-11-16 00:21:54 +08:00
|
|
|
if param.Reset {
|
2023-11-20 16:23:27 +08:00
|
|
|
cursor, reset = 0, true
|
2023-11-16 00:21:54 +08:00
|
|
|
} else {
|
2023-11-20 16:23:27 +08:00
|
|
|
cursor, _, reset = getEntryCursor()
|
2023-11-16 00:21:54 +08:00
|
|
|
}
|
2023-11-24 00:22:40 +08:00
|
|
|
var loadedVal []redis.Z
|
|
|
|
loadedVal, err = client.ZRangeWithScores(ctx, key, int64(cursor), int64(cursor)+scanSize-1).Result()
|
|
|
|
cursor = cursor + uint64(scanSize)
|
|
|
|
if len(loadedVal) < int(scanSize) {
|
|
|
|
cursor = 0
|
|
|
|
}
|
|
|
|
|
|
|
|
items = make([]types.ZSetEntryItem, 0, len(loadedVal))
|
|
|
|
for _, z := range loadedVal {
|
2023-12-01 18:42:39 +08:00
|
|
|
val := strutil.AnyToString(z.Member, "", 0)
|
2023-11-24 00:22:40 +08:00
|
|
|
if doFilter && !strings.Contains(val, param.MatchPattern) {
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
items = append(items, types.ZSetEntryItem{
|
|
|
|
Score: z.Score,
|
|
|
|
Value: val,
|
|
|
|
})
|
|
|
|
if doConvert {
|
2024-02-03 15:06:23 +08:00
|
|
|
if dv, _, _ := convutil.ConvertTo(val, param.Decode, param.Format, decoder); dv != val {
|
2023-11-24 00:22:40 +08:00
|
|
|
items[len(items)-1].DisplayValue = dv
|
2023-11-15 23:41:53 +08:00
|
|
|
}
|
2023-11-08 23:45:33 +08:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
setEntryCursor(cursor)
|
2023-11-20 16:23:27 +08:00
|
|
|
return items, reset, cursor == 0, nil
|
2023-11-08 23:45:33 +08:00
|
|
|
}
|
|
|
|
|
2023-11-20 16:23:27 +08:00
|
|
|
data.Value, data.Reset, data.End, err = loadZSetHandle()
|
|
|
|
data.Match, data.Decode, data.Format = param.MatchPattern, param.Decode, param.Format
|
2023-11-08 23:45:33 +08:00
|
|
|
if err != nil {
|
|
|
|
resp.Msg = err.Error()
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
case "stream":
|
2023-11-20 16:23:27 +08:00
|
|
|
loadStreamHandle := func() ([]types.StreamEntryItem, bool, bool, error) {
|
2023-11-08 23:45:33 +08:00
|
|
|
var msgs []redis.XMessage
|
|
|
|
var last string
|
2023-11-20 16:23:27 +08:00
|
|
|
var reset bool
|
2023-11-21 17:06:26 +08:00
|
|
|
doFilter := matchPattern != "*"
|
|
|
|
if param.Full || doFilter {
|
2023-11-08 23:45:33 +08:00
|
|
|
// load all
|
2023-11-20 16:23:27 +08:00
|
|
|
last, reset = "", true
|
2023-11-08 23:45:33 +08:00
|
|
|
msgs, err = client.XRevRange(ctx, key, "+", "-").Result()
|
|
|
|
} else {
|
|
|
|
scanSize := int64(Preferences().GetScanSize())
|
2023-11-16 00:21:54 +08:00
|
|
|
if param.Reset {
|
|
|
|
last = ""
|
|
|
|
} else {
|
2023-11-20 16:23:27 +08:00
|
|
|
_, last, reset = getEntryCursor()
|
2023-11-16 00:21:54 +08:00
|
|
|
}
|
2023-11-08 23:45:33 +08:00
|
|
|
if len(last) <= 0 {
|
|
|
|
last = "+"
|
|
|
|
}
|
|
|
|
if last != "+" {
|
|
|
|
// add 1 more item when continue scan
|
|
|
|
msgs, err = client.XRevRangeN(ctx, key, last, "-", scanSize+1).Result()
|
|
|
|
msgs = msgs[1:]
|
|
|
|
} else {
|
|
|
|
msgs, err = client.XRevRangeN(ctx, key, last, "-", scanSize).Result()
|
|
|
|
}
|
|
|
|
scanCount := len(msgs)
|
|
|
|
if scanCount <= 0 || scanCount < int(scanSize) {
|
|
|
|
last = ""
|
|
|
|
} else if scanCount > 0 {
|
|
|
|
last = msgs[scanCount-1].ID
|
|
|
|
}
|
|
|
|
}
|
|
|
|
setEntryXLast(last)
|
2023-11-21 17:06:26 +08:00
|
|
|
items := make([]types.StreamEntryItem, 0, len(msgs))
|
|
|
|
for _, msg := range msgs {
|
|
|
|
it := types.StreamEntryItem{
|
|
|
|
ID: msg.ID,
|
|
|
|
Value: msg.Values,
|
|
|
|
}
|
2023-11-16 00:21:54 +08:00
|
|
|
if vb, merr := json.Marshal(msg.Values); merr != nil {
|
2023-11-21 17:06:26 +08:00
|
|
|
it.DisplayValue = "{}"
|
2023-11-16 00:21:54 +08:00
|
|
|
} else {
|
2024-02-03 15:06:23 +08:00
|
|
|
it.DisplayValue, _, _ = convutil.ConvertTo(string(vb), types.DECODE_NONE, types.FORMAT_JSON, decoder)
|
2023-11-21 17:06:26 +08:00
|
|
|
}
|
|
|
|
if doFilter && !strings.Contains(it.DisplayValue, param.MatchPattern) {
|
|
|
|
continue
|
2023-11-16 00:21:54 +08:00
|
|
|
}
|
2023-11-21 17:06:26 +08:00
|
|
|
items = append(items, it)
|
2023-11-08 23:45:33 +08:00
|
|
|
}
|
|
|
|
if err != nil {
|
2023-11-20 16:23:27 +08:00
|
|
|
return items, reset, false, err
|
2023-11-08 23:45:33 +08:00
|
|
|
}
|
2023-11-20 16:23:27 +08:00
|
|
|
return items, reset, last == "", nil
|
2023-11-08 23:45:33 +08:00
|
|
|
}
|
|
|
|
|
2023-11-20 16:23:27 +08:00
|
|
|
data.Value, data.Reset, data.End, err = loadStreamHandle()
|
|
|
|
data.Match, data.Decode, data.Format = param.MatchPattern, param.Decode, param.Format
|
2023-11-08 23:45:33 +08:00
|
|
|
if err != nil {
|
|
|
|
resp.Msg = err.Error()
|
|
|
|
return
|
|
|
|
}
|
|
|
|
}
|
|
|
|
if err != nil {
|
|
|
|
resp.Msg = err.Error()
|
|
|
|
return
|
|
|
|
}
|
|
|
|
resp.Success = true
|
|
|
|
resp.Data = data
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2023-11-13 15:28:13 +08:00
|
|
|
// ConvertValue convert value with decode method and format
|
2023-11-13 22:31:18 +08:00
|
|
|
// blank decode indicate auto decode
|
|
|
|
// blank format indicate auto format
|
2023-11-13 15:28:13 +08:00
|
|
|
func (b *browserService) ConvertValue(value any, decode, format string) (resp types.JSResp) {
|
|
|
|
str := strutil.DecodeRedisKey(value)
|
2024-02-03 15:06:23 +08:00
|
|
|
value, decode, format = convutil.ConvertTo(str, decode, format, Preferences().GetDecoder())
|
2023-11-05 11:57:52 +08:00
|
|
|
resp.Success = true
|
|
|
|
resp.Data = map[string]any{
|
|
|
|
"value": value,
|
2023-11-13 15:28:13 +08:00
|
|
|
"decode": decode,
|
|
|
|
"format": format,
|
2023-11-05 11:57:52 +08:00
|
|
|
}
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
// SetKeyValue set value by key
|
|
|
|
// @param ttl <= 0 means keep current ttl
|
2023-11-13 15:28:13 +08:00
|
|
|
func (b *browserService) SetKeyValue(param types.SetKeyParam) (resp types.JSResp) {
|
|
|
|
item, err := b.getRedisClient(param.Server, param.DB)
|
2023-11-05 11:57:52 +08:00
|
|
|
if err != nil {
|
|
|
|
resp.Msg = err.Error()
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
client, ctx := item.client, item.ctx
|
2023-11-13 15:28:13 +08:00
|
|
|
key := strutil.DecodeRedisKey(param.Key)
|
2023-11-05 11:57:52 +08:00
|
|
|
var expiration time.Duration
|
2023-11-13 15:28:13 +08:00
|
|
|
if param.TTL < 0 {
|
2023-11-05 11:57:52 +08:00
|
|
|
if expiration, err = client.PTTL(ctx, key).Result(); err != nil {
|
|
|
|
expiration = redis.KeepTTL
|
|
|
|
}
|
|
|
|
} else {
|
2023-11-13 15:28:13 +08:00
|
|
|
expiration = time.Duration(param.TTL) * time.Second
|
2023-11-05 11:57:52 +08:00
|
|
|
}
|
2023-11-13 15:28:13 +08:00
|
|
|
// use default decode type and format
|
|
|
|
if len(param.Decode) <= 0 {
|
|
|
|
param.Decode = types.DECODE_NONE
|
|
|
|
}
|
|
|
|
if len(param.Format) <= 0 {
|
2023-11-13 22:41:33 +08:00
|
|
|
param.Format = types.FORMAT_RAW
|
2023-11-13 15:28:13 +08:00
|
|
|
}
|
|
|
|
switch strings.ToLower(param.KeyType) {
|
2023-11-05 11:57:52 +08:00
|
|
|
case "string":
|
2023-11-13 15:28:13 +08:00
|
|
|
if str, ok := param.Value.(string); !ok {
|
2023-11-05 11:57:52 +08:00
|
|
|
resp.Msg = "invalid string value"
|
|
|
|
return
|
|
|
|
} else {
|
|
|
|
var saveStr string
|
2024-02-03 15:06:23 +08:00
|
|
|
if saveStr, err = convutil.SaveAs(str, param.Format, param.Decode, Preferences().GetDecoder()); err != nil {
|
2023-11-14 14:49:16 +08:00
|
|
|
resp.Msg = fmt.Sprintf(`save to type "%s" fail: %s`, param.Format, err.Error())
|
2023-11-05 11:57:52 +08:00
|
|
|
return
|
|
|
|
}
|
|
|
|
_, err = client.Set(ctx, key, saveStr, 0).Result()
|
|
|
|
// set expiration lonely, not "keepttl"
|
|
|
|
if err == nil && expiration > 0 {
|
|
|
|
client.Expire(ctx, key, expiration)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
case "list":
|
2023-11-13 15:28:13 +08:00
|
|
|
if strs, ok := param.Value.([]any); !ok {
|
2023-11-05 11:57:52 +08:00
|
|
|
resp.Msg = "invalid list value"
|
|
|
|
return
|
|
|
|
} else {
|
|
|
|
err = client.LPush(ctx, key, strs...).Err()
|
|
|
|
if err == nil && expiration > 0 {
|
|
|
|
client.Expire(ctx, key, expiration)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
case "hash":
|
2023-11-13 15:28:13 +08:00
|
|
|
if strs, ok := param.Value.([]any); !ok {
|
2023-11-05 11:57:52 +08:00
|
|
|
resp.Msg = "invalid hash value"
|
|
|
|
return
|
|
|
|
} else {
|
|
|
|
total := len(strs)
|
|
|
|
if total > 1 {
|
|
|
|
_, err = client.Pipelined(ctx, func(pipe redis.Pipeliner) error {
|
|
|
|
for i := 0; i < total; i += 2 {
|
|
|
|
pipe.HSet(ctx, key, strs[i], strs[i+1])
|
|
|
|
}
|
|
|
|
if expiration > 0 {
|
|
|
|
pipe.Expire(ctx, key, expiration)
|
|
|
|
}
|
|
|
|
return nil
|
|
|
|
})
|
|
|
|
}
|
|
|
|
}
|
|
|
|
case "set":
|
2023-11-13 15:28:13 +08:00
|
|
|
if strs, ok := param.Value.([]any); !ok || len(strs) <= 0 {
|
2023-11-05 11:57:52 +08:00
|
|
|
resp.Msg = "invalid set value"
|
|
|
|
return
|
|
|
|
} else {
|
|
|
|
if len(strs) > 0 {
|
|
|
|
err = client.SAdd(ctx, key, strs...).Err()
|
|
|
|
if err == nil && expiration > 0 {
|
|
|
|
client.Expire(ctx, key, expiration)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
case "zset":
|
2023-11-13 15:28:13 +08:00
|
|
|
if strs, ok := param.Value.([]any); !ok || len(strs) <= 0 {
|
2023-11-05 11:57:52 +08:00
|
|
|
resp.Msg = "invalid zset value"
|
|
|
|
return
|
|
|
|
} else {
|
|
|
|
if len(strs) > 1 {
|
|
|
|
var members []redis.Z
|
|
|
|
for i := 0; i < len(strs); i += 2 {
|
|
|
|
score, _ := strconv.ParseFloat(strs[i+1].(string), 64)
|
|
|
|
members = append(members, redis.Z{
|
|
|
|
Score: score,
|
2024-01-09 15:10:03 +08:00
|
|
|
Member: strs[i],
|
2023-11-05 11:57:52 +08:00
|
|
|
})
|
|
|
|
}
|
|
|
|
err = client.ZAdd(ctx, key, members...).Err()
|
|
|
|
if err == nil && expiration > 0 {
|
|
|
|
client.Expire(ctx, key, expiration)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
case "stream":
|
2023-11-13 15:28:13 +08:00
|
|
|
if strs, ok := param.Value.([]any); !ok {
|
2023-11-05 11:57:52 +08:00
|
|
|
resp.Msg = "invalid stream value"
|
|
|
|
return
|
|
|
|
} else {
|
|
|
|
if len(strs) > 2 {
|
|
|
|
err = client.XAdd(ctx, &redis.XAddArgs{
|
|
|
|
Stream: key,
|
|
|
|
ID: strs[0].(string),
|
|
|
|
Values: strs[1:],
|
|
|
|
}).Err()
|
|
|
|
if err == nil && expiration > 0 {
|
|
|
|
client.Expire(ctx, key, expiration)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
if err != nil {
|
|
|
|
resp.Msg = err.Error()
|
|
|
|
return
|
|
|
|
}
|
|
|
|
resp.Success = true
|
2023-11-28 01:11:45 +08:00
|
|
|
//resp.Data = map[string]any{
|
|
|
|
// "value": param.Value,
|
|
|
|
//}
|
2023-11-05 11:57:52 +08:00
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2023-11-17 16:26:03 +08:00
|
|
|
// SetHashValue update hash field.
// Handles three cases based on param.Field / param.NewField:
//   - NewField empty: delete the old field.
//   - Field empty or unchanged: set the field (reported as added or updated
//     depending on whether HSET created a new field).
//   - Field renamed: delete the old field, then set the new one.
//
// The response data reports the resulting diff (added/removed/updated/replaced
// entries) so the frontend can patch its view without a full reload.
func (b *browserService) SetHashValue(param types.SetHashParam) (resp types.JSResp) {
	item, err := b.getRedisClient(param.Server, param.DB)
	if err != nil {
		resp.Msg = err.Error()
		return
	}

	client, ctx := item.client, item.ctx
	key := strutil.DecodeRedisKey(param.Key)
	str := strutil.DecodeRedisKey(param.Value)
	var saveStr, displayStr string
	decoder := Preferences().GetDecoder()
	// serialize the value according to the requested format/decode before storing
	if saveStr, err = convutil.SaveAs(str, param.Format, param.Decode, decoder); err != nil {
		resp.Msg = fmt.Sprintf(`save to type "%s" fail: %s`, param.Format, err.Error())
		return
	}
	// optionally pre-compute the display representation the frontend asked for
	if len(param.RetDecode) > 0 && len(param.RetFormat) > 0 {
		displayStr, _, _ = convutil.ConvertTo(saveStr, param.RetDecode, param.RetFormat, decoder)
	}
	var updated, added, removed []types.HashEntryItem
	var replaced []types.HashReplaceItem
	var affect int64
	if len(param.NewField) <= 0 {
		// new field is empty, delete old field
		_, err = client.HDel(ctx, key, param.Field).Result()
		removed = append(removed, types.HashEntryItem{
			Key: param.Field,
		})
	} else if len(param.Field) <= 0 || param.Field == param.NewField {
		// same field name (or no old field): plain HSET
		// HSET returns the number of fields newly created: 0 => existing field updated
		affect, err = client.HSet(ctx, key, param.NewField, saveStr).Result()
		if affect <= 0 {
			// update field value
			updated = append(updated, types.HashEntryItem{
				Key:          param.NewField,
				Value:        saveStr,
				DisplayValue: displayStr,
			})
		} else {
			// add new field
			added = append(added, types.HashEntryItem{
				Key:          param.NewField,
				Value:        saveStr,
				DisplayValue: displayStr,
			})
		}
	} else {
		// remove old field and add new field
		if _, err = client.HDel(ctx, key, param.Field).Result(); err != nil {
			resp.Msg = err.Error()
			return
		}

		affect, err = client.HSet(ctx, key, param.NewField, saveStr).Result()
		if affect <= 0 {
			// no new field added: the new name already existed, so report the
			// rename as remove(old) + update(new)
			removed = append(removed, types.HashEntryItem{
				Key: param.Field,
			})
			updated = append(updated, types.HashEntryItem{
				Key:          param.NewField,
				Value:        saveStr,
				DisplayValue: displayStr,
			})
		} else {
			// a genuinely new field was created: report as an in-place replacement
			replaced = append(replaced, types.HashReplaceItem{
				Key:          param.Field,
				NewKey:       param.NewField,
				Value:        saveStr,
				DisplayValue: displayStr,
			})
		}
	}
	if err != nil {
		resp.Msg = err.Error()
		return
	}

	resp.Success = true
	resp.Data = struct {
		Added    []types.HashEntryItem   `json:"added,omitempty"`
		Removed  []types.HashEntryItem   `json:"removed,omitempty"`
		Updated  []types.HashEntryItem   `json:"updated,omitempty"`
		Replaced []types.HashReplaceItem `json:"replaced,omitempty"`
	}{
		Added:    added,
		Removed:  removed,
		Updated:  updated,
		Replaced: replaced,
	}
	return
}
|
|
|
|
|
|
|
|
// AddHashField add or update hash field.
// fieldItems is a flat field/value pair list: [f1, v1, f2, v2, ...].
// action == 1 skips fields that already exist (HSETNX); any other action
// overwrites them (HSET). Per-field command errors are deliberately
// discarded — failed fields are simply absent from the reported diff.
// The response data lists which fields were added vs updated.
func (b *browserService) AddHashField(server string, db int, k any, action int, fieldItems []any) (resp types.JSResp) {
	item, err := b.getRedisClient(server, db)
	if err != nil {
		resp.Msg = err.Error()
		return
	}

	client, ctx := item.client, item.ctx
	key := strutil.DecodeRedisKey(k)
	var updated []types.HashEntryItem
	var added []types.HashEntryItem
	switch action {
	case 1:
		// ignore duplicated fields
		for i := 0; i < len(fieldItems); i += 2 {
			field, value := strutil.DecodeRedisKey(fieldItems[i]), strutil.DecodeRedisKey(fieldItems[i+1])
			// HSETNX succeeds only when the field did not exist yet
			if succ, _ := client.HSetNX(ctx, key, field, value).Result(); succ {
				added = append(added, types.HashEntryItem{
					Key:          field,
					Value:        value,
					DisplayValue: "", // TODO: convert to display value
				})
			}
		}
	default:
		// overwrite duplicated fields
		total := len(fieldItems)
		if total > 1 {
			for i := 0; i < total; i += 2 {
				field, value := strutil.DecodeRedisKey(fieldItems[i]), strutil.DecodeRedisKey(fieldItems[i+1])
				// HSET returns >0 when a new field was created, 0 when an
				// existing field's value was overwritten
				if affect, _ := client.HSet(ctx, key, field, value).Result(); affect > 0 {
					added = append(added, types.HashEntryItem{
						Key:          field,
						Value:        value,
						DisplayValue: "", // TODO: convert to display value
					})
				} else {
					updated = append(updated, types.HashEntryItem{
						Key:          field,
						Value:        value,
						DisplayValue: "", // TODO: convert to display value
					})
				}
			}
		}
	}
	// err can only originate from getRedisClient above; loop errors are discarded
	if err != nil {
		resp.Msg = err.Error()
		return
	}

	resp.Success = true
	resp.Data = struct {
		Added   []types.HashEntryItem `json:"added,omitempty"`
		Updated []types.HashEntryItem `json:"updated,omitempty"`
	}{
		Added:   added,
		Updated: updated,
	}
	return
}
|
|
|
|
|
|
|
|
// AddListItem add item to list or remove from it.
// action == 0 pushes the items to the head of the list (preserving their
// given order); any other action appends them to the tail. The response
// reports which entries landed on the left vs right side so the frontend
// can update its view incrementally.
func (b *browserService) AddListItem(server string, db int, k any, action int, items []any) (resp types.JSResp) {
	item, err := b.getRedisClient(server, db)
	if err != nil {
		resp.Msg = err.Error()
		return
	}

	client, ctx := item.client, item.ctx
	key := strutil.DecodeRedisKey(k)
	var leftPush, rightPush []types.ListEntryItem
	switch action {
	case 0:
		// push to head
		// LPUSH prepends arguments one by one, which would reverse their order;
		// reverse first so the final list order matches the input order.
		slices.Reverse(items)
		_, err = client.LPush(ctx, key, items...).Result()
		// iterate the (now reversed) slice backwards so leftPush reports the
		// items in their original input order
		for i := len(items) - 1; i >= 0; i-- {
			leftPush = append(leftPush, types.ListEntryItem{
				Value:        items[i],
				DisplayValue: "", // TODO: convert to display value
			})
		}
	default:
		// append to tail
		_, err = client.RPush(ctx, key, items...).Result()
		for _, it := range items {
			rightPush = append(rightPush, types.ListEntryItem{
				Value:        it,
				DisplayValue: "", // TODO: convert to display value
			})
		}
	}
	if err != nil {
		resp.Msg = err.Error()
		return
	}

	resp.Success = true
	resp.Data = struct {
		Left  []types.ListEntryItem `json:"left,omitempty"`
		Right []types.ListEntryItem `json:"right,omitempty"`
	}{
		Left:  leftPush,
		Right: rightPush,
	}
	return
}
|
|
|
|
|
|
|
|
// SetListItem update or remove list item by index.
// An empty value removes the element at param.Index; otherwise the element
// is replaced with the converted value. Removal uses the classic Redis
// two-step trick (LSET a sentinel, then LREM it) because Redis has no
// "remove by index" command.
func (b *browserService) SetListItem(param types.SetListParam) (resp types.JSResp) {
	item, err := b.getRedisClient(param.Server, param.DB)
	if err != nil {
		resp.Msg = err.Error()
		return
	}

	client, ctx := item.client, item.ctx
	key := strutil.DecodeRedisKey(param.Key)
	str := strutil.DecodeRedisKey(param.Value)
	var replaced, removed []types.ListReplaceItem
	if len(str) <= 0 {
		// remove from list
		// step 1: overwrite the target index with a sentinel value unlikely
		// to collide with real data
		err = client.LSet(ctx, key, param.Index, "---VALUE_REMOVED_BY_TINY_RDM---").Err()
		if err != nil {
			resp.Msg = err.Error()
			return
		}

		// step 2: delete exactly one occurrence of the sentinel
		err = client.LRem(ctx, key, 1, "---VALUE_REMOVED_BY_TINY_RDM---").Err()
		if err != nil {
			resp.Msg = err.Error()
			return
		}
		removed = append(removed, types.ListReplaceItem{
			Index: param.Index,
		})
	} else {
		// replace index value
		var saveStr string
		decoder := Preferences().GetDecoder()
		// serialize according to the requested format/decode before storing
		if saveStr, err = convutil.SaveAs(str, param.Format, param.Decode, decoder); err != nil {
			resp.Msg = fmt.Sprintf(`save to type "%s" fail: %s`, param.Format, err.Error())
			return
		}
		err = client.LSet(ctx, key, param.Index, saveStr).Err()
		if err != nil {
			resp.Msg = err.Error()
			return
		}
		// optionally compute the display representation the frontend asked for
		var displayStr string
		if len(param.RetDecode) > 0 && len(param.RetFormat) > 0 {
			displayStr, _, _ = convutil.ConvertTo(saveStr, param.RetDecode, param.RetFormat, decoder)
		}
		replaced = append(replaced, types.ListReplaceItem{
			Index:        param.Index,
			Value:        saveStr,
			DisplayValue: displayStr,
		})
	}

	resp.Success = true
	resp.Data = struct {
		Removed  []types.ListReplaceItem `json:"removed,omitempty"`
		Replaced []types.ListReplaceItem `json:"replaced,omitempty"`
	}{
		Removed:  removed,
		Replaced: replaced,
	}
	return
}
|
|
|
|
|
|
|
|
// SetSetItem add members to set or remove from set.
// Each member is added (SADD) or removed (SREM) individually; per-member
// command errors are discarded, so the reported diff only contains members
// whose operation actually took effect. Note that the returned Affected
// count reflects only the last command issued, not the total.
func (b *browserService) SetSetItem(server string, db int, k any, remove bool, members []any) (resp types.JSResp) {
	item, err := b.getRedisClient(server, db)
	if err != nil {
		resp.Msg = err.Error()
		return
	}

	client, ctx := item.client, item.ctx
	key := strutil.DecodeRedisKey(k)
	var added, removed []types.SetEntryItem
	var affected int64
	if remove {
		for _, member := range members {
			// SREM returns the number of members actually removed
			if affected, _ = client.SRem(ctx, key, member).Result(); affected > 0 {
				removed = append(removed, types.SetEntryItem{
					Value: member,
				})
			}
		}
	} else {
		for _, member := range members {
			// SADD returns the number of members actually added (0 if it existed)
			if affected, _ = client.SAdd(ctx, key, member).Result(); affected > 0 {
				added = append(added, types.SetEntryItem{
					Value:        member,
					DisplayValue: "", // TODO: convert to display value
				})
			}
		}
	}
	// err can only originate from getRedisClient above; loop errors are discarded
	if err != nil {
		resp.Msg = err.Error()
		return
	}

	resp.Success = true
	resp.Data = struct {
		Added    []types.SetEntryItem `json:"added,omitempty"`
		Removed  []types.SetEntryItem `json:"removed,omitempty"`
		Affected int64                `json:"affected"`
	}{
		Added:    added,
		Removed:  removed,
		Affected: affected,
	}
	return
}
|
|
|
|
|
|
|
|
// UpdateSetItem replace member of set.
// Removes the old member (param.Value), then adds the new one
// (param.NewValue) after serializing it with the requested format/decode.
// The response reports the removed and added entries separately; if the new
// member already existed in the set, SADD affects nothing and no "added"
// entry is reported.
func (b *browserService) UpdateSetItem(param types.SetSetParam) (resp types.JSResp) {
	item, err := b.getRedisClient(param.Server, param.DB)
	if err != nil {
		resp.Msg = err.Error()
		return
	}

	client, ctx := item.client, item.ctx
	key := strutil.DecodeRedisKey(param.Key)
	var added, removed []types.SetEntryItem
	var affect int64
	// remove old value
	str := strutil.DecodeRedisKey(param.Value)
	// SREM error is deliberately discarded; affect > 0 means the member existed
	if affect, _ = client.SRem(ctx, key, str).Result(); affect > 0 {
		removed = append(removed, types.SetEntryItem{
			Value: str,
		})
	}

	// insert new value
	str = strutil.DecodeRedisKey(param.NewValue)
	decoder := Preferences().GetDecoder()
	var saveStr string
	// serialize according to the requested format/decode before storing
	if saveStr, err = convutil.SaveAs(str, param.Format, param.Decode, decoder); err != nil {
		resp.Msg = fmt.Sprintf(`save to type "%s" fail: %s`, param.Format, err.Error())
		return
	}
	if affect, _ = client.SAdd(ctx, key, saveStr).Result(); affect > 0 {
		// add new item
		// optionally compute the display representation the frontend asked for
		var displayStr string
		if len(param.RetDecode) > 0 && len(param.RetFormat) > 0 {
			displayStr, _, _ = convutil.ConvertTo(saveStr, param.RetDecode, param.RetFormat, decoder)
		}
		added = append(added, types.SetEntryItem{
			Value:        saveStr,
			DisplayValue: displayStr,
		})
	}
	// err can only originate from SaveAs above; SRem/SAdd errors are discarded
	if err != nil {
		resp.Msg = err.Error()
		return
	}

	resp.Success = true
	resp.Data = struct {
		Added   []types.SetEntryItem `json:"added,omitempty"`
		Removed []types.SetEntryItem `json:"removed,omitempty"`
	}{
		Added:   added,
		Removed: removed,
	}
	return
}
|
|
|
|
|
|
|
|
// UpdateZSetValue update value of sorted set member.
//
// Behavior depends on the decoded new value:
//   - empty new value: the member is removed (ZREM) and reported in Removed;
//   - new value equal to the old one: ZADD updates the score in place —
//     reported in Added when the member did not exist, Updated otherwise;
//   - different new value: the old member is removed and the new one added —
//     reported in Replaced when a new member was created, or Removed+Updated
//     when ZADD merely re-scored an already-existing target member.
// The remove-then-add path is not atomic.
func (b *browserService) UpdateZSetValue(param types.SetZSetParam) (resp types.JSResp) {
	item, err := b.getRedisClient(param.Server, param.DB)
	if err != nil {
		resp.Msg = err.Error()
		return
	}

	client, ctx := item.client, item.ctx
	key := strutil.DecodeRedisKey(param.Key)
	val, newVal := strutil.DecodeRedisKey(param.Value), strutil.DecodeRedisKey(param.NewValue)
	var added, updated, removed []types.ZSetEntryItem
	var replaced []types.ZSetReplaceItem
	var affect int64
	decoder := Preferences().GetDecoder()
	if len(newVal) <= 0 {
		// no new value, delete value
		if affect, err = client.ZRem(ctx, key, val).Result(); affect > 0 {
			//removed = append(removed, val)
			removed = append(removed, types.ZSetEntryItem{
				Value: val,
			})
		}
	} else {
		// convert the new value to its storage representation first
		var saveVal string
		if saveVal, err = convutil.SaveAs(newVal, param.Format, param.Decode, decoder); err != nil {
			resp.Msg = fmt.Sprintf(`save to type "%s" fail: %s`, param.Format, err.Error())
			return
		}

		if saveVal == val {
			// member unchanged: ZADD either creates it or just updates the score
			affect, err = client.ZAdd(ctx, key, redis.Z{
				Score:  param.Score,
				Member: saveVal,
			}).Result()
			displayValue, _, _ := convutil.ConvertTo(val, param.RetDecode, param.RetFormat, decoder)
			if affect > 0 {
				// add new item
				added = append(added, types.ZSetEntryItem{
					Score:        param.Score,
					Value:        val,
					DisplayValue: displayValue,
				})
			} else {
				// update score only
				updated = append(updated, types.ZSetEntryItem{
					Score:        param.Score,
					Value:        val,
					DisplayValue: displayValue,
				})
			}
		} else {
			// remove old value and add new one
			_, err = client.ZRem(ctx, key, val).Result()
			if err != nil {
				resp.Msg = err.Error()
				return
			}

			affect, err = client.ZAdd(ctx, key, redis.Z{
				Score:  param.Score,
				Member: saveVal,
			}).Result()
			displayValue, _, _ := convutil.ConvertTo(saveVal, param.RetDecode, param.RetFormat, decoder)
			if affect <= 0 {
				// no new value added, just update exists item
				removed = append(removed, types.ZSetEntryItem{
					Value: val,
				})
				updated = append(updated, types.ZSetEntryItem{
					Score:        param.Score,
					Value:        saveVal,
					DisplayValue: displayValue,
				})
			} else {
				// add new field
				replaced = append(replaced, types.ZSetReplaceItem{
					Score:        param.Score,
					Value:        val,
					NewValue:     saveVal,
					DisplayValue: displayValue,
				})
			}
		}
	}
	if err != nil {
		resp.Msg = err.Error()
		return
	}

	resp.Success = true
	resp.Data = struct {
		Added    []types.ZSetEntryItem   `json:"added,omitempty"`
		Updated  []types.ZSetEntryItem   `json:"updated,omitempty"`
		Replaced []types.ZSetReplaceItem `json:"replaced,omitempty"`
		Removed  []types.ZSetEntryItem   `json:"removed,omitempty"`
	}{
		Added:    added,
		Updated:  updated,
		Replaced: replaced,
		Removed:  removed,
	}
	return
}
|
|
|
|
|
|
|
|
// AddZSetValue add item to sorted set
|
2024-01-05 18:24:38 +08:00
|
|
|
func (b *browserService) AddZSetValue(server string, db int, k any, action int, valueScore map[string]float64) (resp types.JSResp) {
|
|
|
|
item, err := b.getRedisClient(server, db)
|
2023-11-05 11:57:52 +08:00
|
|
|
if err != nil {
|
|
|
|
resp.Msg = err.Error()
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
client, ctx := item.client, item.ctx
|
|
|
|
key := strutil.DecodeRedisKey(k)
|
|
|
|
|
2023-11-17 01:24:04 +08:00
|
|
|
var added, updated []types.ZSetEntryItem
|
2023-11-05 11:57:52 +08:00
|
|
|
switch action {
|
|
|
|
case 1:
|
|
|
|
// ignore duplicated fields
|
2023-11-17 01:24:04 +08:00
|
|
|
for m, s := range valueScore {
|
|
|
|
if affect, _ := client.ZAddNX(ctx, key, redis.Z{Score: s, Member: m}).Result(); affect > 0 {
|
|
|
|
added = append(added, types.ZSetEntryItem{
|
|
|
|
Score: s,
|
|
|
|
Value: m,
|
|
|
|
DisplayValue: "", // TODO: convert to display value
|
|
|
|
})
|
|
|
|
}
|
|
|
|
}
|
2023-11-05 11:57:52 +08:00
|
|
|
default:
|
|
|
|
// overwrite duplicated fields
|
2023-11-17 01:24:04 +08:00
|
|
|
for m, s := range valueScore {
|
|
|
|
if affect, _ := client.ZAdd(ctx, key, redis.Z{Score: s, Member: m}).Result(); affect > 0 {
|
|
|
|
added = append(added, types.ZSetEntryItem{
|
|
|
|
Score: s,
|
|
|
|
Value: m,
|
|
|
|
DisplayValue: "", // TODO: convert to display value
|
|
|
|
})
|
|
|
|
} else {
|
|
|
|
updated = append(updated, types.ZSetEntryItem{
|
|
|
|
Score: s,
|
|
|
|
Value: m,
|
|
|
|
DisplayValue: "", // TODO: convert to display value
|
|
|
|
})
|
|
|
|
}
|
|
|
|
}
|
2023-11-05 11:57:52 +08:00
|
|
|
}
|
|
|
|
if err != nil {
|
|
|
|
resp.Msg = err.Error()
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
resp.Success = true
|
2023-11-17 01:24:04 +08:00
|
|
|
resp.Data = struct {
|
|
|
|
Added []types.ZSetEntryItem `json:"added,omitempty"`
|
|
|
|
Updated []types.ZSetEntryItem `json:"updated,omitempty"`
|
|
|
|
}{
|
|
|
|
Added: added,
|
|
|
|
Updated: updated,
|
|
|
|
}
|
2023-11-05 11:57:52 +08:00
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
// AddStreamValue add stream field
|
2024-01-05 18:24:38 +08:00
|
|
|
func (b *browserService) AddStreamValue(server string, db int, k any, ID string, fieldItems []any) (resp types.JSResp) {
|
|
|
|
item, err := b.getRedisClient(server, db)
|
2023-11-05 11:57:52 +08:00
|
|
|
if err != nil {
|
|
|
|
resp.Msg = err.Error()
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
client, ctx := item.client, item.ctx
|
|
|
|
key := strutil.DecodeRedisKey(k)
|
2023-11-08 23:45:33 +08:00
|
|
|
var updateID string
|
|
|
|
updateID, err = client.XAdd(ctx, &redis.XAddArgs{
|
2023-11-05 11:57:52 +08:00
|
|
|
Stream: key,
|
|
|
|
ID: ID,
|
|
|
|
Values: fieldItems,
|
|
|
|
}).Result()
|
|
|
|
if err != nil {
|
|
|
|
resp.Msg = err.Error()
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2023-11-17 01:24:04 +08:00
|
|
|
updateValues := make(map[string]any, len(fieldItems)/2)
|
|
|
|
for i := 0; i < len(fieldItems)/2; i += 2 {
|
|
|
|
updateValues[fieldItems[i].(string)] = fieldItems[i+1]
|
|
|
|
}
|
|
|
|
vb, _ := json.Marshal(updateValues)
|
2024-02-03 15:06:23 +08:00
|
|
|
displayValue, _, _ := convutil.ConvertTo(string(vb), types.DECODE_NONE, types.FORMAT_JSON, Preferences().GetDecoder())
|
2023-11-17 01:24:04 +08:00
|
|
|
|
2023-11-05 11:57:52 +08:00
|
|
|
resp.Success = true
|
2023-11-17 01:24:04 +08:00
|
|
|
resp.Data = struct {
|
|
|
|
Added []types.StreamEntryItem `json:"added,omitempty"`
|
|
|
|
}{
|
|
|
|
Added: []types.StreamEntryItem{
|
|
|
|
{
|
|
|
|
ID: updateID,
|
|
|
|
Value: updateValues,
|
|
|
|
DisplayValue: displayValue, // TODO: convert to display value
|
|
|
|
},
|
|
|
|
},
|
2023-11-08 23:45:33 +08:00
|
|
|
}
|
2023-11-05 11:57:52 +08:00
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
// RemoveStreamValues remove stream values by id
|
2024-01-05 18:24:38 +08:00
|
|
|
func (b *browserService) RemoveStreamValues(server string, db int, k any, IDs []string) (resp types.JSResp) {
|
|
|
|
item, err := b.getRedisClient(server, db)
|
2023-11-05 11:57:52 +08:00
|
|
|
if err != nil {
|
|
|
|
resp.Msg = err.Error()
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
client, ctx := item.client, item.ctx
|
|
|
|
key := strutil.DecodeRedisKey(k)
|
2023-11-08 23:45:33 +08:00
|
|
|
|
|
|
|
var affected int64
|
|
|
|
affected, err = client.XDel(ctx, key, IDs...).Result()
|
|
|
|
if err != nil {
|
|
|
|
resp.Msg = err.Error()
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2023-11-05 11:57:52 +08:00
|
|
|
resp.Success = true
|
2023-11-18 00:51:40 +08:00
|
|
|
resp.Data = struct {
|
|
|
|
Affected int64 `json:"affected"`
|
|
|
|
}{
|
|
|
|
Affected: affected,
|
2023-11-08 23:45:33 +08:00
|
|
|
}
|
2023-11-05 11:57:52 +08:00
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
// SetKeyTTL set ttl of key
|
2024-01-05 18:24:38 +08:00
|
|
|
func (b *browserService) SetKeyTTL(server string, db int, k any, ttl int64) (resp types.JSResp) {
|
|
|
|
item, err := b.getRedisClient(server, db)
|
2023-11-05 11:57:52 +08:00
|
|
|
if err != nil {
|
|
|
|
resp.Msg = err.Error()
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
client, ctx := item.client, item.ctx
|
|
|
|
key := strutil.DecodeRedisKey(k)
|
|
|
|
if ttl < 0 {
|
|
|
|
if err = client.Persist(ctx, key).Err(); err != nil {
|
|
|
|
resp.Msg = err.Error()
|
|
|
|
return
|
|
|
|
}
|
|
|
|
} else {
|
2024-01-05 00:36:48 +08:00
|
|
|
expiration := time.Duration(ttl) * time.Second
|
2023-11-05 11:57:52 +08:00
|
|
|
if err = client.Expire(ctx, key, expiration).Err(); err != nil {
|
|
|
|
resp.Msg = err.Error()
|
|
|
|
return
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
resp.Success = true
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2024-01-05 00:36:48 +08:00
|
|
|
// BatchSetTTL batch set ttl
|
|
|
|
func (b *browserService) BatchSetTTL(server string, db int, ks []any, ttl int64, serialNo string) (resp types.JSResp) {
|
|
|
|
conf := Connection().getConnection(server)
|
|
|
|
if conf == nil {
|
|
|
|
resp.Msg = fmt.Sprintf("no connection profile named: %s", server)
|
|
|
|
return
|
|
|
|
}
|
|
|
|
var client redis.UniversalClient
|
|
|
|
var err error
|
|
|
|
var connConfig = conf.ConnectionConfig
|
|
|
|
connConfig.LastDB = db
|
|
|
|
if client, err = b.createRedisClient(connConfig); err != nil {
|
|
|
|
resp.Msg = err.Error()
|
|
|
|
return
|
|
|
|
}
|
|
|
|
ctx, cancelFunc := context.WithCancel(b.ctx)
|
|
|
|
defer client.Close()
|
|
|
|
defer cancelFunc()
|
|
|
|
|
|
|
|
//cancelEvent := "ttling:stop:" + serialNo
|
|
|
|
//runtime.EventsOnce(ctx, cancelEvent, func(data ...any) {
|
|
|
|
// cancelFunc()
|
|
|
|
//})
|
|
|
|
//processEvent := "ttling:" + serialNo
|
|
|
|
total := len(ks)
|
|
|
|
var failed, updated atomic.Int64
|
|
|
|
var canceled bool
|
|
|
|
|
|
|
|
expiration := time.Now().Add(time.Duration(ttl) * time.Second)
|
|
|
|
del := func(ctx context.Context, cli redis.UniversalClient) error {
|
|
|
|
startTime := time.Now().Add(-10 * time.Second)
|
|
|
|
for i, k := range ks {
|
|
|
|
// emit progress per second
|
|
|
|
//param := map[string]any{
|
|
|
|
// "total": total,
|
|
|
|
// "progress": i + 1,
|
|
|
|
// "processing": k,
|
|
|
|
//}
|
|
|
|
if i >= total-1 || time.Now().Sub(startTime).Milliseconds() > 100 {
|
|
|
|
startTime = time.Now()
|
|
|
|
//runtime.EventsEmit(b.ctx, processEvent, param)
|
|
|
|
// do some sleep to prevent blocking the Redis server
|
|
|
|
time.Sleep(10 * time.Millisecond)
|
|
|
|
}
|
|
|
|
|
|
|
|
key := strutil.DecodeRedisKey(k)
|
|
|
|
var expErr error
|
|
|
|
if ttl < 0 {
|
|
|
|
expErr = cli.Persist(ctx, key).Err()
|
|
|
|
} else {
|
|
|
|
expErr = cli.ExpireAt(ctx, key, expiration).Err()
|
|
|
|
}
|
|
|
|
if err != nil {
|
|
|
|
failed.Add(1)
|
|
|
|
} else {
|
|
|
|
// save deleted key
|
|
|
|
updated.Add(1)
|
|
|
|
}
|
|
|
|
if errors.Is(expErr, context.Canceled) || canceled {
|
|
|
|
canceled = true
|
|
|
|
break
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
|
|
|
if cluster, ok := client.(*redis.ClusterClient); ok {
|
|
|
|
// cluster mode
|
|
|
|
err = cluster.ForEachMaster(ctx, func(ctx context.Context, cli *redis.Client) error {
|
|
|
|
return del(ctx, cli)
|
|
|
|
})
|
|
|
|
} else {
|
|
|
|
err = del(ctx, client)
|
|
|
|
}
|
|
|
|
|
|
|
|
//runtime.EventsOff(ctx, cancelEvent)
|
|
|
|
resp.Success = true
|
|
|
|
resp.Data = struct {
|
|
|
|
Canceled bool `json:"canceled"`
|
|
|
|
Updated int64 `json:"updated"`
|
|
|
|
Failed int64 `json:"failed"`
|
|
|
|
}{
|
|
|
|
Canceled: canceled,
|
|
|
|
Updated: updated.Load(),
|
|
|
|
Failed: failed.Load(),
|
|
|
|
}
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2023-11-05 11:57:52 +08:00
|
|
|
// DeleteKey remove redis key
|
2023-12-03 12:27:05 +08:00
|
|
|
func (b *browserService) DeleteKey(server string, db int, k any, async bool) (resp types.JSResp) {
|
|
|
|
item, err := b.getRedisClient(server, db)
|
2023-11-05 11:57:52 +08:00
|
|
|
if err != nil {
|
|
|
|
resp.Msg = err.Error()
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
client, ctx := item.client, item.ctx
|
|
|
|
key := strutil.DecodeRedisKey(k)
|
|
|
|
var deletedKeys []string
|
|
|
|
if strings.HasSuffix(key, "*") {
|
|
|
|
// delete by prefix
|
|
|
|
var mutex sync.Mutex
|
2023-11-10 15:57:19 +08:00
|
|
|
supportUnlink := true
|
2023-11-05 11:57:52 +08:00
|
|
|
del := func(ctx context.Context, cli redis.UniversalClient) error {
|
|
|
|
handleDel := func(ks []string) error {
|
2023-11-10 15:57:19 +08:00
|
|
|
var delErr error
|
|
|
|
if async && supportUnlink {
|
|
|
|
supportUnlink = false
|
|
|
|
if delErr = cli.Unlink(ctx, ks...).Err(); delErr != nil {
|
|
|
|
// not support unlink? try del command
|
|
|
|
delErr = cli.Del(ctx, ks...).Err()
|
2023-11-05 11:57:52 +08:00
|
|
|
}
|
2023-11-10 15:57:19 +08:00
|
|
|
} else {
|
|
|
|
delErr = cli.Del(ctx, ks...).Err()
|
2023-11-05 11:57:52 +08:00
|
|
|
}
|
|
|
|
|
|
|
|
mutex.Lock()
|
|
|
|
deletedKeys = append(deletedKeys, ks...)
|
|
|
|
mutex.Unlock()
|
|
|
|
|
2023-11-10 15:57:19 +08:00
|
|
|
return delErr
|
2023-11-05 11:57:52 +08:00
|
|
|
}
|
|
|
|
|
|
|
|
scanSize := int64(Preferences().GetScanSize())
|
|
|
|
iter := cli.Scan(ctx, 0, key, scanSize).Iterator()
|
|
|
|
resultKeys := make([]string, 0, 100)
|
|
|
|
for iter.Next(ctx) {
|
|
|
|
resultKeys = append(resultKeys, iter.Val())
|
2023-11-10 15:57:19 +08:00
|
|
|
if len(resultKeys) >= 20 {
|
2023-11-05 11:57:52 +08:00
|
|
|
handleDel(resultKeys)
|
|
|
|
resultKeys = resultKeys[:0:cap(resultKeys)]
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
if len(resultKeys) > 0 {
|
|
|
|
handleDel(resultKeys)
|
|
|
|
}
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
|
|
|
if cluster, ok := client.(*redis.ClusterClient); ok {
|
|
|
|
// cluster mode
|
|
|
|
err = cluster.ForEachMaster(ctx, func(ctx context.Context, cli *redis.Client) error {
|
|
|
|
return del(ctx, cli)
|
|
|
|
})
|
|
|
|
} else {
|
|
|
|
err = del(ctx, client)
|
|
|
|
}
|
|
|
|
|
|
|
|
if err != nil {
|
|
|
|
resp.Msg = err.Error()
|
|
|
|
return
|
|
|
|
}
|
|
|
|
} else {
|
|
|
|
// delete key only
|
|
|
|
if async {
|
2023-11-10 15:57:19 +08:00
|
|
|
if err = client.Unlink(ctx, key).Err(); err != nil {
|
|
|
|
if err = client.Del(ctx, key).Err(); err != nil {
|
|
|
|
resp.Msg = err.Error()
|
|
|
|
return
|
|
|
|
}
|
2023-11-05 11:57:52 +08:00
|
|
|
}
|
|
|
|
} else {
|
2023-11-10 15:57:19 +08:00
|
|
|
if err = client.Del(ctx, key).Err(); err != nil {
|
2023-11-05 11:57:52 +08:00
|
|
|
resp.Msg = err.Error()
|
|
|
|
return
|
|
|
|
}
|
|
|
|
}
|
|
|
|
deletedKeys = append(deletedKeys, key)
|
|
|
|
}
|
|
|
|
|
|
|
|
resp.Success = true
|
|
|
|
resp.Data = map[string]any{
|
2023-11-05 13:00:03 +08:00
|
|
|
"deleted": deletedKeys,
|
|
|
|
"deleteCount": len(deletedKeys),
|
2023-11-05 11:57:52 +08:00
|
|
|
}
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2023-12-03 12:27:05 +08:00
|
|
|
// DeleteOneKey delete one key
|
|
|
|
func (b *browserService) DeleteOneKey(server string, db int, k any) (resp types.JSResp) {
|
|
|
|
item, err := b.getRedisClient(server, db)
|
|
|
|
if err != nil {
|
|
|
|
resp.Msg = err.Error()
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
client, ctx := item.client, item.ctx
|
|
|
|
key := strutil.DecodeRedisKey(k)
|
|
|
|
if cluster, ok := client.(*redis.ClusterClient); ok {
|
|
|
|
// cluster mode
|
|
|
|
err = cluster.ForEachMaster(ctx, func(ctx context.Context, cli *redis.Client) error {
|
|
|
|
return cli.Del(ctx, key).Err()
|
|
|
|
})
|
|
|
|
} else {
|
|
|
|
err = client.Del(ctx, key).Err()
|
|
|
|
}
|
|
|
|
|
|
|
|
if err != nil {
|
|
|
|
resp.Msg = err.Error()
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
resp.Success = true
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2023-12-19 20:10:01 +08:00
|
|
|
// DeleteKeys delete keys sync with notification.
//
// Opens a dedicated connection, deletes the given keys in pipelined batches
// of 1000, and supports cancellation through the "delete:stop:<serialNo>"
// frontend event. resp.Data reports the keys confirmed deleted, the number
// that failed (DEL replied != 1), and whether the run was canceled.
func (b *browserService) DeleteKeys(server string, db int, ks []any, serialNo string) (resp types.JSResp) {
	// connect a new connection to export keys
	conf := Connection().getConnection(server)
	if conf == nil {
		resp.Msg = fmt.Sprintf("no connection profile named: %s", server)
		return
	}
	var client redis.UniversalClient
	var err error
	var connConfig = conf.ConnectionConfig
	connConfig.LastDB = db
	if client, err = b.createRedisClient(connConfig); err != nil {
		resp.Msg = err.Error()
		return
	}
	ctx, cancelFunc := context.WithCancel(b.ctx)
	defer client.Close()
	defer cancelFunc()

	// frontend can fire this event to abort the batch mid-flight
	cancelEvent := "delete:stop:" + serialNo
	cancelStopEvent := runtime.EventsOnce(ctx, cancelEvent, func(data ...any) {
		cancelFunc()
	})
	total := len(ks)
	var failed atomic.Int64
	var canceled bool
	var deletedKeys = make([]any, 0, total)
	// mutex guards deletedKeys: in cluster mode del may run concurrently
	var mutex sync.Mutex
	del := func(ctx context.Context, cli redis.UniversalClient) error {
		const batchSize = 1000
		for i := 0; i < total; i += batchSize {
			pipe := cli.Pipeline()
			for j := 0; j < batchSize; j++ {
				if i+j < total {
					pipe.Del(ctx, strutil.DecodeRedisKey(ks[i+j]))
				}
			}
			cmders, delErr := pipe.Exec(ctx)
			// a DEL reply of 1 means the key existed and was removed;
			// anything else counts as a failure for that key
			for j, cmder := range cmders {
				if cmder.(*redis.IntCmd).Val() != 1 {
					failed.Add(1)
				} else {
					// save deleted key
					mutex.Lock()
					deletedKeys = append(deletedKeys, ks[i+j])
					mutex.Unlock()
				}
			}
			if errors.Is(delErr, context.Canceled) || canceled {
				canceled = true
				break
			}
		}
		return nil
	}

	if cluster, ok := client.(*redis.ClusterClient); ok {
		// cluster mode
		// NOTE(review): every master receives the full key list, so keys not
		// owned by a master count as failures there — verify intended.
		err = cluster.ForEachMaster(ctx, func(ctx context.Context, cli *redis.Client) error {
			return del(ctx, cli)
		})
	} else {
		err = del(ctx, client)
	}

	// detach the stop-event listener before returning
	cancelStopEvent()
	resp.Success = true
	resp.Data = struct {
		Canceled bool  `json:"canceled"`
		Deleted  any   `json:"deleted"`
		Failed   int64 `json:"failed"`
	}{
		Canceled: canceled,
		Deleted:  deletedKeys,
		Failed:   failed.Load(),
	}
	return
}
|
|
|
|
|
2023-12-18 00:58:20 +08:00
|
|
|
// ExportKey export keys.
//
// Opens a dedicated connection and writes one CSV row per key to `path`:
// hex(key), hex(DUMP payload), and — when includeExpire is set — the absolute
// expiry in unix milliseconds (or "-1" when the key has no TTL). Progress is
// emitted on the "exporting:<path>" event and the run can be aborted via the
// "export:stop:<path>" event.
func (b *browserService) ExportKey(server string, db int, ks []any, path string, includeExpire bool) (resp types.JSResp) {
	// connect a new connection to export keys
	conf := Connection().getConnection(server)
	if conf == nil {
		resp.Msg = fmt.Sprintf("no connection profile named: %s", server)
		return
	}
	var client redis.UniversalClient
	var err error
	var connConfig = conf.ConnectionConfig
	connConfig.LastDB = db
	if client, err = b.createRedisClient(connConfig); err != nil {
		resp.Msg = err.Error()
		return
	}
	ctx, cancelFunc := context.WithCancel(b.ctx)
	defer client.Close()
	defer cancelFunc()

	file, err := os.Create(path)
	if err != nil {
		resp.Msg = err.Error()
		return
	}
	defer file.Close()

	writer := csv.NewWriter(file)
	defer writer.Flush()

	// frontend can fire this event to abort the export mid-flight
	cancelStopEvent := runtime.EventsOnce(ctx, "export:stop:"+path, func(data ...any) {
		cancelFunc()
	})
	processEvent := "exporting:" + path
	total := len(ks)
	var exported, failed int64
	var canceled bool
	// primed in the past so the first iteration emits progress immediately
	startTime := time.Now().Add(-10 * time.Second)
	for i, k := range ks {
		// emit progress at most every ~100ms, and always for the final key
		if i >= total-1 || time.Now().Sub(startTime).Milliseconds() > 100 {
			startTime = time.Now()
			param := map[string]any{
				"total":      total,
				"progress":   i + 1,
				"processing": k,
			}
			runtime.EventsEmit(ctx, processEvent, param)
		}

		key := strutil.DecodeRedisKey(k)
		content, dumpErr := client.Dump(ctx, key).Bytes()
		if errors.Is(dumpErr, context.Canceled) || canceled {
			canceled = true
			break
		}
		// hex-encode both key and payload so arbitrary binary survives CSV
		record := []string{hex.EncodeToString([]byte(key)), hex.EncodeToString(content)}
		if includeExpire {
			// store the absolute expiry time so the TTL survives a slow export
			if dur, ttlErr := client.PTTL(ctx, key).Result(); ttlErr == nil && dur > 0 {
				record = append(record, strconv.FormatInt(time.Now().Add(dur).UnixMilli(), 10))
			} else {
				record = append(record, "-1")
			}
		}
		if err = writer.Write(record); err != nil {
			failed += 1
		} else {
			exported += 1
		}
	}

	// detach the stop-event listener before returning
	cancelStopEvent()
	resp.Success = true
	resp.Data = struct {
		Canceled bool  `json:"canceled"`
		Exported int64 `json:"exported"`
		Failed   int64 `json:"failed"`
	}{
		Canceled: canceled,
		Exported: exported,
		Failed:   failed,
	}
	return
}
|
|
|
|
|
2023-12-27 15:44:08 +08:00
|
|
|
// ImportCSV import data from csv file
|
2024-01-05 17:46:12 +08:00
|
|
|
func (b *browserService) ImportCSV(server string, db int, path string, conflict int, ttl int64) (resp types.JSResp) {
|
2023-12-27 15:44:08 +08:00
|
|
|
// connect a new connection to export keys
|
|
|
|
conf := Connection().getConnection(server)
|
|
|
|
if conf == nil {
|
|
|
|
resp.Msg = fmt.Sprintf("no connection profile named: %s", server)
|
|
|
|
return
|
|
|
|
}
|
|
|
|
var client redis.UniversalClient
|
|
|
|
var err error
|
|
|
|
var connConfig = conf.ConnectionConfig
|
|
|
|
connConfig.LastDB = db
|
|
|
|
if client, err = b.createRedisClient(connConfig); err != nil {
|
|
|
|
resp.Msg = err.Error()
|
|
|
|
return
|
|
|
|
}
|
|
|
|
ctx, cancelFunc := context.WithCancel(b.ctx)
|
|
|
|
defer client.Close()
|
|
|
|
defer cancelFunc()
|
|
|
|
|
|
|
|
file, err := os.Open(path)
|
|
|
|
if err != nil {
|
|
|
|
resp.Msg = err.Error()
|
|
|
|
return
|
|
|
|
}
|
|
|
|
defer file.Close()
|
|
|
|
|
|
|
|
reader := csv.NewReader(file)
|
|
|
|
|
|
|
|
cancelEvent := "import:stop:" + path
|
2024-01-23 11:26:15 +08:00
|
|
|
cancelStopEvent := runtime.EventsOnce(ctx, cancelEvent, func(data ...any) {
|
2023-12-27 15:44:08 +08:00
|
|
|
cancelFunc()
|
|
|
|
})
|
|
|
|
processEvent := "importing:" + path
|
|
|
|
var line []string
|
|
|
|
var readErr error
|
|
|
|
var key, value []byte
|
2024-01-05 17:46:12 +08:00
|
|
|
var ttlValue time.Duration
|
2023-12-27 15:44:08 +08:00
|
|
|
var imported, ignored int64
|
|
|
|
var canceled bool
|
|
|
|
startTime := time.Now().Add(-10 * time.Second)
|
|
|
|
for {
|
|
|
|
readErr = nil
|
|
|
|
|
2024-01-05 17:46:12 +08:00
|
|
|
ttlValue = redis.KeepTTL
|
2023-12-27 15:44:08 +08:00
|
|
|
line, readErr = reader.Read()
|
|
|
|
if readErr != nil {
|
|
|
|
break
|
|
|
|
}
|
|
|
|
|
|
|
|
if len(line) < 1 {
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
if key, readErr = hex.DecodeString(line[0]); readErr != nil {
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
if value, readErr = hex.DecodeString(line[1]); readErr != nil {
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
// get ttl
|
2024-01-06 01:25:20 +08:00
|
|
|
if ttl < 0 && len(line) > 2 {
|
2024-01-05 17:46:12 +08:00
|
|
|
// use previous
|
2023-12-27 17:41:51 +08:00
|
|
|
if expire, ttlErr := strconv.ParseInt(line[2], 10, 64); ttlErr == nil && expire > 0 {
|
2024-01-05 17:46:12 +08:00
|
|
|
ttlValue = time.UnixMilli(expire).Sub(time.Now())
|
2023-12-27 15:44:08 +08:00
|
|
|
}
|
2024-01-05 17:46:12 +08:00
|
|
|
} else if ttl > 0 {
|
|
|
|
// custom ttl
|
|
|
|
ttlValue = time.Duration(ttl) * time.Second
|
2023-12-27 15:44:08 +08:00
|
|
|
}
|
|
|
|
if conflict == 0 {
|
2024-01-05 17:46:12 +08:00
|
|
|
readErr = client.RestoreReplace(ctx, string(key), ttlValue, string(value)).Err()
|
2023-12-27 15:44:08 +08:00
|
|
|
} else {
|
|
|
|
keyStr := string(key)
|
|
|
|
// go-redis may crash when batch calling restore
|
|
|
|
// use "exists" to filter first
|
|
|
|
if n, _ := client.Exists(ctx, keyStr).Result(); n <= 0 {
|
2024-01-05 17:46:12 +08:00
|
|
|
readErr = client.Restore(ctx, keyStr, ttlValue, string(value)).Err()
|
2023-12-27 15:44:08 +08:00
|
|
|
} else {
|
2024-01-05 17:46:12 +08:00
|
|
|
readErr = errors.New("key already existed")
|
2023-12-27 15:44:08 +08:00
|
|
|
}
|
|
|
|
}
|
|
|
|
if readErr != nil {
|
|
|
|
// restore fail
|
|
|
|
ignored += 1
|
|
|
|
} else {
|
|
|
|
imported += 1
|
|
|
|
}
|
|
|
|
if errors.Is(readErr, context.Canceled) || canceled {
|
|
|
|
canceled = true
|
|
|
|
break
|
|
|
|
}
|
|
|
|
|
|
|
|
if time.Now().Sub(startTime).Milliseconds() > 100 {
|
|
|
|
startTime = time.Now()
|
|
|
|
param := map[string]any{
|
|
|
|
"imported": imported,
|
|
|
|
"ignored": ignored,
|
|
|
|
//"processing": string(key),
|
|
|
|
}
|
|
|
|
runtime.EventsEmit(b.ctx, processEvent, param)
|
|
|
|
// do some sleep to prevent blocking the Redis server
|
|
|
|
time.Sleep(10 * time.Millisecond)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2024-01-23 11:26:15 +08:00
|
|
|
cancelStopEvent()
|
2023-12-27 15:44:08 +08:00
|
|
|
resp.Success = true
|
|
|
|
resp.Data = struct {
|
|
|
|
Canceled bool `json:"canceled"`
|
|
|
|
Imported int64 `json:"imported"`
|
|
|
|
Ignored int64 `json:"ignored"`
|
|
|
|
}{
|
|
|
|
Canceled: canceled,
|
|
|
|
Imported: imported,
|
|
|
|
Ignored: ignored,
|
|
|
|
}
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2023-11-05 11:57:52 +08:00
|
|
|
// FlushDB flush database
|
2024-01-05 18:24:38 +08:00
|
|
|
func (b *browserService) FlushDB(server string, db int, async bool) (resp types.JSResp) {
|
|
|
|
item, err := b.getRedisClient(server, db)
|
2023-11-05 11:57:52 +08:00
|
|
|
if err != nil {
|
|
|
|
resp.Msg = err.Error()
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2023-11-10 15:57:19 +08:00
|
|
|
flush := func(ctx context.Context, cli redis.UniversalClient, async bool) error {
|
|
|
|
_, e := cli.TxPipelined(ctx, func(pipe redis.Pipeliner) error {
|
2023-11-05 11:57:52 +08:00
|
|
|
pipe.Select(ctx, db)
|
|
|
|
if async {
|
|
|
|
pipe.FlushDBAsync(ctx)
|
|
|
|
} else {
|
|
|
|
pipe.FlushDB(ctx)
|
|
|
|
}
|
|
|
|
return nil
|
|
|
|
})
|
2023-11-10 15:57:19 +08:00
|
|
|
return e
|
2023-11-05 11:57:52 +08:00
|
|
|
}
|
|
|
|
|
|
|
|
client, ctx := item.client, item.ctx
|
|
|
|
if cluster, ok := client.(*redis.ClusterClient); ok {
|
|
|
|
// cluster mode
|
|
|
|
err = cluster.ForEachMaster(ctx, func(ctx context.Context, cli *redis.Client) error {
|
2023-11-10 15:57:19 +08:00
|
|
|
return flush(ctx, cli, async)
|
2023-11-05 11:57:52 +08:00
|
|
|
})
|
2023-11-10 15:57:19 +08:00
|
|
|
// try sync mode if error cause
|
|
|
|
if err != nil && async {
|
|
|
|
err = cluster.ForEachMaster(ctx, func(ctx context.Context, cli *redis.Client) error {
|
|
|
|
return flush(ctx, cli, false)
|
|
|
|
})
|
|
|
|
}
|
2023-11-05 11:57:52 +08:00
|
|
|
} else {
|
2023-11-10 15:57:19 +08:00
|
|
|
if err = flush(ctx, client, async); err != nil && async {
|
|
|
|
// try sync mode if error cause
|
|
|
|
err = flush(ctx, client, false)
|
|
|
|
}
|
2023-11-05 11:57:52 +08:00
|
|
|
}
|
|
|
|
|
|
|
|
if err != nil {
|
|
|
|
resp.Msg = err.Error()
|
|
|
|
return
|
|
|
|
}
|
|
|
|
resp.Success = true
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
// RenameKey rename key
|
2024-01-05 18:24:38 +08:00
|
|
|
func (b *browserService) RenameKey(server string, db int, key, newKey string) (resp types.JSResp) {
|
|
|
|
item, err := b.getRedisClient(server, db)
|
2023-11-05 11:57:52 +08:00
|
|
|
if err != nil {
|
|
|
|
resp.Msg = err.Error()
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
client, ctx := item.client, item.ctx
|
|
|
|
if _, ok := client.(*redis.ClusterClient); ok {
|
|
|
|
resp.Msg = "RENAME not support in cluster mode yet"
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
if _, err = client.RenameNX(ctx, key, newKey).Result(); err != nil {
|
|
|
|
resp.Msg = err.Error()
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
resp.Success = true
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
// GetCmdHistory get redis command history
|
|
|
|
func (b *browserService) GetCmdHistory(pageNo, pageSize int) (resp types.JSResp) {
|
|
|
|
resp.Success = true
|
|
|
|
if pageSize <= 0 || pageNo <= 0 {
|
|
|
|
// return all history
|
|
|
|
resp.Data = map[string]any{
|
|
|
|
"list": b.cmdHistory,
|
|
|
|
"pageNo": 1,
|
|
|
|
"pageSize": -1,
|
|
|
|
}
|
|
|
|
} else {
|
|
|
|
total := len(b.cmdHistory)
|
|
|
|
startIndex := total / pageSize * (pageNo - 1)
|
|
|
|
endIndex := min(startIndex+pageSize, total)
|
|
|
|
resp.Data = map[string]any{
|
|
|
|
"list": b.cmdHistory[startIndex:endIndex],
|
|
|
|
"pageNo": pageNo,
|
|
|
|
"pageSize": pageSize,
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
// CleanCmdHistory clean redis command history
|
|
|
|
func (b *browserService) CleanCmdHistory() (resp types.JSResp) {
|
|
|
|
b.cmdHistory = []cmdHistoryItem{}
|
|
|
|
resp.Success = true
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
// GetSlowLogs get slow log list
|
2024-01-17 15:48:23 +08:00
|
|
|
func (b *browserService) GetSlowLogs(server string, num int64) (resp types.JSResp) {
|
|
|
|
item, err := b.getRedisClient(server, -1)
|
2023-11-05 11:57:52 +08:00
|
|
|
if err != nil {
|
|
|
|
resp.Msg = err.Error()
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
client, ctx := item.client, item.ctx
|
|
|
|
var logs []redis.SlowLog
|
|
|
|
if cluster, ok := client.(*redis.ClusterClient); ok {
|
|
|
|
// cluster mode
|
|
|
|
var mu sync.Mutex
|
|
|
|
err = cluster.ForEachShard(ctx, func(ctx context.Context, cli *redis.Client) error {
|
|
|
|
if subLogs, _ := client.SlowLogGet(ctx, num).Result(); len(subLogs) > 0 {
|
|
|
|
mu.Lock()
|
|
|
|
logs = append(logs, subLogs...)
|
|
|
|
mu.Unlock()
|
|
|
|
}
|
|
|
|
return nil
|
|
|
|
})
|
|
|
|
} else {
|
|
|
|
logs, err = client.SlowLogGet(ctx, num).Result()
|
|
|
|
}
|
|
|
|
if err != nil {
|
|
|
|
resp.Msg = err.Error()
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
sort.Slice(logs, func(i, j int) bool {
|
|
|
|
return logs[i].Time.UnixMilli() > logs[j].Time.UnixMilli()
|
|
|
|
})
|
|
|
|
if len(logs) > int(num) {
|
|
|
|
logs = logs[:num]
|
|
|
|
}
|
|
|
|
|
|
|
|
list := sliceutil.Map(logs, func(i int) slowLogItem {
|
|
|
|
var name string
|
|
|
|
var e error
|
|
|
|
if name, e = url.QueryUnescape(logs[i].ClientName); e != nil {
|
|
|
|
name = logs[i].ClientName
|
|
|
|
}
|
|
|
|
return slowLogItem{
|
|
|
|
Timestamp: logs[i].Time.UnixMilli(),
|
|
|
|
Client: name,
|
|
|
|
Addr: logs[i].ClientAddr,
|
|
|
|
Cmd: sliceutil.JoinString(logs[i].Args, " "),
|
|
|
|
Cost: logs[i].Duration.Milliseconds(),
|
|
|
|
}
|
|
|
|
})
|
|
|
|
|
|
|
|
resp.Success = true
|
|
|
|
resp.Data = map[string]any{
|
|
|
|
"list": list,
|
|
|
|
}
|
|
|
|
return
|
|
|
|
}
|
2024-01-25 00:29:15 +08:00
|
|
|
|
|
|
|
// GetClientList get all connected client info
|
|
|
|
func (b *browserService) GetClientList(server string) (resp types.JSResp) {
|
|
|
|
item, err := b.getRedisClient(server, -1)
|
|
|
|
if err != nil {
|
|
|
|
resp.Msg = err.Error()
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
parseContent := func(content string) []map[string]string {
|
|
|
|
lines := strings.Split(content, "\n")
|
|
|
|
list := make([]map[string]string, 0, len(lines))
|
|
|
|
for _, line := range lines {
|
|
|
|
line = strings.TrimSpace(line)
|
|
|
|
if len(line) > 0 {
|
|
|
|
items := strings.Split(line, " ")
|
|
|
|
itemKV := map[string]string{}
|
|
|
|
for _, it := range items {
|
|
|
|
kv := strings.SplitN(it, "=", 2)
|
|
|
|
if len(kv) > 1 {
|
|
|
|
itemKV[kv[0]] = kv[1]
|
|
|
|
}
|
|
|
|
}
|
|
|
|
list = append(list, itemKV)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return list
|
|
|
|
}
|
|
|
|
|
|
|
|
client, ctx := item.client, item.ctx
|
|
|
|
var fullList []map[string]string
|
|
|
|
var mutex sync.Mutex
|
|
|
|
if cluster, ok := client.(*redis.ClusterClient); ok {
|
|
|
|
cluster.ForEachMaster(ctx, func(ctx context.Context, cli *redis.Client) error {
|
|
|
|
mutex.Lock()
|
|
|
|
defer mutex.Unlock()
|
|
|
|
fullList = append(fullList, parseContent(cli.ClientList(ctx).Val())...)
|
|
|
|
return nil
|
|
|
|
})
|
|
|
|
} else {
|
|
|
|
fullList = append(fullList, parseContent(client.ClientList(ctx).Val())...)
|
|
|
|
}
|
|
|
|
|
|
|
|
resp.Success = true
|
|
|
|
resp.Data = map[string]any{
|
|
|
|
"list": fullList,
|
|
|
|
}
|
|
|
|
return
|
|
|
|
}
|