Mirror of https://github.com/MHSanaei/3x-ui.git (synced 2025-10-27 02:24:40 +00:00)
Compare commits
9 commits
37c17357fc ... 83f8a03b50
| Author | SHA1 | Date |
|---|---|---|
| | 83f8a03b50 | |
| | b45e63a14a | |
| | 3007bcff97 | |
| | 55f1d72af5 | |
| | 806ecbd7c5 | |
| | ae79b43cdb | |
| | e64e6327ef | |
| | 9f024b9e6a | |
| | eacfbc86b5 | |
10 changed files with 247 additions and 32 deletions
.github/workflows/docker.yml (vendored, 3 changes)
@@ -1,4 +1,7 @@
 name: Release 3X-UI for Docker
+permissions:
+  contents: read
+  packages: write
 on:
   workflow_dispatch:
   push:
@@ -49,6 +49,7 @@ RUN chmod +x \
     /usr/bin/x-ui

 ENV XUI_ENABLE_FAIL2BAN="true"
+EXPOSE 2053
 VOLUME [ "/etc/x-ui" ]
 CMD [ "./x-ui" ]
 ENTRYPOINT [ "/app/DockerEntrypoint.sh" ]
@@ -26,7 +26,7 @@ const (
     Debug   LogLevel = "debug"
     Info    LogLevel = "info"
     Notice  LogLevel = "notice"
-    Warn    LogLevel = "warn"
+    Warning LogLevel = "warning"
     Error   LogLevel = "error"
 )
main.go (2 changes)

@@ -35,7 +35,7 @@ func runWebServer() {
         logger.InitLogger(logging.INFO)
     case config.Notice:
         logger.InitLogger(logging.NOTICE)
-    case config.Warn:
+    case config.Warning:
         logger.InitLogger(logging.WARNING)
     case config.Error:
         logger.InitLogger(logging.ERROR)
@@ -2,7 +2,8 @@
 package random

 import (
-    "math/rand"
+    "crypto/rand"
+    "math/big"
 )

 var (

@@ -40,12 +41,21 @@ func init() {
 func Seq(n int) string {
     runes := make([]rune, n)
     for i := 0; i < n; i++ {
-        runes[i] = allSeq[rand.Intn(len(allSeq))]
+        idx, err := rand.Int(rand.Reader, big.NewInt(int64(len(allSeq))))
+        if err != nil {
+            panic("crypto/rand failed: " + err.Error())
+        }
+        runes[i] = allSeq[idx.Int64()]
     }
     return string(runes)
 }

 // Num generates a random integer between 0 and n-1.
 func Num(n int) int {
-    return rand.Intn(n)
+    bn := big.NewInt(int64(n))
+    r, err := rand.Int(rand.Reader, bn)
+    if err != nil {
+        panic("crypto/rand failed: " + err.Error())
+    }
+    return int(r.Int64())
 }
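The switch from math/rand to crypto/rand above trades speed for cryptographically secure, unbiased output: rand.Int returns a uniform value in [0, max), so there is no modulo bias. A minimal standalone sketch of the same pattern (not code from this repository; `alphabet` and `randomToken` are hypothetical stand-ins for the package's `allSeq` and `Seq`):

```go
package main

import (
	"crypto/rand"
	"fmt"
	"math/big"
)

// alphabet is a stand-in for the package's allSeq slice.
var alphabet = []rune("abcdefghijklmnopqrstuvwxyz0123456789")

// randomToken builds an n-rune token using crypto/rand: each index is drawn
// uniformly from [0, len(alphabet)), mirroring the approach in the diff.
func randomToken(n int) string {
	out := make([]rune, n)
	for i := range out {
		idx, err := rand.Int(rand.Reader, big.NewInt(int64(len(alphabet))))
		if err != nil {
			// crypto/rand failing usually means the OS entropy source is broken.
			panic(err)
		}
		out[i] = alphabet[idx.Int64()]
	}
	return string(out)
}

func main() {
	fmt.Println(randomToken(16))
}
```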
@@ -138,6 +138,14 @@ func (a *ServerController) installXray(c *gin.Context) {
 // updateGeofile updates the specified geo file for Xray.
 func (a *ServerController) updateGeofile(c *gin.Context) {
     fileName := c.Param("fileName")
+
+    // Validate the filename for security (prevent path traversal attacks)
+    if fileName != "" && !a.serverService.IsValidGeofileName(fileName) {
+        jsonMsg(c, I18nWeb(c, "pages.index.geofileUpdatePopover"),
+            fmt.Errorf("invalid filename: contains unsafe characters or path traversal patterns"))
+        return
+    }
+
     err := a.serverService.UpdateGeofile(fileName)
     jsonMsg(c, I18nWeb(c, "pages.index.geofileUpdatePopover"), err)
 }
@@ -1959,6 +1959,15 @@ func (s *InboundService) GetClientTrafficTgBot(tgId int64) ([]*xray.ClientTraffi
         return nil, err
     }
+
+    // Populate UUID and other client data for each traffic record
+    for i := range traffics {
+        if ct, client, e := s.GetClientByEmail(traffics[i].Email); e == nil && ct != nil && client != nil {
+            traffics[i].Enable = client.Enable
+            traffics[i].UUID = client.ID
+            traffics[i].SubId = client.SubID
+        }
+    }
     return traffics, nil
 }

@@ -1971,6 +1980,7 @@ func (s *InboundService) GetClientTrafficByEmail(email string) (traffic *xray.Cl
     }
     if t != nil && client != nil {
         t.Enable = client.Enable
+        t.UUID = client.ID
         t.SubId = client.SubID
         return t, nil
     }

@@ -2012,6 +2022,7 @@ func (s *InboundService) GetClientTrafficByID(id string) ([]xray.ClientTraffic,
     for i := range traffics {
         if ct, client, e := s.GetClientByEmail(traffics[i].Email); e == nil && ct != nil && client != nil {
             traffics[i].Enable = client.Enable
+            traffics[i].UUID = client.ID
             traffics[i].SubId = client.SubID
         }
     }
@@ -13,6 +13,7 @@ import (
     "os"
     "os/exec"
     "path/filepath"
+    "regexp"
     "runtime"
     "strconv"
     "strings"

@@ -697,14 +698,39 @@ func (s *ServerService) GetLogs(count string, level string, syslog string) []str
     var lines []string

     if syslog == "true" {
-        cmdArgs := []string{"journalctl", "-u", "x-ui", "--no-pager", "-n", count, "-p", level}
-        // Run the command
-        cmd := exec.Command(cmdArgs[0], cmdArgs[1:]...)
+        // Check if running on Windows - journalctl is not available
+        if runtime.GOOS == "windows" {
+            return []string{"Syslog is not supported on Windows. Please use application logs instead by unchecking the 'Syslog' option."}
+        }
+
+        // Validate and sanitize count parameter
+        countInt, err := strconv.Atoi(count)
+        if err != nil || countInt < 1 || countInt > 10000 {
+            return []string{"Invalid count parameter - must be a number between 1 and 10000"}
+        }
+
+        // Validate level parameter - only allow valid syslog levels
+        validLevels := map[string]bool{
+            "0": true, "emerg": true,
+            "1": true, "alert": true,
+            "2": true, "crit": true,
+            "3": true, "err": true,
+            "4": true, "warning": true,
+            "5": true, "notice": true,
+            "6": true, "info": true,
+            "7": true, "debug": true,
+        }
+        if !validLevels[level] {
+            return []string{"Invalid level parameter - must be a valid syslog level"}
+        }
+
+        // Use hardcoded command with validated parameters
+        cmd := exec.Command("journalctl", "-u", "x-ui", "--no-pager", "-n", strconv.Itoa(countInt), "-p", level)
         var out bytes.Buffer
         cmd.Stdout = &out
-        err := cmd.Run()
+        err = cmd.Run()
         if err != nil {
-            return []string{"Failed to run journalctl command!"}
+            return []string{"Failed to run journalctl command! Make sure systemd is available and x-ui service is registered."}
         }
         lines = strings.Split(out.String(), "\n")
     } else {
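The GetLogs change above bounds the count, checks the level against an allowlist, and only then builds the journalctl invocation. A standalone sketch of that validate-then-exec pattern (hypothetical helper, not the panel's code); note that exec.Command takes an argv slice directly and never invokes a shell, so validated values cannot be abused for command injection:

```go
package main

import (
	"fmt"
	"os/exec"
	"strconv"
)

// validLevels mirrors the allowlist idea from the diff: only known
// journalctl priority names are accepted.
var validLevels = map[string]bool{
	"emerg": true, "alert": true, "crit": true, "err": true,
	"warning": true, "notice": true, "info": true, "debug": true,
}

// journalArgs validates user-supplied parameters before they are handed
// to exec.Command.
func journalArgs(count, level string) ([]string, error) {
	n, err := strconv.Atoi(count)
	if err != nil || n < 1 || n > 10000 {
		return nil, fmt.Errorf("count must be a number between 1 and 10000")
	}
	if !validLevels[level] {
		return nil, fmt.Errorf("unknown syslog level %q", level)
	}
	return []string{"-u", "x-ui", "--no-pager", "-n", strconv.Itoa(n), "-p", level}, nil
}

func main() {
	args, err := journalArgs("100", "info")
	if err != nil {
		fmt.Println("rejected:", err)
		return
	}
	// On a systemd host this runs journalctl with the validated arguments.
	out, err := exec.Command("journalctl", args...).CombinedOutput()
	fmt.Println(string(out), err)
}
```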
@@ -971,6 +997,35 @@ func (s *ServerService) ImportDB(file multipart.File) error {
     return nil
 }

+// IsValidGeofileName validates that the filename is safe for geofile operations.
+// It checks for path traversal attempts and ensures the filename contains only safe characters.
+func (s *ServerService) IsValidGeofileName(filename string) bool {
+    if filename == "" {
+        return false
+    }
+
+    // Check for path traversal attempts
+    if strings.Contains(filename, "..") {
+        return false
+    }
+
+    // Check for path separators (both forward and backward slash)
+    if strings.ContainsAny(filename, `/\`) {
+        return false
+    }
+
+    // Check for absolute path indicators
+    if filepath.IsAbs(filename) {
+        return false
+    }
+
+    // Additional security: only allow alphanumeric, dots, underscores, and hyphens
+    // This is stricter than the general filename regex
+    validGeofilePattern := `^[a-zA-Z0-9._-]+\.dat$`
+    matched, _ := regexp.MatchString(validGeofilePattern, filename)
+    return matched
+}
+
 func (s *ServerService) UpdateGeofile(fileName string) error {
     files := []struct {
         URL string
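The checks above can be exercised in isolation. Below is a condensed, hypothetical restatement of the same rules with a few sample inputs (illustration only, not the repository's test code):

```go
package main

import (
	"fmt"
	"path/filepath"
	"regexp"
	"strings"
)

// isValidGeofileName restates the rules added in the diff: no "..", no path
// separators, no absolute paths, and the name must match ^[a-zA-Z0-9._-]+\.dat$.
func isValidGeofileName(name string) bool {
	if name == "" ||
		strings.Contains(name, "..") ||
		strings.ContainsAny(name, `/\`) ||
		filepath.IsAbs(name) {
		return false
	}
	ok, _ := regexp.MatchString(`^[a-zA-Z0-9._-]+\.dat$`, name)
	return ok
}

func main() {
	for _, name := range []string{
		"geoip.dat",           // accepted
		"geosite_IR.dat",      // accepted
		"../../etc/passwd",    // rejected: traversal and separators
		"/etc/x-ui/geoip.dat", // rejected: absolute path with separators
		"geoip.txt",           // rejected: wrong extension
	} {
		fmt.Printf("%-22s -> %v\n", name, isValidGeofileName(name))
	}
}
```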
@@ -984,6 +1039,25 @@ func (s *ServerService) UpdateGeofile(fileName string) error {
         {"https://github.com/runetfreedom/russia-v2ray-rules-dat/releases/latest/download/geosite.dat", "geosite_RU.dat"},
     }
+
+    // Strict allowlist check to avoid writing uncontrolled files
+    if fileName != "" {
+        // Use the centralized validation function
+        if !s.IsValidGeofileName(fileName) {
+            return common.NewErrorf("Invalid geofile name: contains unsafe path characters: %s", fileName)
+        }
+
+        // Ensure the filename matches exactly one from our allowlist
+        isAllowed := false
+        for _, file := range files {
+            if fileName == file.FileName {
+                isAllowed = true
+                break
+            }
+        }
+        if !isAllowed {
+            return common.NewErrorf("Invalid geofile name: %s not in allowlist", fileName)
+        }
+    }
+
     downloadFile := func(url, destPath string) error {
         resp, err := http.Get(url)
         if err != nil {

@@ -1009,14 +1083,17 @@ func (s *ServerService) UpdateGeofile(fileName string) error {
     if fileName == "" {
         for _, file := range files {
-            destPath := fmt.Sprintf("%s/%s", config.GetBinFolderPath(), file.FileName)
+            // Sanitize the filename from our allowlist as an extra precaution
+            destPath := filepath.Join(config.GetBinFolderPath(), filepath.Base(file.FileName))
+
             if err := downloadFile(file.URL, destPath); err != nil {
                 errorMessages = append(errorMessages, fmt.Sprintf("Error downloading Geofile '%s': %v", file.FileName, err))
             }
         }
     } else {
-        destPath := fmt.Sprintf("%s/%s", config.GetBinFolderPath(), fileName)
+        // Use filepath.Base to ensure we only get the filename component, no path traversal
+        safeName := filepath.Base(fileName)
+        destPath := filepath.Join(config.GetBinFolderPath(), safeName)
+
         var fileURL string
         for _, file := range files {

@@ -1028,12 +1105,12 @@ func (s *ServerService) UpdateGeofile(fileName string) error {
         if fileURL == "" {
             errorMessages = append(errorMessages, fmt.Sprintf("File '%s' not found in the list of Geofiles", fileName))
-        }
-        if err := downloadFile(fileURL, destPath); err != nil {
-            errorMessages = append(errorMessages, fmt.Sprintf("Error downloading Geofile '%s': %v", fileName, err))
+        } else {
+            if err := downloadFile(fileURL, destPath); err != nil {
+                errorMessages = append(errorMessages, fmt.Sprintf("Error downloading Geofile '%s': %v", fileName, err))
+            }
         }
     }

     err := s.RestartXrayService()
     if err != nil {
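The destination-path change relies on filepath.Base collapsing any directory components before filepath.Join builds the target path. A small illustration (the bin folder value here is a placeholder for whatever config.GetBinFolderPath() returns):

```go
package main

import (
	"fmt"
	"path/filepath"
)

func main() {
	// binFolder stands in for config.GetBinFolderPath(); the real value
	// comes from the panel's configuration.
	binFolder := "/usr/local/x-ui/bin"

	for _, name := range []string{
		"geoip.dat",
		"../../etc/passwd",
		"/etc/shadow",
	} {
		// filepath.Base strips every directory component, so even a
		// traversal attempt collapses to a bare filename before it is
		// joined onto the destination folder.
		dest := filepath.Join(binFolder, filepath.Base(name))
		fmt.Printf("%-20s -> %s\n", name, dest)
	}
}
```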
@@ -16,6 +16,7 @@ import (
     "regexp"
     "strconv"
     "strings"
+    "sync"
     "time"

     "github.com/mhsanaei/3x-ui/v2/config"

@@ -44,6 +45,23 @@ var (
     hostname    string
     hashStorage *global.HashStorage

+    // Performance improvements
+    messageWorkerPool   chan struct{} // Semaphore for limiting concurrent message processing
+    optimizedHTTPClient *http.Client  // HTTP client with connection pooling and timeouts
+
+    // Simple cache for frequently accessed data
+    statusCache struct {
+        data      *Status
+        timestamp time.Time
+        mutex     sync.RWMutex
+    }
+
+    serverStatsCache struct {
+        data      string
+        timestamp time.Time
+        mutex     sync.RWMutex
+    }
+
     // clients data to adding new client
     receiver_inbound_ID int
     client_Id           string

@@ -100,6 +118,46 @@ func (t *Tgbot) GetHashStorage() *global.HashStorage {
     return hashStorage
 }

+// getCachedStatus returns cached server status if it's fresh enough (less than 5 seconds old)
+func (t *Tgbot) getCachedStatus() (*Status, bool) {
+    statusCache.mutex.RLock()
+    defer statusCache.mutex.RUnlock()
+
+    if statusCache.data != nil && time.Since(statusCache.timestamp) < 5*time.Second {
+        return statusCache.data, true
+    }
+    return nil, false
+}
+
+// setCachedStatus updates the status cache
+func (t *Tgbot) setCachedStatus(status *Status) {
+    statusCache.mutex.Lock()
+    defer statusCache.mutex.Unlock()
+
+    statusCache.data = status
+    statusCache.timestamp = time.Now()
+}
+
+// getCachedServerStats returns cached server stats if it's fresh enough (less than 10 seconds old)
+func (t *Tgbot) getCachedServerStats() (string, bool) {
+    serverStatsCache.mutex.RLock()
+    defer serverStatsCache.mutex.RUnlock()
+
+    if serverStatsCache.data != "" && time.Since(serverStatsCache.timestamp) < 10*time.Second {
+        return serverStatsCache.data, true
+    }
+    return "", false
+}
+
+// setCachedServerStats updates the server stats cache
+func (t *Tgbot) setCachedServerStats(stats string) {
+    serverStatsCache.mutex.Lock()
+    defer serverStatsCache.mutex.Unlock()
+
+    serverStatsCache.data = stats
+    serverStatsCache.timestamp = time.Now()
+}
+
 // Start initializes and starts the Telegram bot with the provided translation files.
 func (t *Tgbot) Start(i18nFS embed.FS) error {
     // Initialize localizer
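The cache helpers added above follow a read-mostly TTL pattern: readers take the cheap RLock and compare a timestamp against a freshness window, and only writers take the full Lock. A generic, standalone sketch of the same idea (not code from the repository):

```go
package main

import (
	"fmt"
	"sync"
	"time"
)

// ttlCache is a minimal read-mostly cache: entries are considered stale
// once they are older than ttl.
type ttlCache struct {
	mu        sync.RWMutex
	value     string
	timestamp time.Time
	ttl       time.Duration
}

func (c *ttlCache) get() (string, bool) {
	c.mu.RLock()
	defer c.mu.RUnlock()
	if c.value != "" && time.Since(c.timestamp) < c.ttl {
		return c.value, true
	}
	return "", false
}

func (c *ttlCache) set(v string) {
	c.mu.Lock()
	defer c.mu.Unlock()
	c.value = v
	c.timestamp = time.Now()
}

func main() {
	c := &ttlCache{ttl: 10 * time.Second}
	if _, ok := c.get(); !ok {
		c.set("expensive server stats") // computed once, reused until stale
	}
	v, ok := c.get()
	fmt.Println(v, ok)
}
```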
@@ -111,6 +169,20 @@ func (t *Tgbot) Start(i18nFS embed.FS) error {
     // Initialize hash storage to store callback queries
     hashStorage = global.NewHashStorage(20 * time.Minute)

+    // Initialize worker pool for concurrent message processing (max 10 concurrent handlers)
+    messageWorkerPool = make(chan struct{}, 10)
+
+    // Initialize optimized HTTP client with connection pooling
+    optimizedHTTPClient = &http.Client{
+        Timeout: 15 * time.Second,
+        Transport: &http.Transport{
+            MaxIdleConns:        100,
+            MaxIdleConnsPerHost: 10,
+            IdleConnTimeout:     30 * time.Second,
+            DisableKeepAlives:   false,
+        },
+    }
+
     t.SetHostname()

     // Get Telegram bot token
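Reusing one http.Client with a tuned Transport lets keep-alive connections be pooled across requests, while per-request deadlines still come from a context. A minimal usage sketch under those assumptions (the URL and function names are placeholders, not the panel's endpoints):

```go
package main

import (
	"context"
	"fmt"
	"io"
	"net/http"
	"time"
)

// A single shared client: the Transport keeps idle connections around so
// repeated requests to the same host skip the TCP/TLS handshake.
var pooledClient = &http.Client{
	Timeout: 15 * time.Second,
	Transport: &http.Transport{
		MaxIdleConns:        100,
		MaxIdleConnsPerHost: 10,
		IdleConnTimeout:     30 * time.Second,
	},
}

func fetch(url string) (string, error) {
	// Each call still gets its own deadline via context.
	ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
	defer cancel()

	req, err := http.NewRequestWithContext(ctx, http.MethodGet, url, nil)
	if err != nil {
		return "", err
	}
	resp, err := pooledClient.Do(req)
	if err != nil {
		return "", err
	}
	defer resp.Body.Close()
	body, err := io.ReadAll(resp.Body)
	return string(body), err
}

func main() {
	// Placeholder URL; the panel points this at its subscription endpoint.
	if body, err := fetch("https://example.com"); err == nil {
		fmt.Println(len(body), "bytes")
	} else {
		fmt.Println("request failed:", err)
	}
}
```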
@@ -271,7 +343,7 @@ func (t *Tgbot) decodeQuery(query string) (string, error) {
 // OnReceive starts the message receiving loop for the Telegram bot.
 func (t *Tgbot) OnReceive() {
     params := telego.GetUpdatesParams{
-        Timeout: 10,
+        Timeout: 30, // Increased timeout to reduce API calls
     }

     updates, _ := bot.UpdatesViaLongPolling(context.Background(), &params)

@@ -285,14 +357,26 @@ func (t *Tgbot) OnReceive() {
     }, th.TextEqual(t.I18nBot("tgbot.buttons.closeKeyboard")))

     botHandler.HandleMessage(func(ctx *th.Context, message telego.Message) error {
+        // Use goroutine with worker pool for concurrent command processing
+        go func() {
+            messageWorkerPool <- struct{}{}        // Acquire worker
+            defer func() { <-messageWorkerPool }() // Release worker
+
             delete(userStates, message.Chat.ID)
             t.answerCommand(&message, message.Chat.ID, checkAdmin(message.From.ID))
+        }()
         return nil
     }, th.AnyCommand())

     botHandler.HandleCallbackQuery(func(ctx *th.Context, query telego.CallbackQuery) error {
+        // Use goroutine with worker pool for concurrent callback processing
+        go func() {
+            messageWorkerPool <- struct{}{}        // Acquire worker
+            defer func() { <-messageWorkerPool }() // Release worker
+
             delete(userStates, query.Message.GetChat().ID)
             t.answerCallback(&query, checkAdmin(query.From.ID))
+        }()
         return nil
     }, th.AnyCallbackQueryWithMessage())
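The handler changes above use a buffered channel as a counting semaphore so that at most ten updates are processed concurrently: a send blocks when all slots are taken, and the deferred receive frees a slot when the handler finishes. A self-contained sketch of that pattern (illustrative names only, not the bot's code):

```go
package main

import (
	"fmt"
	"sync"
)

func main() {
	// A buffered channel used as a counting semaphore, as in the diff:
	// at most 10 handlers run at once; extra goroutines block on the
	// send until a slot is released.
	workerPool := make(chan struct{}, 10)

	var wg sync.WaitGroup
	for i := 0; i < 50; i++ {
		wg.Add(1)
		go func(id int) {
			defer wg.Done()
			workerPool <- struct{}{}           // acquire a slot
			defer func() { <-workerPool }()    // release it when done
			fmt.Println("handling update", id) // stand-in for answerCommand/answerCallback
		}(i)
	}
	wg.Wait()
}
```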
@@ -2099,7 +2183,10 @@ func (t *Tgbot) SendMsgToTgbot(chatId int64, msg string, replyMarkup ...telego.R
     if err != nil {
         logger.Warning("Error sending telegram message :", err)
     }
-    time.Sleep(500 * time.Millisecond)
+    // Reduced delay to improve performance (only needed for rate limiting)
+    if n < len(allMessages)-1 { // Only delay between messages, not after the last one
+        time.Sleep(100 * time.Millisecond)
+    }
 }
 }
@@ -2208,12 +2295,12 @@ func (t *Tgbot) sendClientIndividualLinks(chatId int64, email string) {
     // Force plain text to avoid HTML page; controller respects Accept header
     req.Header.Set("Accept", "text/plain, */*;q=0.1")

-    // Use default client with reasonable timeout via context
+    // Use optimized client with connection pooling
     ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
     defer cancel()
     req = req.WithContext(ctx)

-    resp, err := http.DefaultClient.Do(req)
+    resp, err := optimizedHTTPClient.Do(req)
     if err != nil {
         t.SendMsgToTgbot(chatId, t.I18nBot("tgbot.answers.errorOperation")+"\r\n"+err.Error())
         return
@@ -2323,7 +2410,7 @@ func (t *Tgbot) sendClientQRLinks(chatId int64, email string) {
     ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
     defer cancel()
     req = req.WithContext(ctx)
-    if resp, err := http.DefaultClient.Do(req); err == nil {
+    if resp, err := optimizedHTTPClient.Do(req); err == nil {
         body, _ := io.ReadAll(resp.Body)
         _ = resp.Body.Close()
         encoded, _ := t.settingService.GetSubEncrypt()
@@ -2356,7 +2443,10 @@ func (t *Tgbot) sendClientQRLinks(chatId int64, email string) {
         tu.FileFromBytes(png, filename),
     )
     _, _ = bot.SendDocument(context.Background(), document)
-    time.Sleep(200 * time.Millisecond)
+    // Reduced delay for better performance
+    if i < max-1 { // Only delay between documents, not after the last one
+        time.Sleep(50 * time.Millisecond)
+    }
 }
 }
 }
@@ -2443,10 +2533,20 @@ func (t *Tgbot) sendServerUsage() string {

 // prepareServerUsageInfo prepares the server usage information string.
 func (t *Tgbot) prepareServerUsageInfo() string {
+    // Check if we have cached data first
+    if cachedStats, found := t.getCachedServerStats(); found {
+        return cachedStats
+    }
+
     info, ipv4, ipv6 := "", "", ""

-    // get latest status of server
+    // get latest status of server with caching
+    if cachedStatus, found := t.getCachedStatus(); found {
+        t.lastStatus = cachedStatus
+    } else {
         t.lastStatus = t.serverService.GetStatus(t.lastStatus)
+        t.setCachedStatus(t.lastStatus)
+    }
     onlines := p.GetOnlineClients()

     info += t.I18nBot("tgbot.messages.hostname", "Hostname=="+hostname)

@@ -2488,6 +2588,10 @@ func (t *Tgbot) prepareServerUsageInfo() string {
     info += t.I18nBot("tgbot.messages.udpCount", "Count=="+strconv.Itoa(t.lastStatus.UdpCount))
     info += t.I18nBot("tgbot.messages.traffic", "Total=="+common.FormatTraffic(int64(t.lastStatus.NetTraffic.Sent+t.lastStatus.NetTraffic.Recv)), "Upload=="+common.FormatTraffic(int64(t.lastStatus.NetTraffic.Sent)), "Download=="+common.FormatTraffic(int64(t.lastStatus.NetTraffic.Recv)))
     info += t.I18nBot("tgbot.messages.xrayStatus", "State=="+fmt.Sprint(t.lastStatus.Xray.State))
+
+    // Cache the complete server stats
+    t.setCachedServerStats(info)
+
     return info
 }
@@ -7,6 +7,7 @@ type ClientTraffic struct {
     InboundId int    `json:"inboundId" form:"inboundId"`
     Enable    bool   `json:"enable" form:"enable"`
     Email     string `json:"email" form:"email" gorm:"unique"`
+    UUID      string `json:"uuid" form:"uuid" gorm:"-"`
     SubId     string `json:"subId" form:"subId" gorm:"-"`
     Up        int64  `json:"up" form:"up"`
     Down      int64  `json:"down" form:"down"`
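The new UUID field is tagged gorm:"-", so it is filled per request (from the matching client, as in the inbound service hunks above) and never persisted, while its json tag exposes it in API responses. A stand-in struct mirroring the relevant tags (illustration only, not the repository's type):

```go
package main

import (
	"encoding/json"
	"fmt"
)

// clientTraffic mirrors a subset of the panel's ClientTraffic fields for
// illustration. The gorm:"-" tag on UUID and SubId means they are populated
// at request time and never stored in the database.
type clientTraffic struct {
	Enable bool   `json:"enable"`
	Email  string `json:"email"`
	UUID   string `json:"uuid" gorm:"-"`
	SubId  string `json:"subId" gorm:"-"`
	Up     int64  `json:"up"`
	Down   int64  `json:"down"`
}

func main() {
	t := clientTraffic{
		Enable: true,
		Email:  "user@example.com",
		UUID:   "d2c0f3b4-0000-0000-0000-000000000000", // filled from the matching client
		SubId:  "abc123",
		Up:     1 << 20,
		Down:   10 << 20,
	}
	out, _ := json.MarshalIndent(t, "", "  ")
	fmt.Println(string(out)) // the "uuid" field now appears in API responses
}
```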