diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md
old mode 100644
new mode 100755
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
old mode 100644
new mode 100755
diff --git a/Dockerfile b/Dockerfile
old mode 100644
new mode 100755
index 037bd5ce..1d70d0af
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,7 +1,8 @@
-FROM alpine:3.18 as builder
+FROM alpine:edge as builder
LABEL stage=go-builder
WORKDIR /app/
COPY ./ ./
+ENV CGO_CFLAGS="-D_LARGEFILE64_SOURCE"
RUN apk add --no-cache bash curl gcc git go musl-dev; \
bash build.sh release docker
diff --git a/Dockerfile-host b/Dockerfile-host
old mode 100644
new mode 100755
index 768ccbca..dc5a8139
--- a/Dockerfile-host
+++ b/Dockerfile-host
@@ -1,7 +1,8 @@
-FROM alpine:3.18 as builder
+FROM alpine:edge as builder
LABEL stage=go-builder
WORKDIR /app/
COPY ./ ./
+ENV CGO_CFLAGS="-D_LARGEFILE64_SOURCE"
RUN apk add --no-cache bash curl gcc git go musl-dev; \
bash build.sh release docker
diff --git a/Dockerfile-new b/Dockerfile-new
old mode 100644
new mode 100755
diff --git a/LICENSE b/LICENSE
old mode 100644
new mode 100755
diff --git a/README.md b/README.md
index a1d6d8c2..5f4d8ef4 100755
--- a/README.md
+++ b/README.md
@@ -43,7 +43,7 @@ English | [中文](./README_cn.md)| [日本語](./README_ja.md) | [Contributing]
## Features
-- [x] Multiple storage
+- [x] Multiple storages
- [x] Local storage
- [x] [Aliyundrive](https://www.aliyundrive.com/)
- [x] OneDrive / Sharepoint ([global](https://www.office.com/), [cn](https://portal.partner.microsoftonline.cn),de,us)
@@ -86,7 +86,7 @@ English | [中文](./README_cn.md)| [日本語](./README_ja.md) | [Contributing]
- [x] Protected routes (password protection and authentication)
- [x] WebDav (see https://alist.nn.ci/guide/webdav.html for details)
- [x] [Docker Deploy](https://hub.docker.com/r/xhofe/alist)
-- [x] Cloudflare workers proxy
+- [x] Cloudflare Workers proxy
- [x] File/Folder package download
- [x] Web upload(Can allow visitors to upload), delete, mkdir, rename, move and copy
- [x] Offline download
@@ -103,7 +103,7 @@ English | [中文](./README_cn.md)| [日本語](./README_ja.md) | [Contributing]
## Discussion
-Please go to our [discussion forum](https://github.com/Xhofe/alist/discussions) for general questions, **issues are for bug reports and feature request only.**
+Please go to our [discussion forum](https://github.com/Xhofe/alist/discussions) for general questions, **issues are for bug reports and feature requests only.**
## Sponsor
@@ -112,22 +112,22 @@ https://alist.nn.ci/guide/sponsor.html
### Special sponsors
-- [亚洲云 - 高防服务器|服务器租用|福州高防|广东电信|香港服务器|美国服务器|海外服务器 - 国内靠谱的企业级云计算服务提供商](https://www.asiayun.com/aff/QQCOOQKZ) (sponsored Chinese API server)
-- [找资源 - 阿里云盘资源搜索引擎](https://zhaoziyuan.pw/)
-- [JetBrains: Essential tools for software developers and teams](https://www.jetbrains.com/)
+- [VidHub](https://okaapps.com/product/1659622164?ref=alist) - An elegant cloud video player within the Apple ecosystem. Supports iPhone, iPad, Mac, and Apple TV.
+- [亚洲云](https://www.asiayun.com/aff/QQCOOQKZ) - 高防服务器|服务器租用|福州高防|广东电信|香港服务器|美国服务器|海外服务器 - 国内靠谱的企业级云计算服务提供商 (sponsored Chinese API server)
+- [找资源](https://zhaoziyuan.pw/) - 阿里云盘资源搜索引擎
## Contributors
Thanks goes to these wonderful people:
-[](https://github.com/alist-org/alist/graphs/contributors)
+[](https://github.com/alist-org/alist/graphs/contributors)
## License
The `AList` is open-source software licensed under the AGPL-3.0 license.
## Disclaimer
-- This program is a free and open source project. It is designed to share files on the network disk, which is convenient for downloading and learning golang. Please abide by relevant laws and regulations when using it, and do not abuse it;
+- This program is a free and open source project. It is designed to share files on the network disk, which is convenient for downloading and learning Golang. Please abide by relevant laws and regulations when using it, and do not abuse it;
- This program is implemented by calling the official sdk/interface, without destroying the official interface behavior;
- This program only does 302 redirect/traffic forwarding, and does not intercept, store, or tamper with any user data;
- Before using this program, you should understand and bear the corresponding risks, including but not limited to account ban, download speed limit, etc., which is none of this program's business;
diff --git a/README_cn.md b/README_cn.md
old mode 100644
new mode 100755
index 9374a65b..6af8aeaf
--- a/README_cn.md
+++ b/README_cn.md
@@ -110,15 +110,15 @@ AList 是一个开源软件,如果你碰巧喜欢这个项目,并希望我
### 特别赞助
-- [亚洲云 - 高防服务器|服务器租用|福州高防|广东电信|香港服务器|美国服务器|海外服务器 - 国内靠谱的企业级云计算服务提供商](https://www.asiayun.com/aff/QQCOOQKZ) (国内API服务器赞助)
-- [找资源 - 阿里云盘资源搜索引擎](https://zhaoziyuan.pw/)
-- [JetBrains: Essential tools for software developers and teams](https://www.jetbrains.com/)
+- [VidHub](https://zh.okaapps.com/product/1659622164?ref=alist) - 苹果生态下优雅的网盘视频播放器,iPhone,iPad,Mac,Apple TV全平台支持。
+- [亚洲云](https://www.asiayun.com/aff/QQCOOQKZ) - 高防服务器|服务器租用|福州高防|广东电信|香港服务器|美国服务器|海外服务器 - 国内靠谱的企业级云计算服务提供商 (国内API服务器赞助)
+- [找资源](https://zhaoziyuan.pw/) - 阿里云盘资源搜索引擎
## 贡献者
Thanks goes to these wonderful people:
-[](https://github.com/alist-org/alist/graphs/contributors)
+[](https://github.com/alist-org/alist/graphs/contributors)
## 许可
diff --git a/README_ja.md b/README_ja.md
old mode 100644
new mode 100755
index 645247e4..b873947f
--- a/README_ja.md
+++ b/README_ja.md
@@ -112,15 +112,15 @@ https://alist.nn.ci/guide/sponsor.html
### スペシャルスポンサー
-- [亚洲云 - 高防服务器|服务器租用|福州高防|广东电信|香港服务器|美国服务器|海外服务器 - 国内靠谱的企业级云计算服务提供商](https://www.asiayun.com/aff/QQCOOQKZ) (sponsored Chinese API server)
-- [找资源 - 阿里云盘资源搜索引擎](https://zhaoziyuan.pw/)
-- [JetBrains: Essential tools for software developers and teams](https://www.jetbrains.com/)
+- [VidHub](https://okaapps.com/product/1659622164?ref=alist) - An elegant cloud video player within the Apple ecosystem. Supports iPhone, iPad, Mac, and Apple TV.
+- [亚洲云](https://www.asiayun.com/aff/QQCOOQKZ) - 高防服务器|服务器租用|福州高防|广东电信|香港服务器|美国服务器|海外服务器 - 国内靠谱的企业级云计算服务提供商 (sponsored Chinese API server)
+- [找资源](https://zhaoziyuan.pw/) - 阿里云盘资源搜索引擎
## コントリビューター
これらの素晴らしい人々に感謝します:
-[](https://github.com/alist-org/alist/graphs/contributors)
+[](https://github.com/alist-org/alist/graphs/contributors)
## ライセンス
diff --git a/build.sh b/build.sh
old mode 100644
new mode 100755
index a666af48..280feb9c
--- a/build.sh
+++ b/build.sh
@@ -9,7 +9,7 @@ if [ "$1" = "dev" ]; then
webVersion="dev"
else
version=$(git describe --abbrev=0 --tags)
- webVersion=3.25.1
+ webVersion=3.29.0
fi
echo "backend version: $version"
@@ -34,7 +34,7 @@ FetchWebDev() {
}
FetchWebRelease() {
- curl -L https://github.com/alist-org/alist-web/releases/download/3.25.1/dist.tar.gz -o dist.tar.gz
+ curl -L https://github.com/alist-org/alist-web/releases/download/3.29.0/dist.tar.gz -o dist.tar.gz
tar -zxvf dist.tar.gz
rm -rf public/dist
mv -f dist public
diff --git a/cmd/admin.go b/cmd/admin.go
old mode 100644
new mode 100755
index 5dab2b25..3421e5dd
--- a/cmd/admin.go
+++ b/cmd/admin.go
@@ -19,6 +19,7 @@ var AdminCmd = &cobra.Command{
Short: "Show admin user's info and some operations about admin user's password",
Run: func(cmd *cobra.Command, args []string) {
Init()
+ defer Release()
admin, err := op.GetAdmin()
if err != nil {
utils.Log.Errorf("failed get admin user: %+v", err)
@@ -57,6 +58,7 @@ var ShowTokenCmd = &cobra.Command{
Short: "Show admin token",
Run: func(cmd *cobra.Command, args []string) {
Init()
+ defer Release()
token := setting.GetStr(conf.Token)
utils.Log.Infof("Admin token: %s", token)
},
@@ -64,6 +66,7 @@ var ShowTokenCmd = &cobra.Command{
func setAdminPassword(pwd string) {
Init()
+ defer Release()
admin, err := op.GetAdmin()
if err != nil {
utils.Log.Errorf("failed get admin user: %+v", err)
diff --git a/cmd/cancel2FA.go b/cmd/cancel2FA.go
old mode 100644
new mode 100755
index 888b0f06..08fafee8
--- a/cmd/cancel2FA.go
+++ b/cmd/cancel2FA.go
@@ -15,6 +15,7 @@ var Cancel2FACmd = &cobra.Command{
Short: "Delete 2FA of admin user",
Run: func(cmd *cobra.Command, args []string) {
Init()
+ defer Release()
admin, err := op.GetAdmin()
if err != nil {
utils.Log.Errorf("failed to get admin user: %+v", err)
diff --git a/cmd/common.go b/cmd/common.go
old mode 100644
new mode 100755
index c60d8731..b4a7081c
--- a/cmd/common.go
+++ b/cmd/common.go
@@ -7,6 +7,7 @@ import (
"github.com/alist-org/alist/v3/internal/bootstrap"
"github.com/alist-org/alist/v3/internal/bootstrap/data"
+ "github.com/alist-org/alist/v3/internal/db"
"github.com/alist-org/alist/v3/pkg/utils"
log "github.com/sirupsen/logrus"
)
@@ -19,6 +20,10 @@ func Init() {
bootstrap.InitIndex()
}
+func Release() {
+ db.Close()
+}
+
var pid = -1
var pidFile string
diff --git a/cmd/flags/config.go b/cmd/flags/config.go
old mode 100644
new mode 100755
diff --git a/cmd/lang.go b/cmd/lang.go
old mode 100644
new mode 100755
diff --git a/cmd/restart.go b/cmd/restart.go
old mode 100644
new mode 100755
diff --git a/cmd/root.go b/cmd/root.go
old mode 100644
new mode 100755
index 297eb7f8..6bd82b7a
--- a/cmd/root.go
+++ b/cmd/root.go
@@ -5,6 +5,8 @@ import (
"os"
"github.com/alist-org/alist/v3/cmd/flags"
+ _ "github.com/alist-org/alist/v3/drivers"
+ _ "github.com/alist-org/alist/v3/internal/offline_download"
"github.com/spf13/cobra"
)
diff --git a/cmd/server.go b/cmd/server.go
old mode 100644
new mode 100755
index 461389b1..d03a9d80
--- a/cmd/server.go
+++ b/cmd/server.go
@@ -2,6 +2,7 @@ package cmd
import (
"context"
+ "errors"
"fmt"
"net"
"net/http"
@@ -13,7 +14,6 @@ import (
"time"
"github.com/alist-org/alist/v3/cmd/flags"
- _ "github.com/alist-org/alist/v3/drivers"
"github.com/alist-org/alist/v3/internal/bootstrap"
"github.com/alist-org/alist/v3/internal/conf"
"github.com/alist-org/alist/v3/pkg/utils"
@@ -35,9 +35,9 @@ the address is defined in config file`,
utils.Log.Infof("delayed start for %d seconds", conf.Conf.DelayedStart)
time.Sleep(time.Duration(conf.Conf.DelayedStart) * time.Second)
}
- bootstrap.InitAria2()
- bootstrap.InitQbittorrent()
+ bootstrap.InitOfflineDownloadTools()
bootstrap.LoadStorages()
+ bootstrap.InitTaskManager()
if !flags.Debug && !flags.Dev {
gin.SetMode(gin.ReleaseMode)
}
@@ -51,7 +51,7 @@ the address is defined in config file`,
httpSrv = &http.Server{Addr: httpBase, Handler: r}
go func() {
err := httpSrv.ListenAndServe()
- if err != nil && err != http.ErrServerClosed {
+ if err != nil && !errors.Is(err, http.ErrServerClosed) {
utils.Log.Fatalf("failed to start http: %s", err.Error())
}
}()
@@ -62,7 +62,7 @@ the address is defined in config file`,
httpsSrv = &http.Server{Addr: httpsBase, Handler: r}
go func() {
err := httpsSrv.ListenAndServeTLS(conf.Conf.Scheme.CertFile, conf.Conf.Scheme.KeyFile)
- if err != nil && err != http.ErrServerClosed {
+ if err != nil && !errors.Is(err, http.ErrServerClosed) {
utils.Log.Fatalf("failed to start https: %s", err.Error())
}
}()
@@ -86,7 +86,7 @@ the address is defined in config file`,
}
}
err = unixSrv.Serve(listener)
- if err != nil && err != http.ErrServerClosed {
+ if err != nil && !errors.Is(err, http.ErrServerClosed) {
utils.Log.Fatalf("failed to start unix: %s", err.Error())
}
}()
@@ -100,7 +100,7 @@ the address is defined in config file`,
signal.Notify(quit, syscall.SIGINT, syscall.SIGTERM)
<-quit
utils.Log.Println("Shutdown server...")
-
+ Release()
ctx, cancel := context.WithTimeout(context.Background(), 1*time.Second)
defer cancel()
var wg sync.WaitGroup
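The hunks above replace direct comparisons against http.ErrServerClosed with errors.Is. A minimal standalone sketch (not taken from the repository) of why that matters once an error gets wrapped:

```go
package main

import (
	"errors"
	"fmt"
	"net/http"
)

func main() {
	// A wrapped sentinel, as returned by code that decorates errors with %w.
	wrapped := fmt.Errorf("server stopped: %w", http.ErrServerClosed)

	fmt.Println(wrapped == http.ErrServerClosed)          // false: direct comparison misses the wrapped error
	fmt.Println(errors.Is(wrapped, http.ErrServerClosed)) // true: errors.Is unwraps and matches
}
```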
diff --git a/cmd/start.go b/cmd/start.go
old mode 100644
new mode 100755
diff --git a/cmd/stop.go b/cmd/stop.go
old mode 100644
new mode 100755
diff --git a/cmd/storage.go b/cmd/storage.go
old mode 100644
new mode 100755
index 23b834c8..eabb5b40
--- a/cmd/storage.go
+++ b/cmd/storage.go
@@ -31,6 +31,7 @@ var disableStorageCmd = &cobra.Command{
}
mountPath := args[0]
Init()
+ defer Release()
storage, err := db.GetStorageByMountPath(mountPath)
if err != nil {
utils.Log.Errorf("failed to query storage: %+v", err)
@@ -89,6 +90,7 @@ var listStorageCmd = &cobra.Command{
Short: "List all storages",
Run: func(cmd *cobra.Command, args []string) {
Init()
+ defer Release()
storages, _, err := db.GetStorages(1, -1)
if err != nil {
utils.Log.Errorf("failed to query storages: %+v", err)
diff --git a/cmd/user.go b/cmd/user.go
old mode 100644
new mode 100755
diff --git a/cmd/version.go b/cmd/version.go
old mode 100644
new mode 100755
diff --git a/docker-compose.yml b/docker-compose.yml
old mode 100644
new mode 100755
diff --git a/drivers/115/driver.go b/drivers/115/driver.go
old mode 100644
new mode 100755
index b9554c89..15f6b408
--- a/drivers/115/driver.go
+++ b/drivers/115/driver.go
@@ -2,19 +2,22 @@ package _115
import (
"context"
- "os"
+ "strings"
driver115 "github.com/SheltonZhu/115driver/pkg/driver"
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/model"
+ "github.com/alist-org/alist/v3/pkg/http_range"
"github.com/alist-org/alist/v3/pkg/utils"
"github.com/pkg/errors"
+ "golang.org/x/time/rate"
)
type Pan115 struct {
model.Storage
Addition
- client *driver115.Pan115Client
+ client *driver115.Pan115Client
+ limiter *rate.Limiter
}
func (d *Pan115) Config() driver.Config {
@@ -26,29 +29,42 @@ func (d *Pan115) GetAddition() driver.Additional {
}
func (d *Pan115) Init(ctx context.Context) error {
+ if d.LimitRate > 0 {
+ d.limiter = rate.NewLimiter(rate.Limit(d.LimitRate), 1)
+ }
return d.login()
}
+func (d *Pan115) WaitLimit(ctx context.Context) error {
+ if d.limiter != nil {
+ return d.limiter.Wait(ctx)
+ }
+ return nil
+}
+
func (d *Pan115) Drop(ctx context.Context) error {
return nil
}
func (d *Pan115) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
+ if err := d.WaitLimit(ctx); err != nil {
+ return nil, err
+ }
files, err := d.getFiles(dir.GetID())
if err != nil && !errors.Is(err, driver115.ErrNotExist) {
return nil, err
}
- return utils.SliceConvert(files, func(src driver115.File) (model.Obj, error) {
- return src, nil
+ return utils.SliceConvert(files, func(src FileObj) (model.Obj, error) {
+ return &src, nil
})
}
func (d *Pan115) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
+ if err := d.WaitLimit(ctx); err != nil {
+ return nil, err
+ }
downloadInfo, err := d.client.
- SetUserAgent(driver115.UA115Browser).
- Download(file.(driver115.File).PickCode)
- // recover for upload
- d.client.SetUserAgent(driver115.UA115Desktop)
+ DownloadWithUA(file.(*FileObj).PickCode, driver115.UA115Browser)
if err != nil {
return nil, err
}
@@ -60,6 +76,9 @@ func (d *Pan115) Link(ctx context.Context, file model.Obj, args model.LinkArgs)
}
func (d *Pan115) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) error {
+ if err := d.WaitLimit(ctx); err != nil {
+ return err
+ }
if _, err := d.client.Mkdir(parentDir.GetID(), dirName); err != nil {
return err
}
@@ -67,31 +86,99 @@ func (d *Pan115) MakeDir(ctx context.Context, parentDir model.Obj, dirName strin
}
func (d *Pan115) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
+ if err := d.WaitLimit(ctx); err != nil {
+ return err
+ }
return d.client.Move(dstDir.GetID(), srcObj.GetID())
}
func (d *Pan115) Rename(ctx context.Context, srcObj model.Obj, newName string) error {
+ if err := d.WaitLimit(ctx); err != nil {
+ return err
+ }
return d.client.Rename(srcObj.GetID(), newName)
}
func (d *Pan115) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
+ if err := d.WaitLimit(ctx); err != nil {
+ return err
+ }
return d.client.Copy(dstDir.GetID(), srcObj.GetID())
}
func (d *Pan115) Remove(ctx context.Context, obj model.Obj) error {
+ if err := d.WaitLimit(ctx); err != nil {
+ return err
+ }
return d.client.Delete(obj.GetID())
}
func (d *Pan115) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
- tempFile, err := utils.CreateTempFile(stream.GetReadCloser(), stream.GetSize())
+ if err := d.WaitLimit(ctx); err != nil {
+ return err
+ }
+
+ var (
+ fastInfo *driver115.UploadInitResp
+ dirID = dstDir.GetID()
+ )
+
+ if ok, err := d.client.UploadAvailable(); err != nil || !ok {
+ return err
+ }
+ if stream.GetSize() > d.client.UploadMetaInfo.SizeLimit {
+ return driver115.ErrUploadTooLarge
+ }
+ //if digest, err = d.client.GetDigestResult(stream); err != nil {
+ // return err
+ //}
+
+ const PreHashSize int64 = 128 * utils.KB
+ hashSize := PreHashSize
+ if stream.GetSize() < PreHashSize {
+ hashSize = stream.GetSize()
+ }
+ reader, err := stream.RangeRead(http_range.Range{Start: 0, Length: hashSize})
+ if err != nil {
+ return err
+ }
+ preHash, err := utils.HashReader(utils.SHA1, reader)
if err != nil {
return err
}
- defer func() {
- _ = tempFile.Close()
- _ = os.Remove(tempFile.Name())
- }()
- return d.client.UploadFastOrByMultipart(dstDir.GetID(), stream.GetName(), stream.GetSize(), tempFile)
+ preHash = strings.ToUpper(preHash)
+ fullHash := stream.GetHash().GetHash(utils.SHA1)
+ if len(fullHash) <= 0 {
+ tmpF, err := stream.CacheFullInTempFile()
+ if err != nil {
+ return err
+ }
+ fullHash, err = utils.HashFile(utils.SHA1, tmpF)
+ if err != nil {
+ return err
+ }
+ }
+ fullHash = strings.ToUpper(fullHash)
+
+ // rapid-upload
+	// note that 115 adds a timeout for rapid-upload,
+	// and a "sig invalid" error is thrown even when the hash is correct after the timeout.
+ if fastInfo, err = d.rapidUpload(stream.GetSize(), stream.GetName(), dirID, preHash, fullHash, stream); err != nil {
+ return err
+ }
+ if matched, err := fastInfo.Ok(); err != nil {
+ return err
+ } else if matched {
+ return nil
+ }
+
+	// rapid upload failed, fall back to a regular upload
+	if stream.GetSize() <= utils.KB { // files of 1KB or less are uploaded in normal mode
+ return d.client.UploadByOSS(&fastInfo.UploadOSSParams, stream, dirID)
+ }
+	// multipart upload
+ return d.UploadByMultipart(&fastInfo.UploadOSSParams, stream.GetSize(), stream, dirID)
+
}
var _ driver.Driver = (*Pan115)(nil)
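The new WaitLimit calls above gate every 115 API request through a shared golang.org/x/time/rate limiter configured from the LimitRate addition. A self-contained sketch of the same pattern, using hypothetical names (throttled, newThrottled) outside the driver:

```go
package main

import (
	"context"
	"fmt"
	"time"

	"golang.org/x/time/rate"
)

// throttled mirrors the WaitLimit pattern added to the 115 driver: every call
// blocks until the limiter grants a token, or returns early if ctx is done.
type throttled struct {
	limiter *rate.Limiter
}

func newThrottled(limitRate float64) *throttled {
	t := &throttled{}
	if limitRate > 0 {
		t.limiter = rate.NewLimiter(rate.Limit(limitRate), 1)
	}
	return t
}

func (t *throttled) waitLimit(ctx context.Context) error {
	if t.limiter != nil {
		return t.limiter.Wait(ctx)
	}
	return nil
}

func main() {
	t := newThrottled(2) // roughly two requests per second
	for i := 0; i < 5; i++ {
		if err := t.waitLimit(context.Background()); err != nil {
			fmt.Println("canceled:", err)
			return
		}
		fmt.Println(time.Now().Format("15:04:05.000"), "request", i)
	}
}
```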
diff --git a/drivers/115/meta.go b/drivers/115/meta.go
old mode 100644
new mode 100755
index 0e614ced..16ec22cd
--- a/drivers/115/meta.go
+++ b/drivers/115/meta.go
@@ -6,17 +6,18 @@ import (
)
type Addition struct {
- Cookie string `json:"cookie" type:"text" help:"one of QR code token and cookie required"`
- QRCodeToken string `json:"qrcode_token" type:"text" help:"one of QR code token and cookie required"`
- PageSize int64 `json:"page_size" type:"number" default:"56" help:"list api per page size of 115 driver"`
+ Cookie string `json:"cookie" type:"text" help:"one of QR code token and cookie required"`
+ QRCodeToken string `json:"qrcode_token" type:"text" help:"one of QR code token and cookie required"`
+ PageSize int64 `json:"page_size" type:"number" default:"56" help:"list api per page size of 115 driver"`
+ LimitRate float64 `json:"limit_rate" type:"number" default:"2" help:"limit all api request rate (1r/[limit_rate]s)"`
driver.RootID
}
var config = driver.Config{
- Name: "115 Cloud",
- DefaultRoot: "0",
- OnlyProxy: true,
- OnlyLocal: true,
+ Name: "115 Cloud",
+ DefaultRoot: "0",
+ OnlyProxy: true,
+ //OnlyLocal: true,
NoOverwriteUpload: true,
}
diff --git a/drivers/115/types.go b/drivers/115/types.go
old mode 100644
new mode 100755
index 25492bdc..830e347b
--- a/drivers/115/types.go
+++ b/drivers/115/types.go
@@ -3,6 +3,20 @@ package _115
import (
"github.com/SheltonZhu/115driver/pkg/driver"
"github.com/alist-org/alist/v3/internal/model"
+ "github.com/alist-org/alist/v3/pkg/utils"
+ "time"
)
-var _ model.Obj = (*driver.File)(nil)
+var _ model.Obj = (*FileObj)(nil)
+
+type FileObj struct {
+ driver.File
+}
+
+func (f *FileObj) CreateTime() time.Time {
+ return f.File.CreateTime
+}
+
+func (f *FileObj) GetHash() utils.HashInfo {
+ return utils.NewHashInfo(utils.SHA1, f.Sha1)
+}
diff --git a/drivers/115/util.go b/drivers/115/util.go
old mode 100644
new mode 100755
index cc04baaf..8e638d79
--- a/drivers/115/util.go
+++ b/drivers/115/util.go
@@ -1,28 +1,42 @@
package _115
import (
+ "bytes"
"crypto/tls"
+ "encoding/json"
"fmt"
+ "github.com/alist-org/alist/v3/internal/model"
+ "github.com/alist-org/alist/v3/pkg/http_range"
+ "github.com/alist-org/alist/v3/pkg/utils"
+ "github.com/aliyun/aliyun-oss-go-sdk/oss"
+ "github.com/orzogc/fake115uploader/cipher"
+ "io"
+ "net/url"
+ "path/filepath"
+ "strconv"
+ "strings"
+ "sync"
+ "time"
- "github.com/SheltonZhu/115driver/pkg/driver"
+ driver115 "github.com/SheltonZhu/115driver/pkg/driver"
"github.com/alist-org/alist/v3/internal/conf"
"github.com/pkg/errors"
)
-var UserAgent = driver.UA115Desktop
+var UserAgent = driver115.UA115Desktop
func (d *Pan115) login() error {
var err error
- opts := []driver.Option{
- driver.UA(UserAgent),
- func(c *driver.Pan115Client) {
+ opts := []driver115.Option{
+ driver115.UA(UserAgent),
+ func(c *driver115.Pan115Client) {
c.Client.SetTLSClientConfig(&tls.Config{InsecureSkipVerify: conf.Conf.TlsInsecureSkipVerify})
},
}
- d.client = driver.New(opts...)
- cr := &driver.Credential{}
+ d.client = driver115.New(opts...)
+ cr := &driver115.Credential{}
if d.Addition.QRCodeToken != "" {
- s := &driver.QRCodeSession{
+ s := &driver115.QRCodeSession{
UID: d.Addition.QRCodeToken,
}
if cr, err = d.client.QRCodeLogin(s); err != nil {
@@ -41,17 +55,367 @@ func (d *Pan115) login() error {
return d.client.LoginCheck()
}
-func (d *Pan115) getFiles(fileId string) ([]driver.File, error) {
- res := make([]driver.File, 0)
+func (d *Pan115) getFiles(fileId string) ([]FileObj, error) {
+ res := make([]FileObj, 0)
if d.PageSize <= 0 {
- d.PageSize = driver.FileListLimit
+ d.PageSize = driver115.FileListLimit
}
files, err := d.client.ListWithLimit(fileId, d.PageSize)
if err != nil {
return nil, err
}
for _, file := range *files {
- res = append(res, file)
+ res = append(res, FileObj{file})
}
return res, nil
}
+
+const (
+ appVer = "2.0.3.6"
+)
+
+func (d *Pan115) rapidUpload(fileSize int64, fileName, dirID, preID, fileID string, stream model.FileStreamer) (*driver115.UploadInitResp, error) {
+ var (
+ ecdhCipher *cipher.EcdhCipher
+ encrypted []byte
+ decrypted []byte
+ encodedToken string
+ err error
+ target = "U_1_" + dirID
+ bodyBytes []byte
+ result = driver115.UploadInitResp{}
+ fileSizeStr = strconv.FormatInt(fileSize, 10)
+ )
+ if ecdhCipher, err = cipher.NewEcdhCipher(); err != nil {
+ return nil, err
+ }
+
+ userID := strconv.FormatInt(d.client.UserID, 10)
+ form := url.Values{}
+ form.Set("appid", "0")
+ form.Set("appversion", appVer)
+ form.Set("userid", userID)
+ form.Set("filename", fileName)
+ form.Set("filesize", fileSizeStr)
+ form.Set("fileid", fileID)
+ form.Set("target", target)
+ form.Set("sig", d.client.GenerateSignature(fileID, target))
+
+ signKey, signVal := "", ""
+ for retry := true; retry; {
+ t := driver115.Now()
+
+ if encodedToken, err = ecdhCipher.EncodeToken(t.ToInt64()); err != nil {
+ return nil, err
+ }
+
+ params := map[string]string{
+ "k_ec": encodedToken,
+ }
+
+ form.Set("t", t.String())
+ form.Set("token", d.client.GenerateToken(fileID, preID, t.String(), fileSizeStr, signKey, signVal))
+ if signKey != "" && signVal != "" {
+ form.Set("sign_key", signKey)
+ form.Set("sign_val", signVal)
+ }
+ if encrypted, err = ecdhCipher.Encrypt([]byte(form.Encode())); err != nil {
+ return nil, err
+ }
+
+ req := d.client.NewRequest().
+ SetQueryParams(params).
+ SetBody(encrypted).
+ SetHeaderVerbatim("Content-Type", "application/x-www-form-urlencoded").
+ SetDoNotParseResponse(true)
+ resp, err := req.Post(driver115.ApiUploadInit)
+ if err != nil {
+ return nil, err
+ }
+ data := resp.RawBody()
+ defer data.Close()
+ if bodyBytes, err = io.ReadAll(data); err != nil {
+ return nil, err
+ }
+ if decrypted, err = ecdhCipher.Decrypt(bodyBytes); err != nil {
+ return nil, err
+ }
+ if err = driver115.CheckErr(json.Unmarshal(decrypted, &result), &result, resp); err != nil {
+ return nil, err
+ }
+ if result.Status == 7 {
+ // Update signKey & signVal
+ signKey = result.SignKey
+ signVal, err = UploadDigestRange(stream, result.SignCheck)
+ if err != nil {
+ return nil, err
+ }
+ } else {
+ retry = false
+ }
+ result.SHA1 = fileID
+ }
+
+ return &result, nil
+}
+
+func UploadDigestRange(stream model.FileStreamer, rangeSpec string) (result string, err error) {
+ var start, end int64
+ if _, err = fmt.Sscanf(rangeSpec, "%d-%d", &start, &end); err != nil {
+ return
+ }
+
+ length := end - start + 1
+ reader, err := stream.RangeRead(http_range.Range{Start: start, Length: length})
+ hashStr, err := utils.HashReader(utils.SHA1, reader)
+ if err != nil {
+ return "", err
+ }
+ result = strings.ToUpper(hashStr)
+ return
+}
+
+// UploadByMultipart uploads the file in multipart blocks
+func (d *Pan115) UploadByMultipart(params *driver115.UploadOSSParams, fileSize int64, stream model.FileStreamer, dirID string, opts ...driver115.UploadMultipartOption) error {
+ var (
+ chunks []oss.FileChunk
+ parts []oss.UploadPart
+ imur oss.InitiateMultipartUploadResult
+ ossClient *oss.Client
+ bucket *oss.Bucket
+ ossToken *driver115.UploadOSSTokenResp
+ err error
+ )
+
+ tmpF, err := stream.CacheFullInTempFile()
+ if err != nil {
+ return err
+ }
+
+ options := driver115.DefalutUploadMultipartOptions()
+ if len(opts) > 0 {
+ for _, f := range opts {
+ f(options)
+ }
+ }
+
+ if ossToken, err = d.client.GetOSSToken(); err != nil {
+ return err
+ }
+
+ if ossClient, err = oss.New(driver115.OSSEndpoint, ossToken.AccessKeyID, ossToken.AccessKeySecret); err != nil {
+ return err
+ }
+
+ if bucket, err = ossClient.Bucket(params.Bucket); err != nil {
+ return err
+ }
+
+	// the ossToken expires after one hour, so refresh it every 50 minutes
+ ticker := time.NewTicker(options.TokenRefreshTime)
+ defer ticker.Stop()
+	// set the overall timeout
+ timeout := time.NewTimer(options.Timeout)
+
+ if chunks, err = SplitFile(fileSize); err != nil {
+ return err
+ }
+
+ if imur, err = bucket.InitiateMultipartUpload(params.Object,
+ oss.SetHeader(driver115.OssSecurityTokenHeaderName, ossToken.SecurityToken),
+ oss.UserAgentHeader(driver115.OSSUserAgent),
+ ); err != nil {
+ return err
+ }
+
+ wg := sync.WaitGroup{}
+ wg.Add(len(chunks))
+
+ chunksCh := make(chan oss.FileChunk)
+ errCh := make(chan error)
+ UploadedPartsCh := make(chan oss.UploadPart)
+ quit := make(chan struct{})
+
+ // producer
+ go chunksProducer(chunksCh, chunks)
+ go func() {
+ wg.Wait()
+ quit <- struct{}{}
+ }()
+
+ // consumers
+ for i := 0; i < options.ThreadsNum; i++ {
+ go func(threadId int) {
+ defer func() {
+ if r := recover(); r != nil {
+ errCh <- fmt.Errorf("recovered in %v", r)
+ }
+ }()
+ for chunk := range chunksCh {
+				var part oss.UploadPart // keep retrying on error, 3 attempts in total
+ for retry := 0; retry < 3; retry++ {
+ select {
+ case <-ticker.C:
+						if ossToken, err = d.client.GetOSSToken(); err != nil { // refresh the ossToken when the ticker fires
+ errCh <- errors.Wrap(err, "刷新token时出现错误")
+ }
+ default:
+ }
+
+ buf := make([]byte, chunk.Size)
+ if _, err = tmpF.ReadAt(buf, chunk.Offset); err != nil && !errors.Is(err, io.EOF) {
+ continue
+ }
+
+ b := bytes.NewBuffer(buf)
+ if part, err = bucket.UploadPart(imur, b, chunk.Size, chunk.Number, driver115.OssOption(params, ossToken)...); err == nil {
+ break
+ }
+ }
+ if err != nil {
+ errCh <- errors.Wrap(err, fmt.Sprintf("上传 %s 的第%d个分片时出现错误:%v", stream.GetName(), chunk.Number, err))
+ }
+ UploadedPartsCh <- part
+ }
+ }(i)
+ }
+
+ go func() {
+ for part := range UploadedPartsCh {
+ parts = append(parts, part)
+ wg.Done()
+ }
+ }()
+LOOP:
+ for {
+ select {
+ case <-ticker.C:
+			// refresh the ossToken when the ticker fires
+ if ossToken, err = d.client.GetOSSToken(); err != nil {
+ return err
+ }
+ case <-quit:
+ break LOOP
+ case <-errCh:
+ return err
+ case <-timeout.C:
+ return fmt.Errorf("time out")
+ }
+ }
+
+	// the EOF error comes from the xml Unmarshal; the response is actually json, so the upload did succeed
+ if _, err = bucket.CompleteMultipartUpload(imur, parts, driver115.OssOption(params, ossToken)...); err != nil && !errors.Is(err, io.EOF) {
+		// when the file name contains '&' or '<', parsing the xml response fails, but the upload actually succeeded
+ if filename := filepath.Base(stream.GetName()); !strings.ContainsAny(filename, "&<") {
+ return err
+ }
+ }
+ return d.checkUploadStatus(dirID, params.SHA1)
+}
+func chunksProducer(ch chan oss.FileChunk, chunks []oss.FileChunk) {
+ for _, chunk := range chunks {
+ ch <- chunk
+ }
+}
+func (d *Pan115) checkUploadStatus(dirID, sha1 string) error {
+	// verify whether the upload succeeded
+ req := d.client.NewRequest().ForceContentType("application/json;charset=UTF-8")
+ opts := []driver115.GetFileOptions{
+ driver115.WithOrder(driver115.FileOrderByTime),
+ driver115.WithShowDirEnable(false),
+ driver115.WithAsc(false),
+ driver115.WithLimit(500),
+ }
+ fResp, err := driver115.GetFiles(req, dirID, opts...)
+ if err != nil {
+ return err
+ }
+ for _, fileInfo := range fResp.Files {
+ if fileInfo.Sha1 == sha1 {
+ return nil
+ }
+ }
+ return driver115.ErrUploadFailed
+}
+
+func SplitFile(fileSize int64) (chunks []oss.FileChunk, err error) {
+ for i := int64(1); i < 10; i++ {
+		if fileSize < i*utils.GB { // files smaller than i GB are split into i*1000 chunks
+ if chunks, err = SplitFileByPartNum(fileSize, int(i*1000)); err != nil {
+ return
+ }
+ break
+ }
+ }
+	if fileSize > 9*utils.GB { // files larger than 9 GB are split into 10000 chunks
+ if chunks, err = SplitFileByPartNum(fileSize, 10000); err != nil {
+ return
+ }
+ }
+	// a single chunk must be at least 100KB
+ if chunks[0].Size < 100*utils.KB {
+ if chunks, err = SplitFileByPartSize(fileSize, 100*utils.KB); err != nil {
+ return
+ }
+ }
+ return
+}
+
+// SplitFileByPartNum splits a big file into parts by the number of parts.
+// It splits the file into the specified number of chunks and returns them when err is nil.
+func SplitFileByPartNum(fileSize int64, chunkNum int) ([]oss.FileChunk, error) {
+ if chunkNum <= 0 || chunkNum > 10000 {
+ return nil, errors.New("chunkNum invalid")
+ }
+
+ if int64(chunkNum) > fileSize {
+ return nil, errors.New("oss: chunkNum invalid")
+ }
+
+ var chunks []oss.FileChunk
+ var chunk = oss.FileChunk{}
+ var chunkN = (int64)(chunkNum)
+ for i := int64(0); i < chunkN; i++ {
+ chunk.Number = int(i + 1)
+ chunk.Offset = i * (fileSize / chunkN)
+ if i == chunkN-1 {
+ chunk.Size = fileSize/chunkN + fileSize%chunkN
+ } else {
+ chunk.Size = fileSize / chunkN
+ }
+ chunks = append(chunks, chunk)
+ }
+
+ return chunks, nil
+}
+
+// SplitFileByPartSize splits a big file into parts by part size.
+// It splits the file by the given part size and returns the FileChunks when err is nil.
+func SplitFileByPartSize(fileSize int64, chunkSize int64) ([]oss.FileChunk, error) {
+ if chunkSize <= 0 {
+ return nil, errors.New("chunkSize invalid")
+ }
+
+ var chunkN = fileSize / chunkSize
+ if chunkN >= 10000 {
+ return nil, errors.New("Too many parts, please increase part size")
+ }
+
+ var chunks []oss.FileChunk
+ var chunk = oss.FileChunk{}
+ for i := int64(0); i < chunkN; i++ {
+ chunk.Number = int(i + 1)
+ chunk.Offset = i * chunkSize
+ chunk.Size = chunkSize
+ chunks = append(chunks, chunk)
+ }
+
+ if fileSize%chunkSize > 0 {
+ chunk.Number = len(chunks) + 1
+ chunk.Offset = int64(len(chunks)) * chunkSize
+ chunk.Size = fileSize % chunkSize
+ chunks = append(chunks, chunk)
+ }
+
+ return chunks, nil
+}
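SplitFile above chooses the multipart layout: files under i GB are cut into i*1000 parts, files over 9 GB into 10000 parts, and the parts are regenerated with a 100 KB minimum size. A rough standalone approximation of that sizing rule (hypothetical partPlan helper, no OSS types):

```go
package main

import "fmt"

const (
	kb = int64(1) << 10
	gb = int64(1) << 30
)

// partPlan approximates SplitFile: files under i GB get i*1000 parts,
// anything above 9 GB gets 10000 parts, and the part size is bumped to
// at least 100 KB when the computed parts would be too small.
func partPlan(fileSize int64) (parts int, partSize int64) {
	parts = 10000
	for i := int64(1); i < 10; i++ {
		if fileSize < i*gb {
			parts = int(i * 1000)
			break
		}
	}
	partSize = fileSize / int64(parts)
	if partSize < 100*kb {
		partSize = 100 * kb
		parts = int(fileSize / partSize)
		if fileSize%partSize > 0 {
			parts++
		}
	}
	return parts, partSize
}

func main() {
	for _, size := range []int64{500 * kb, 2 * gb, 20 * gb} {
		n, s := partPlan(size)
		fmt.Printf("%12d bytes -> %5d parts of ~%d bytes\n", size, n, s)
	}
}
```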
diff --git a/drivers/115_share/driver.go b/drivers/115_share/driver.go
old mode 100644
new mode 100755
diff --git a/drivers/115_share/meta.go b/drivers/115_share/meta.go
old mode 100644
new mode 100755
diff --git a/drivers/115_share/utils.go b/drivers/115_share/utils.go
old mode 100644
new mode 100755
diff --git a/drivers/123/driver.go b/drivers/123/driver.go
old mode 100644
new mode 100755
index bc1758a1..6f7fec1b
--- a/drivers/123/driver.go
+++ b/drivers/123/driver.go
@@ -6,11 +6,6 @@ import (
"encoding/base64"
"encoding/hex"
"fmt"
- "io"
- "net/http"
- "net/url"
- "os"
-
"github.com/alist-org/alist/v3/drivers/base"
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/errs"
@@ -22,6 +17,9 @@ import (
"github.com/aws/aws-sdk-go/service/s3/s3manager"
"github.com/go-resty/resty/v2"
log "github.com/sirupsen/logrus"
+ "io"
+ "net/http"
+ "net/url"
)
type Pan123 struct {
@@ -184,13 +182,12 @@ func (d *Pan123) Put(ctx context.Context, dstDir model.Obj, stream model.FileStr
// const DEFAULT int64 = 10485760
h := md5.New()
// need to calculate md5 of the full content
- tempFile, err := utils.CreateTempFile(stream.GetReadCloser(), stream.GetSize())
+ tempFile, err := stream.CacheFullInTempFile()
if err != nil {
return err
}
defer func() {
_ = tempFile.Close()
- _ = os.Remove(tempFile.Name())
}()
if _, err = io.Copy(h, tempFile); err != nil {
return err
diff --git a/drivers/123/meta.go b/drivers/123/meta.go
old mode 100644
new mode 100755
diff --git a/drivers/123/types.go b/drivers/123/types.go
old mode 100644
new mode 100755
index bc7cbf3d..b79be12e
--- a/drivers/123/types.go
+++ b/drivers/123/types.go
@@ -1,6 +1,7 @@
package _123
import (
+ "github.com/alist-org/alist/v3/pkg/utils"
"net/url"
"path"
"strconv"
@@ -21,6 +22,14 @@ type File struct {
DownloadUrl string `json:"DownloadUrl"`
}
+func (f File) CreateTime() time.Time {
+ return f.UpdateAt
+}
+
+func (f File) GetHash() utils.HashInfo {
+ return utils.HashInfo{}
+}
+
func (f File) GetPath() string {
return ""
}
diff --git a/drivers/123/upload.go b/drivers/123/upload.go
old mode 100644
new mode 100755
index ae28d6aa..6f6221f1
--- a/drivers/123/upload.go
+++ b/drivers/123/upload.go
@@ -107,7 +107,7 @@ func (d *Pan123) newUpload(ctx context.Context, upReq *UploadResp, file model.Fi
if err != nil {
return err
}
- up(j * 100 / chunkCount)
+ up(float64(j) * 100 / float64(chunkCount))
}
}
// complete s3 upload
diff --git a/drivers/123/util.go b/drivers/123/util.go
old mode 100644
new mode 100755
diff --git a/drivers/123_link/driver.go b/drivers/123_link/driver.go
new file mode 100755
index 00000000..46cdcbae
--- /dev/null
+++ b/drivers/123_link/driver.go
@@ -0,0 +1,77 @@
+package _123Link
+
+import (
+ "context"
+ stdpath "path"
+ "time"
+
+ "github.com/alist-org/alist/v3/internal/driver"
+ "github.com/alist-org/alist/v3/internal/errs"
+ "github.com/alist-org/alist/v3/internal/model"
+ "github.com/alist-org/alist/v3/pkg/utils"
+)
+
+type Pan123Link struct {
+ model.Storage
+ Addition
+ root *Node
+}
+
+func (d *Pan123Link) Config() driver.Config {
+ return config
+}
+
+func (d *Pan123Link) GetAddition() driver.Additional {
+ return &d.Addition
+}
+
+func (d *Pan123Link) Init(ctx context.Context) error {
+ node, err := BuildTree(d.OriginURLs)
+ if err != nil {
+ return err
+ }
+ node.calSize()
+ d.root = node
+ return nil
+}
+
+func (d *Pan123Link) Drop(ctx context.Context) error {
+ return nil
+}
+
+func (d *Pan123Link) Get(ctx context.Context, path string) (model.Obj, error) {
+ node := GetNodeFromRootByPath(d.root, path)
+ return nodeToObj(node, path)
+}
+
+func (d *Pan123Link) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
+ node := GetNodeFromRootByPath(d.root, dir.GetPath())
+ if node == nil {
+ return nil, errs.ObjectNotFound
+ }
+ if node.isFile() {
+ return nil, errs.NotFolder
+ }
+ return utils.SliceConvert(node.Children, func(node *Node) (model.Obj, error) {
+ return nodeToObj(node, stdpath.Join(dir.GetPath(), node.Name))
+ })
+}
+
+func (d *Pan123Link) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
+ node := GetNodeFromRootByPath(d.root, file.GetPath())
+ if node == nil {
+ return nil, errs.ObjectNotFound
+ }
+ if node.isFile() {
+ signUrl, err := SignURL(node.Url, d.PrivateKey, d.UID, time.Duration(d.ValidDuration)*time.Minute)
+ if err != nil {
+ return nil, err
+ }
+ return &model.Link{
+ URL: signUrl,
+ }, nil
+ }
+ return nil, errs.NotFile
+}
+
+var _ driver.Driver = (*Pan123Link)(nil)
diff --git a/drivers/123_link/meta.go b/drivers/123_link/meta.go
new file mode 100755
index 00000000..9f357628
--- /dev/null
+++ b/drivers/123_link/meta.go
@@ -0,0 +1,23 @@
+package _123Link
+
+import (
+ "github.com/alist-org/alist/v3/internal/driver"
+ "github.com/alist-org/alist/v3/internal/op"
+)
+
+type Addition struct {
+ OriginURLs string `json:"origin_urls" type:"text" required:"true" default:"https://vip.123pan.com/29/folder/file.mp3" help:"structure:FolderName:\n [FileSize:][Modified:]Url"`
+ PrivateKey string `json:"private_key"`
+ UID uint64 `json:"uid" type:"number"`
+ ValidDuration int64 `json:"valid_duration" type:"number" default:"30" help:"minutes"`
+}
+
+var config = driver.Config{
+ Name: "123PanLink",
+}
+
+func init() {
+ op.RegisterDriver(func() driver.Driver {
+ return &Pan123Link{}
+ })
+}
diff --git a/drivers/123_link/parse.go b/drivers/123_link/parse.go
new file mode 100755
index 00000000..8d6c3a13
--- /dev/null
+++ b/drivers/123_link/parse.go
@@ -0,0 +1,152 @@
+package _123Link
+
+import (
+ "fmt"
+ url2 "net/url"
+ stdpath "path"
+ "strconv"
+ "strings"
+ "time"
+)
+
+// build tree from text, text structure definition:
+/**
+ * FolderName:
+ * [FileSize:][Modified:]Url
+ */
+/**
+ * For example:
+ * folder1:
+ * name1:url1
+ * url2
+ * folder2:
+ * url3
+ * url4
+ * url5
+ * folder3:
+ * url6
+ * url7
+ * url8
+ */
+// if there is no name, use the last segment of the url as the name
+func BuildTree(text string) (*Node, error) {
+ lines := strings.Split(text, "\n")
+ var root = &Node{Level: -1, Name: "root"}
+ stack := []*Node{root}
+ for _, line := range lines {
+ // calculate indent
+ indent := 0
+ for i := 0; i < len(line); i++ {
+ if line[i] != ' ' {
+ break
+ }
+ indent++
+ }
+ // if indent is not a multiple of 2, it is an error
+ if indent%2 != 0 {
+ return nil, fmt.Errorf("the line '%s' is not a multiple of 2", line)
+ }
+ // calculate level
+ level := indent / 2
+ line = strings.TrimSpace(line[indent:])
+ // if the line is empty, skip
+ if line == "" {
+ continue
+ }
+ // if level isn't greater than the level of the top of the stack
+ // it is not the child of the top of the stack
+ for level <= stack[len(stack)-1].Level {
+ // pop the top of the stack
+ stack = stack[:len(stack)-1]
+ }
+ // if the line is a folder
+ if isFolder(line) {
+ // create a new node
+ node := &Node{
+ Level: level,
+ Name: strings.TrimSuffix(line, ":"),
+ }
+ // add the node to the top of the stack
+ stack[len(stack)-1].Children = append(stack[len(stack)-1].Children, node)
+ // push the node to the stack
+ stack = append(stack, node)
+ } else {
+ // if the line is a file
+ // create a new node
+ node, err := parseFileLine(line)
+ if err != nil {
+ return nil, err
+ }
+ node.Level = level
+ // add the node to the top of the stack
+ stack[len(stack)-1].Children = append(stack[len(stack)-1].Children, node)
+ }
+ }
+ return root, nil
+}
+
+func isFolder(line string) bool {
+ return strings.HasSuffix(line, ":")
+}
+
+// line definition:
+// [FileSize:][Modified:]Url
+func parseFileLine(line string) (*Node, error) {
+ // if there is no url, it is an error
+ if !strings.Contains(line, "http://") && !strings.Contains(line, "https://") {
+ return nil, fmt.Errorf("invalid line: %s, because url is required for file", line)
+ }
+ index := strings.Index(line, "http://")
+ if index == -1 {
+ index = strings.Index(line, "https://")
+ }
+ url := line[index:]
+ info := line[:index]
+ node := &Node{
+ Url: url,
+ }
+ name := stdpath.Base(url)
+ unescape, err := url2.PathUnescape(name)
+ if err == nil {
+ name = unescape
+ }
+ node.Name = name
+ if index > 0 {
+ if !strings.HasSuffix(info, ":") {
+ return nil, fmt.Errorf("invalid line: %s, because file info must end with ':'", line)
+ }
+ info = info[:len(info)-1]
+ if info == "" {
+ return nil, fmt.Errorf("invalid line: %s, because file name can't be empty", line)
+ }
+ infoParts := strings.Split(info, ":")
+ size, err := strconv.ParseInt(infoParts[0], 10, 64)
+ if err != nil {
+ return nil, fmt.Errorf("invalid line: %s, because file size must be an integer", line)
+ }
+ node.Size = size
+ if len(infoParts) > 1 {
+ modified, err := strconv.ParseInt(infoParts[1], 10, 64)
+ if err != nil {
+				return nil, fmt.Errorf("invalid line: %s, because file modified must be a unix timestamp", line)
+ }
+ node.Modified = modified
+ } else {
+ node.Modified = time.Now().Unix()
+ }
+ }
+ return node, nil
+}
+
+func splitPath(path string) []string {
+ if path == "/" {
+ return []string{"root"}
+ }
+ parts := strings.Split(path, "/")
+ parts[0] = "root"
+ return parts
+}
+
+func GetNodeFromRootByPath(root *Node, path string) *Node {
+ return root.getByPath(splitPath(path))
+}
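BuildTree parses the origin_urls text (folders end with ':', files are "[FileSize:][Modified:]Url", two more spaces of indentation per level) into the tree the driver serves. A hypothetical usage snippet, not part of the patch, assuming it is compiled inside the alist module:

```go
package main

import (
	"fmt"

	_123Link "github.com/alist-org/alist/v3/drivers/123_link"
)

func main() {
	// Folders end with ':', files are "[FileSize:][Modified:]Url",
	// and every level is indented by two more spaces.
	text := "music:\n" +
		"  https://vip.123pan.com/29/folder/song1.mp3\n" +
		"  3214721:1699000000:https://vip.123pan.com/29/folder/song2.mp3\n" +
		"docs:\n" +
		"  https://vip.123pan.com/29/folder/readme.pdf\n"

	root, err := _123Link.BuildTree(text)
	if err != nil {
		panic(err)
	}
	music := _123Link.GetNodeFromRootByPath(root, "/music")
	fmt.Println(music.Name, len(music.Children)) // music 2
}
```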
diff --git a/drivers/123_link/types.go b/drivers/123_link/types.go
new file mode 100755
index 00000000..3fb040eb
--- /dev/null
+++ b/drivers/123_link/types.go
@@ -0,0 +1,66 @@
+package _123Link
+
+import (
+ "time"
+
+ "github.com/alist-org/alist/v3/internal/errs"
+ "github.com/alist-org/alist/v3/internal/model"
+)
+
+// Node is a node in the folder tree
+type Node struct {
+ Url string
+ Name string
+ Level int
+ Modified int64
+ Size int64
+ Children []*Node
+}
+
+func (node *Node) getByPath(paths []string) *Node {
+ if len(paths) == 0 || node == nil {
+ return nil
+ }
+ if node.Name != paths[0] {
+ return nil
+ }
+ if len(paths) == 1 {
+ return node
+ }
+ for _, child := range node.Children {
+ tmp := child.getByPath(paths[1:])
+ if tmp != nil {
+ return tmp
+ }
+ }
+ return nil
+}
+
+func (node *Node) isFile() bool {
+ return node.Url != ""
+}
+
+func (node *Node) calSize() int64 {
+ if node.isFile() {
+ return node.Size
+ }
+ var size int64 = 0
+ for _, child := range node.Children {
+ size += child.calSize()
+ }
+ node.Size = size
+ return size
+}
+
+func nodeToObj(node *Node, path string) (model.Obj, error) {
+ if node == nil {
+ return nil, errs.ObjectNotFound
+ }
+ return &model.Object{
+ Name: node.Name,
+ Size: node.Size,
+ Modified: time.Unix(node.Modified, 0),
+ IsFolder: !node.isFile(),
+ Path: path,
+ }, nil
+}
diff --git a/drivers/123_link/util.go b/drivers/123_link/util.go
new file mode 100755
index 00000000..29c9b54d
--- /dev/null
+++ b/drivers/123_link/util.go
@@ -0,0 +1,30 @@
+package _123Link
+
+import (
+ "crypto/md5"
+ "fmt"
+ "math/rand"
+ "net/url"
+ "time"
+)
+
+func SignURL(originURL, privateKey string, uid uint64, validDuration time.Duration) (newURL string, err error) {
+ if privateKey == "" {
+ return originURL, nil
+ }
+ var (
+		ts = time.Now().Add(validDuration).Unix() // expiration timestamp
+		rInt = rand.Int() // random positive integer
+ objURL *url.URL
+ )
+ objURL, err = url.Parse(originURL)
+ if err != nil {
+ return "", err
+ }
+ authKey := fmt.Sprintf("%d-%d-%d-%x", ts, rInt, uid, md5.Sum([]byte(fmt.Sprintf("%s-%d-%d-%d-%s",
+ objURL.Path, ts, rInt, uid, privateKey))))
+ v := objURL.Query()
+ v.Add("auth_key", authKey)
+ objURL.RawQuery = v.Encode()
+ return objURL.String(), nil
+}
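SignURL appends an auth_key query parameter of the form ts-rand-uid-md5(path-ts-rand-uid-privateKey), where ts is the expiration timestamp. A brief usage sketch with made-up credentials:

```go
package main

import (
	"fmt"
	"time"

	_123Link "github.com/alist-org/alist/v3/drivers/123_link"
)

func main() {
	// Hypothetical credentials: the private key and uid come from the
	// 123pan direct-link settings; the URL is a stored origin URL.
	signed, err := _123Link.SignURL(
		"https://vip.123pan.com/29/folder/file.mp3",
		"my-private-key",
		29,
		30*time.Minute,
	)
	if err != nil {
		panic(err)
	}
	fmt.Println(signed) // ...file.mp3?auth_key=<expires>-<rand>-29-<md5>
}
```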
diff --git a/drivers/123_share/driver.go b/drivers/123_share/driver.go
old mode 100644
new mode 100755
diff --git a/drivers/123_share/meta.go b/drivers/123_share/meta.go
old mode 100644
new mode 100755
diff --git a/drivers/123_share/types.go b/drivers/123_share/types.go
old mode 100644
new mode 100755
index cd96b755..e8ca9e77
--- a/drivers/123_share/types.go
+++ b/drivers/123_share/types.go
@@ -1,6 +1,7 @@
package _123Share
import (
+ "github.com/alist-org/alist/v3/pkg/utils"
"net/url"
"path"
"strconv"
@@ -21,6 +22,10 @@ type File struct {
DownloadUrl string `json:"DownloadUrl"`
}
+func (f File) GetHash() utils.HashInfo {
+ return utils.HashInfo{}
+}
+
func (f File) GetPath() string {
return ""
}
@@ -36,6 +41,9 @@ func (f File) GetName() string {
func (f File) ModTime() time.Time {
return f.UpdateAt
}
+func (f File) CreateTime() time.Time {
+ return f.UpdateAt
+}
func (f File) IsDir() bool {
return f.Type == 1
diff --git a/drivers/123_share/util.go b/drivers/123_share/util.go
old mode 100644
new mode 100755
diff --git a/drivers/139/driver.go b/drivers/139/driver.go
old mode 100644
new mode 100755
index 52cbc47c..69ab68f7
--- a/drivers/139/driver.go
+++ b/drivers/139/driver.go
@@ -103,9 +103,9 @@ func (d *Yun139) MakeDir(ctx context.Context, parentDir model.Obj, dirName strin
return err
}
-func (d *Yun139) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
+func (d *Yun139) Move(ctx context.Context, srcObj, dstDir model.Obj) (model.Obj, error) {
if d.isFamily() {
- return errs.NotImplement
+ return nil, errs.NotImplement
}
var contentInfoList []string
var catalogInfoList []string
@@ -131,7 +131,10 @@ func (d *Yun139) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
}
pathname := "/orchestration/personalCloud/batchOprTask/v1.0/createBatchOprTask"
_, err := d.post(pathname, data, nil)
- return err
+ if err != nil {
+ return nil, err
+ }
+ return srcObj, nil
}
func (d *Yun139) Rename(ctx context.Context, srcObj model.Obj, newName string) error {
diff --git a/drivers/139/meta.go b/drivers/139/meta.go
old mode 100644
new mode 100755
diff --git a/drivers/139/types.go b/drivers/139/types.go
old mode 100644
new mode 100755
index ef0b2477..217aeb9f
--- a/drivers/139/types.go
+++ b/drivers/139/types.go
@@ -10,7 +10,7 @@ type Catalog struct {
CatalogID string `json:"catalogID"`
CatalogName string `json:"catalogName"`
//CatalogType int `json:"catalogType"`
- //CreateTime string `json:"createTime"`
+ CreateTime string `json:"createTime"`
UpdateTime string `json:"updateTime"`
//IsShared bool `json:"isShared"`
//CatalogLevel int `json:"catalogLevel"`
@@ -63,7 +63,7 @@ type Content struct {
//ParentCatalogID string `json:"parentCatalogId"`
//Channel string `json:"channel"`
//GeoLocFlag string `json:"geoLocFlag"`
- //Digest string `json:"digest"`
+ Digest string `json:"digest"`
//Version string `json:"version"`
//FileEtag string `json:"fileEtag"`
//FileVersion string `json:"fileVersion"`
@@ -141,7 +141,7 @@ type CloudContent struct {
//ContentSuffix string `json:"contentSuffix"`
ContentSize int64 `json:"contentSize"`
//ContentDesc string `json:"contentDesc"`
- //CreateTime string `json:"createTime"`
+ CreateTime string `json:"createTime"`
//Shottime interface{} `json:"shottime"`
LastUpdateTime string `json:"lastUpdateTime"`
ThumbnailURL string `json:"thumbnailURL"`
@@ -165,7 +165,7 @@ type CloudCatalog struct {
CatalogID string `json:"catalogID"`
CatalogName string `json:"catalogName"`
//CloudID string `json:"cloudID"`
- //CreateTime string `json:"createTime"`
+ CreateTime string `json:"createTime"`
LastUpdateTime string `json:"lastUpdateTime"`
//Creator string `json:"creator"`
//CreatorNickname string `json:"creatorNickname"`
diff --git a/drivers/139/util.go b/drivers/139/util.go
old mode 100644
new mode 100755
index 1c9574db..0f26b149
--- a/drivers/139/util.go
+++ b/drivers/139/util.go
@@ -48,7 +48,7 @@ func calSign(body, ts, randStr string) string {
}
func getTime(t string) time.Time {
- stamp, _ := time.ParseInLocation("20060102150405", t, time.Local)
+ stamp, _ := time.ParseInLocation("20060102150405", t, utils.CNLoc)
return stamp
}
@@ -139,6 +139,7 @@ func (d *Yun139) getFiles(catalogID string) ([]model.Obj, error) {
Name: catalog.CatalogName,
Size: 0,
Modified: getTime(catalog.UpdateTime),
+ Ctime: getTime(catalog.CreateTime),
IsFolder: true,
}
files = append(files, &f)
@@ -150,6 +151,7 @@ func (d *Yun139) getFiles(catalogID string) ([]model.Obj, error) {
Name: content.ContentName,
Size: content.ContentSize,
Modified: getTime(content.UpdateTime),
+ HashInfo: utils.NewHashInfo(utils.MD5, content.Digest),
},
Thumbnail: model.Thumbnail{Thumbnail: content.ThumbnailURL},
//Thumbnail: content.BigthumbnailURL,
@@ -202,6 +204,7 @@ func (d *Yun139) familyGetFiles(catalogID string) ([]model.Obj, error) {
Size: 0,
IsFolder: true,
Modified: getTime(catalog.LastUpdateTime),
+ Ctime: getTime(catalog.CreateTime),
}
files = append(files, &f)
}
@@ -212,6 +215,7 @@ func (d *Yun139) familyGetFiles(catalogID string) ([]model.Obj, error) {
Name: content.ContentName,
Size: content.ContentSize,
Modified: getTime(content.LastUpdateTime),
+ Ctime: getTime(content.CreateTime),
},
Thumbnail: model.Thumbnail{Thumbnail: content.ThumbnailURL},
//Thumbnail: content.BigthumbnailURL,
diff --git a/drivers/189/driver.go b/drivers/189/driver.go
old mode 100644
new mode 100755
diff --git a/drivers/189/help.go b/drivers/189/help.go
old mode 100644
new mode 100755
diff --git a/drivers/189/login.go b/drivers/189/login.go
old mode 100644
new mode 100755
diff --git a/drivers/189/meta.go b/drivers/189/meta.go
old mode 100644
new mode 100755
diff --git a/drivers/189/types.go b/drivers/189/types.go
old mode 100644
new mode 100755
diff --git a/drivers/189/util.go b/drivers/189/util.go
old mode 100644
new mode 100755
index 680ce252..0b4c0633
--- a/drivers/189/util.go
+++ b/drivers/189/util.go
@@ -380,7 +380,7 @@ func (d *Cloud189) newUpload(ctx context.Context, dstDir model.Obj, file model.F
if err != nil {
return err
}
- up(int(i * 100 / count))
+ up(float64(i) * 100 / float64(count))
}
fileMd5 := hex.EncodeToString(md5Sum.Sum(nil))
sliceMd5 := fileMd5
diff --git a/drivers/189pc/driver.go b/drivers/189pc/driver.go
old mode 100644
new mode 100755
index c64c0463..f0977995
--- a/drivers/189pc/driver.go
+++ b/drivers/189pc/driver.go
@@ -27,10 +27,15 @@ type Cloud189PC struct {
tokenInfo *AppSessionResp
uploadThread int
+
+ storageConfig driver.Config
}
func (y *Cloud189PC) Config() driver.Config {
- return config
+ if y.storageConfig.Name == "" {
+ y.storageConfig = config
+ }
+ return y.storageConfig
}
func (y *Cloud189PC) GetAddition() driver.Additional {
@@ -38,6 +43,9 @@ func (y *Cloud189PC) GetAddition() driver.Additional {
}
func (y *Cloud189PC) Init(ctx context.Context) (err error) {
+	// compatibility with the legacy upload API
+ y.storageConfig.NoOverwriteUpload = y.isFamily() && (y.Addition.RapidUpload || y.Addition.UploadMethod == "old")
+
// 处理个人云和家庭云参数
if y.isFamily() && y.RootFolderID == "-11" {
y.RootFolderID = ""
@@ -118,10 +126,11 @@ func (y *Cloud189PC) Link(ctx context.Context, file model.Obj, args model.LinkAr
// 重定向获取真实链接
downloadUrl.URL = strings.Replace(strings.ReplaceAll(downloadUrl.URL, "&", "&"), "http://", "https://", 1)
- res, err := base.NoRedirectClient.R().SetContext(ctx).Get(downloadUrl.URL)
+ res, err := base.NoRedirectClient.R().SetContext(ctx).SetDoNotParseResponse(true).Get(downloadUrl.URL)
if err != nil {
return nil, err
}
+ defer res.RawBody().Close()
if res.StatusCode() == 302 {
downloadUrl.URL = res.Header().Get("location")
}
@@ -302,6 +311,13 @@ func (y *Cloud189PC) Remove(ctx context.Context, obj model.Obj) error {
}
func (y *Cloud189PC) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) (model.Obj, error) {
+	// slow response; enable only when needed
+ if y.Addition.RapidUpload {
+ if newObj, err := y.RapidUpload(ctx, dstDir, stream); err == nil {
+ return newObj, nil
+ }
+ }
+
switch y.UploadMethod {
case "old":
return y.OldUpload(ctx, dstDir, stream, up)
diff --git a/drivers/189pc/help.go b/drivers/189pc/help.go
old mode 100644
new mode 100755
diff --git a/drivers/189pc/meta.go b/drivers/189pc/meta.go
old mode 100644
new mode 100755
index e4280186..079ac7cc
--- a/drivers/189pc/meta.go
+++ b/drivers/189pc/meta.go
@@ -16,6 +16,7 @@ type Addition struct {
FamilyID string `json:"family_id"`
UploadMethod string `json:"upload_method" type:"select" options:"stream,rapid,old" default:"stream"`
UploadThread string `json:"upload_thread" default:"3" help:"1<=thread<=32"`
+ RapidUpload bool `json:"rapid_upload"`
NoUseOcr bool `json:"no_use_ocr"`
}
diff --git a/drivers/189pc/types.go b/drivers/189pc/types.go
old mode 100644
new mode 100755
index 1087d33b..d779659e
--- a/drivers/189pc/types.go
+++ b/drivers/189pc/types.go
@@ -3,6 +3,7 @@ package _189pc
import (
"encoding/xml"
"fmt"
+ "github.com/alist-org/alist/v3/pkg/utils"
"sort"
"strings"
"time"
@@ -175,6 +176,14 @@ type Cloud189File struct {
// StarLabel int64 `json:"starLabel"`
}
+func (c *Cloud189File) CreateTime() time.Time {
+ return time.Time(c.CreateDate)
+}
+
+func (c *Cloud189File) GetHash() utils.HashInfo {
+ return utils.NewHashInfo(utils.MD5, c.Md5)
+}
+
func (c *Cloud189File) GetSize() int64 { return c.Size }
func (c *Cloud189File) GetName() string { return c.Name }
func (c *Cloud189File) ModTime() time.Time { return time.Time(c.LastOpTime) }
@@ -199,6 +208,14 @@ type Cloud189Folder struct {
// StarLabel int64 `json:"starLabel"`
}
+func (c *Cloud189Folder) CreateTime() time.Time {
+ return time.Time(c.CreateDate)
+}
+
+func (c *Cloud189Folder) GetHash() utils.HashInfo {
+ return utils.HashInfo{}
+}
+
func (c *Cloud189Folder) GetSize() int64 { return 0 }
func (c *Cloud189Folder) GetName() string { return c.Name }
func (c *Cloud189Folder) ModTime() time.Time { return time.Time(c.LastOpTime) }
diff --git a/drivers/189pc/utils.go b/drivers/189pc/utils.go
old mode 100644
new mode 100755
index a35a0efd..5e403a83
--- a/drivers/189pc/utils.go
+++ b/drivers/189pc/utils.go
@@ -13,7 +13,6 @@ import (
"net/http"
"net/http/cookiejar"
"net/url"
- "os"
"regexp"
"sort"
"strconv"
@@ -514,7 +513,7 @@ func (y *Cloud189PC) StreamUpload(ctx context.Context, dstDir model.Obj, file mo
if err != nil {
return err
}
- up(int(threadG.Success()) * 100 / count)
+ up(float64(threadG.Success()) * 100 / float64(count))
return nil
})
}
@@ -547,17 +546,30 @@ func (y *Cloud189PC) StreamUpload(ctx context.Context, dstDir model.Obj, file mo
return resp.toFile(), nil
}
+func (y *Cloud189PC) RapidUpload(ctx context.Context, dstDir model.Obj, stream model.FileStreamer) (model.Obj, error) {
+ fileMd5 := stream.GetHash().GetHash(utils.MD5)
+ if len(fileMd5) < utils.MD5.Width {
+ return nil, errors.New("invalid hash")
+ }
+
+ uploadInfo, err := y.OldUploadCreate(ctx, dstDir.GetID(), fileMd5, stream.GetName(), fmt.Sprint(stream.GetSize()))
+ if err != nil {
+ return nil, err
+ }
+
+ if uploadInfo.FileDataExists != 1 {
+ return nil, errors.New("rapid upload fail")
+ }
+
+ return y.OldUploadCommit(ctx, uploadInfo.FileCommitUrl, uploadInfo.UploadFileId)
+}
+
// 快传
func (y *Cloud189PC) FastUpload(ctx context.Context, dstDir model.Obj, file model.FileStreamer, up driver.UpdateProgress) (model.Obj, error) {
- // 需要获取完整文件md5,必须支持 io.Seek
- tempFile, err := utils.CreateTempFile(file.GetReadCloser(), file.GetSize())
+ tempFile, err := file.CacheFullInTempFile()
if err != nil {
return nil, err
}
- defer func() {
- _ = tempFile.Close()
- _ = os.Remove(tempFile.Name())
- }()
var sliceSize = partSize(file.GetSize())
count := int(math.Ceil(float64(file.GetSize()) / float64(sliceSize)))
@@ -664,7 +676,7 @@ func (y *Cloud189PC) FastUpload(ctx context.Context, dstDir model.Obj, file mode
return err
}
- up(int(threadG.Success()) * 100 / len(uploadUrls))
+ up(float64(threadG.Success()) * 100 / float64(len(uploadUrls)))
uploadProgress.UploadParts[i] = ""
return nil
})
@@ -741,69 +753,24 @@ func (y *Cloud189PC) GetMultiUploadUrls(ctx context.Context, uploadFileId string
// 旧版本上传,家庭云不支持覆盖
func (y *Cloud189PC) OldUpload(ctx context.Context, dstDir model.Obj, file model.FileStreamer, up driver.UpdateProgress) (model.Obj, error) {
- // 需要获取完整文件md5,必须支持 io.Seek
- tempFile, err := utils.CreateTempFile(file.GetReadCloser(), file.GetSize())
+ tempFile, err := file.CacheFullInTempFile()
if err != nil {
return nil, err
}
- defer func() {
- _ = tempFile.Close()
- _ = os.Remove(tempFile.Name())
- }()
-
- // 计算md5
- fileMd5 := md5.New()
- if _, err := io.Copy(fileMd5, tempFile); err != nil {
- return nil, err
- }
- if _, err = tempFile.Seek(0, io.SeekStart); err != nil {
+ fileMd5, err := utils.HashFile(utils.MD5, tempFile)
+ if err != nil {
return nil, err
}
- fileMd5Hex := strings.ToUpper(hex.EncodeToString(fileMd5.Sum(nil)))
// 创建上传会话
- var uploadInfo CreateUploadFileResp
-
- fullUrl := API_URL + "/createUploadFile.action"
- if y.isFamily() {
- fullUrl = API_URL + "/family/file/createFamilyFile.action"
- }
- _, err = y.post(fullUrl, func(req *resty.Request) {
- req.SetContext(ctx)
- if y.isFamily() {
- req.SetQueryParams(map[string]string{
- "familyId": y.FamilyID,
- "fileMd5": fileMd5Hex,
- "fileName": file.GetName(),
- "fileSize": fmt.Sprint(file.GetSize()),
- "parentId": dstDir.GetID(),
- "resumePolicy": "1",
- })
- } else {
- req.SetFormData(map[string]string{
- "parentFolderId": dstDir.GetID(),
- "fileName": file.GetName(),
- "size": fmt.Sprint(file.GetSize()),
- "md5": fileMd5Hex,
- "opertype": "3",
- "flag": "1",
- "resumePolicy": "1",
- "isLog": "0",
- // "baseFileId": "",
- // "lastWrite":"",
- // "localPath": strings.ReplaceAll(param.LocalPath, "\\", "/"),
- // "fileExt": "",
- })
- }
- }, &uploadInfo)
-
+ uploadInfo, err := y.OldUploadCreate(ctx, dstDir.GetID(), fileMd5, file.GetName(), fmt.Sprint(file.GetSize()))
if err != nil {
return nil, err
}
// 网盘中不存在该文件,开始上传
- status := GetUploadFileStatusResp{CreateUploadFileResp: uploadInfo}
- for status.Size < file.GetSize() && status.FileDataExists != 1 {
+ status := GetUploadFileStatusResp{CreateUploadFileResp: *uploadInfo}
+ for status.GetSize() < file.GetSize() && status.FileDataExists != 1 {
if utils.IsCanceled(ctx) {
return nil, ctx.Err()
}
@@ -842,28 +809,70 @@ func (y *Cloud189PC) OldUpload(ctx context.Context, dstDir model.Obj, file model
if err != nil {
return nil, err
}
-
if _, err := tempFile.Seek(status.GetSize(), io.SeekStart); err != nil {
return nil, err
}
- up(int(status.Size / file.GetSize()))
+ up(float64(status.GetSize()) / float64(file.GetSize()) * 100)
}
- // 提交
+ return y.OldUploadCommit(ctx, status.FileCommitUrl, status.UploadFileId)
+}
+
+// create an upload session
+func (y *Cloud189PC) OldUploadCreate(ctx context.Context, parentID string, fileMd5, fileName, fileSize string) (*CreateUploadFileResp, error) {
+ var uploadInfo CreateUploadFileResp
+
+ fullUrl := API_URL + "/createUploadFile.action"
+ if y.isFamily() {
+ fullUrl = API_URL + "/family/file/createFamilyFile.action"
+ }
+ _, err := y.post(fullUrl, func(req *resty.Request) {
+ req.SetContext(ctx)
+ if y.isFamily() {
+ req.SetQueryParams(map[string]string{
+ "familyId": y.FamilyID,
+ "parentId": parentID,
+ "fileMd5": fileMd5,
+ "fileName": fileName,
+ "fileSize": fileSize,
+ "resumePolicy": "1",
+ })
+ } else {
+ req.SetFormData(map[string]string{
+ "parentFolderId": parentID,
+ "fileName": fileName,
+ "size": fileSize,
+ "md5": fileMd5,
+ "opertype": "3",
+ "flag": "1",
+ "resumePolicy": "1",
+ "isLog": "0",
+ })
+ }
+ }, &uploadInfo)
+
+ if err != nil {
+ return nil, err
+ }
+ return &uploadInfo, nil
+}
+
+// Commit the uploaded file
+func (y *Cloud189PC) OldUploadCommit(ctx context.Context, fileCommitUrl string, uploadFileID int64) (model.Obj, error) {
var resp OldCommitUploadFileResp
- _, err = y.post(status.FileCommitUrl, func(req *resty.Request) {
+ _, err := y.post(fileCommitUrl, func(req *resty.Request) {
req.SetContext(ctx)
if y.isFamily() {
req.SetHeaders(map[string]string{
"ResumePolicy": "1",
- "UploadFileId": fmt.Sprint(status.UploadFileId),
+ "UploadFileId": fmt.Sprint(uploadFileID),
"FamilyId": fmt.Sprint(y.FamilyID),
})
} else {
req.SetFormData(map[string]string{
"opertype": "3",
"resumePolicy": "1",
- "uploadFileId": fmt.Sprint(status.UploadFileId),
+ "uploadFileId": fmt.Sprint(uploadFileID),
"isLog": "0",
})
}
diff --git a/drivers/alias/driver.go b/drivers/alias/driver.go
old mode 100644
new mode 100755
diff --git a/drivers/alias/meta.go b/drivers/alias/meta.go
old mode 100644
new mode 100755
diff --git a/drivers/alias/types.go b/drivers/alias/types.go
old mode 100644
new mode 100755
diff --git a/drivers/alias/util.go b/drivers/alias/util.go
old mode 100644
new mode 100755
diff --git a/drivers/alist_v2/driver.go b/drivers/alist_v2/driver.go
old mode 100644
new mode 100755
diff --git a/drivers/alist_v2/meta.go b/drivers/alist_v2/meta.go
old mode 100644
new mode 100755
diff --git a/drivers/alist_v2/types.go b/drivers/alist_v2/types.go
old mode 100644
new mode 100755
diff --git a/drivers/alist_v2/util.go b/drivers/alist_v2/util.go
old mode 100644
new mode 100755
diff --git a/drivers/alist_v3/driver.go b/drivers/alist_v3/driver.go
old mode 100644
new mode 100755
index a383d6d2..9b0c51f7
--- a/drivers/alist_v3/driver.go
+++ b/drivers/alist_v3/driver.go
@@ -3,6 +3,7 @@ package alist_v3
import (
"context"
"fmt"
+ "io"
"net/http"
"path"
"strconv"
@@ -93,8 +94,10 @@ func (d *AListV3) List(ctx context.Context, dir model.Obj, args model.ListArgs)
Object: model.Object{
Name: f.Name,
Modified: f.Modified,
+ Ctime: f.Created,
Size: f.Size,
IsFolder: f.IsDir,
+ HashInfo: utils.FromString(f.HashInfo),
},
Thumbnail: model.Thumbnail{Thumbnail: f.Thumb},
}
@@ -176,7 +179,7 @@ func (d *AListV3) Put(ctx context.Context, dstDir model.Obj, stream model.FileSt
SetHeader("Password", d.MetaPassword).
SetHeader("Content-Length", strconv.FormatInt(stream.GetSize(), 10)).
SetContentLength(true).
- SetBody(stream.GetReadCloser())
+ SetBody(io.ReadCloser(stream))
})
return err
}
diff --git a/drivers/alist_v3/meta.go b/drivers/alist_v3/meta.go
old mode 100644
new mode 100755
diff --git a/drivers/alist_v3/types.go b/drivers/alist_v3/types.go
old mode 100644
new mode 100755
index 77801254..e517307f
--- a/drivers/alist_v3/types.go
+++ b/drivers/alist_v3/types.go
@@ -18,9 +18,11 @@ type ObjResp struct {
Size int64 `json:"size"`
IsDir bool `json:"is_dir"`
Modified time.Time `json:"modified"`
+ Created time.Time `json:"created"`
Sign string `json:"sign"`
Thumb string `json:"thumb"`
Type int `json:"type"`
+ HashInfo string `json:"hashinfo"`
}
type FsListResp struct {
diff --git a/drivers/alist_v3/util.go b/drivers/alist_v3/util.go
old mode 100644
new mode 100755
diff --git a/drivers/aliyundrive/driver.go b/drivers/aliyundrive/driver.go
old mode 100644
new mode 100755
index d3449d12..83c3f522
--- a/drivers/aliyundrive/driver.go
+++ b/drivers/aliyundrive/driver.go
@@ -14,6 +14,8 @@ import (
"os"
"time"
+ "github.com/alist-org/alist/v3/internal/stream"
+
"github.com/alist-org/alist/v3/drivers/base"
"github.com/alist-org/alist/v3/internal/conf"
"github.com/alist-org/alist/v3/internal/driver"
@@ -163,14 +165,14 @@ func (d *AliDrive) Remove(ctx context.Context, obj model.Obj) error {
return err
}
-func (d *AliDrive) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
- file := model.FileStream{
- Obj: stream,
- ReadCloser: stream,
- Mimetype: stream.GetMimetype(),
+func (d *AliDrive) Put(ctx context.Context, dstDir model.Obj, streamer model.FileStreamer, up driver.UpdateProgress) error {
+ file := stream.FileStream{
+ Obj: streamer,
+ Reader: streamer,
+ Mimetype: streamer.GetMimetype(),
}
const DEFAULT int64 = 10485760
- var count = int(math.Ceil(float64(stream.GetSize()) / float64(DEFAULT)))
+ var count = int(math.Ceil(float64(streamer.GetSize()) / float64(DEFAULT)))
partInfoList := make([]base.Json, 0, count)
for i := 1; i <= count; i++ {
@@ -187,8 +189,8 @@ func (d *AliDrive) Put(ctx context.Context, dstDir model.Obj, stream model.FileS
}
var localFile *os.File
- if fileStream, ok := file.ReadCloser.(*model.FileStream); ok {
- localFile, _ = fileStream.ReadCloser.(*os.File)
+ if fileStream, ok := file.Reader.(*stream.FileStream); ok {
+ localFile, _ = fileStream.Reader.(*os.File)
}
if d.RapidUpload {
buf := bytes.NewBuffer(make([]byte, 0, 1024))
@@ -200,12 +202,12 @@ func (d *AliDrive) Put(ctx context.Context, dstDir model.Obj, stream model.FileS
}
} else {
// Splice the already-read head back in front of the stream
- file.ReadCloser = struct {
+ file.Reader = struct {
io.Reader
io.Closer
}{
Reader: io.MultiReader(buf, file),
- Closer: file,
+ Closer: &file,
}
}
} else {
@@ -281,7 +283,7 @@ func (d *AliDrive) Put(ctx context.Context, dstDir model.Obj, stream model.FileS
if _, err = localFile.Seek(0, io.SeekStart); err != nil {
return err
}
- file.ReadCloser = localFile
+ file.Reader = localFile
}
for i, partInfo := range resp.PartInfoList {
@@ -303,7 +305,7 @@ func (d *AliDrive) Put(ctx context.Context, dstDir model.Obj, stream model.FileS
}
res.Body.Close()
if count > 0 {
- up(i * 100 / count)
+ up(float64(i) * 100 / float64(count))
}
}
var resp2 base.Json
diff --git a/drivers/aliyundrive/global.go b/drivers/aliyundrive/global.go
old mode 100644
new mode 100755
diff --git a/drivers/aliyundrive/help.go b/drivers/aliyundrive/help.go
old mode 100644
new mode 100755
diff --git a/drivers/aliyundrive/meta.go b/drivers/aliyundrive/meta.go
old mode 100644
new mode 100755
diff --git a/drivers/aliyundrive/types.go b/drivers/aliyundrive/types.go
old mode 100644
new mode 100755
diff --git a/drivers/aliyundrive/util.go b/drivers/aliyundrive/util.go
old mode 100644
new mode 100755
diff --git a/drivers/aliyundrive_open/driver.go b/drivers/aliyundrive_open/driver.go
old mode 100644
new mode 100755
index 025fe39e..dc636439
--- a/drivers/aliyundrive_open/driver.go
+++ b/drivers/aliyundrive_open/driver.go
@@ -83,7 +83,7 @@ func (d *AliyundriveOpen) link(ctx context.Context, file model.Obj) (*model.Link
req.SetBody(base.Json{
"drive_id": d.DriveId,
"file_id": file.GetID(),
- "expire_sec": 14400,
+ "expire_sec": 900,
})
})
if err != nil {
@@ -96,7 +96,7 @@ func (d *AliyundriveOpen) link(ctx context.Context, file model.Obj) (*model.Link
}
url = utils.Json.Get(res, "streamsUrl", d.LIVPDownloadFormat).ToString()
}
- exp := 895 * time.Second
+ exp := time.Minute
return &model.Link{
URL: url,
Expiration: &exp,
@@ -110,7 +110,9 @@ func (d *AliyundriveOpen) Link(ctx context.Context, file model.Obj, args model.L
return d.limitLink(ctx, file)
}
-func (d *AliyundriveOpen) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) error {
+func (d *AliyundriveOpen) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) (model.Obj, error) {
+ nowTime, _ := getNowTime()
+ newDir := File{CreatedAt: nowTime, UpdatedAt: nowTime}
_, err := d.request("/adrive/v1.0/openFile/create", http.MethodPost, func(req *resty.Request) {
req.SetBody(base.Json{
"drive_id": d.DriveId,
@@ -118,12 +120,16 @@ func (d *AliyundriveOpen) MakeDir(ctx context.Context, parentDir model.Obj, dirN
"name": dirName,
"type": "folder",
"check_name_mode": "refuse",
- })
+ }).SetResult(&newDir)
})
- return err
+ if err != nil {
+ return nil, err
+ }
+ return fileToObj(newDir), nil
}
-func (d *AliyundriveOpen) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
+func (d *AliyundriveOpen) Move(ctx context.Context, srcObj, dstDir model.Obj) (model.Obj, error) {
+ var resp MoveOrCopyResp
_, err := d.request("/adrive/v1.0/openFile/move", http.MethodPost, func(req *resty.Request) {
req.SetBody(base.Json{
"drive_id": d.DriveId,
@@ -131,20 +137,36 @@ func (d *AliyundriveOpen) Move(ctx context.Context, srcObj, dstDir model.Obj) er
"to_parent_file_id": dstDir.GetID(),
"check_name_mode": "refuse", // optional:ignore,auto_rename,refuse
//"new_name": "newName", // The new name to use when a file of the same name exists
- })
+ }).SetResult(&resp)
})
- return err
+ if err != nil {
+ return nil, err
+ }
+ if resp.Exist {
+ return nil, errors.New("existence of files with the same name")
+ }
+
+ if srcObj, ok := srcObj.(*model.ObjThumb); ok {
+ srcObj.ID = resp.FileID
+ srcObj.Modified = time.Now()
+ return srcObj, nil
+ }
+ return nil, nil
}
-func (d *AliyundriveOpen) Rename(ctx context.Context, srcObj model.Obj, newName string) error {
+func (d *AliyundriveOpen) Rename(ctx context.Context, srcObj model.Obj, newName string) (model.Obj, error) {
+ var newFile File
_, err := d.request("/adrive/v1.0/openFile/update", http.MethodPost, func(req *resty.Request) {
req.SetBody(base.Json{
"drive_id": d.DriveId,
"file_id": srcObj.GetID(),
"name": newName,
- })
+ }).SetResult(&newFile)
})
- return err
+ if err != nil {
+ return nil, err
+ }
+ return fileToObj(newFile), nil
}
func (d *AliyundriveOpen) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
@@ -173,7 +195,7 @@ func (d *AliyundriveOpen) Remove(ctx context.Context, obj model.Obj) error {
return err
}
-func (d *AliyundriveOpen) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
+func (d *AliyundriveOpen) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) (model.Obj, error) {
return d.upload(ctx, dstDir, stream, up)
}
@@ -188,7 +210,7 @@ func (d *AliyundriveOpen) Other(ctx context.Context, args model.OtherArgs) (inte
case "video_preview":
uri = "/adrive/v1.0/openFile/getVideoPreviewPlayInfo"
data["category"] = "live_transcoding"
- data["url_expire_sec"] = 14400
+ data["url_expire_sec"] = 900
default:
return nil, errs.NotSupport
}
@@ -202,3 +224,7 @@ func (d *AliyundriveOpen) Other(ctx context.Context, args model.OtherArgs) (inte
}
var _ driver.Driver = (*AliyundriveOpen)(nil)
+var _ driver.MkdirResult = (*AliyundriveOpen)(nil)
+var _ driver.MoveResult = (*AliyundriveOpen)(nil)
+var _ driver.RenameResult = (*AliyundriveOpen)(nil)
+var _ driver.PutResult = (*AliyundriveOpen)(nil)
diff --git a/drivers/aliyundrive_open/meta.go b/drivers/aliyundrive_open/meta.go
old mode 100644
new mode 100755
index 83635ec0..9ae16dba
--- a/drivers/aliyundrive_open/meta.go
+++ b/drivers/aliyundrive_open/meta.go
@@ -11,7 +11,7 @@ type Addition struct {
RefreshToken string `json:"refresh_token" required:"true"`
OrderBy string `json:"order_by" type:"select" options:"name,size,updated_at,created_at"`
OrderDirection string `json:"order_direction" type:"select" options:"ASC,DESC"`
- OauthTokenURL string `json:"oauth_token_url" default:"https://api.xhofe.top/alist/ali_open/token"`
+ OauthTokenURL string `json:"oauth_token_url" default:"https://api.nn.ci/alist/ali_open/token"`
ClientID string `json:"client_id" required:"false" help:"Keep it empty if you don't have one"`
ClientSecret string `json:"client_secret" required:"false" help:"Keep it empty if you don't have one"`
RemoveWay string `json:"remove_way" required:"true" type:"select" options:"trash,delete"`
@@ -37,7 +37,7 @@ var config = driver.Config{
func init() {
op.RegisterDriver(func() driver.Driver {
return &AliyundriveOpen{
- base: "https://openapi.aliyundrive.com",
+ base: "https://openapi.alipan.com",
}
})
}
diff --git a/drivers/aliyundrive_open/types.go b/drivers/aliyundrive_open/types.go
old mode 100644
new mode 100755
index 6980effd..46830a51
--- a/drivers/aliyundrive_open/types.go
+++ b/drivers/aliyundrive_open/types.go
@@ -1,6 +1,7 @@
package aliyundrive_open
import (
+ "github.com/alist-org/alist/v3/pkg/utils"
"time"
"github.com/alist-org/alist/v3/internal/model"
@@ -17,22 +18,28 @@ type Files struct {
}
type File struct {
- DriveId string `json:"drive_id"`
- FileId string `json:"file_id"`
- ParentFileId string `json:"parent_file_id"`
- Name string `json:"name"`
- Size int64 `json:"size"`
- FileExtension string `json:"file_extension"`
- ContentHash string `json:"content_hash"`
- Category string `json:"category"`
- Type string `json:"type"`
- Thumbnail string `json:"thumbnail"`
- Url string `json:"url"`
- CreatedAt *time.Time `json:"created_at"`
- UpdatedAt time.Time `json:"updated_at"`
+ DriveId string `json:"drive_id"`
+ FileId string `json:"file_id"`
+ ParentFileId string `json:"parent_file_id"`
+ Name string `json:"name"`
+ Size int64 `json:"size"`
+ FileExtension string `json:"file_extension"`
+ ContentHash string `json:"content_hash"`
+ Category string `json:"category"`
+ Type string `json:"type"`
+ Thumbnail string `json:"thumbnail"`
+ Url string `json:"url"`
+ CreatedAt time.Time `json:"created_at"`
+ UpdatedAt time.Time `json:"updated_at"`
+
+ // create only
+ FileName string `json:"file_name"`
}
func fileToObj(f File) *model.ObjThumb {
+ if f.Name == "" {
+ f.Name = f.FileName
+ }
return &model.ObjThumb{
Object: model.Object{
ID: f.FileId,
@@ -40,6 +47,8 @@ func fileToObj(f File) *model.ObjThumb {
Size: f.Size,
Modified: f.UpdatedAt,
IsFolder: f.Type == "folder",
+ Ctime: f.CreatedAt,
+ HashInfo: utils.NewHashInfo(utils.SHA1, f.ContentHash),
},
Thumbnail: model.Thumbnail{Thumbnail: f.Thumbnail},
}
@@ -67,3 +76,9 @@ type CreateResp struct {
RapidUpload bool `json:"rapid_upload"`
PartInfoList []PartInfo `json:"part_info_list"`
}
+
+type MoveOrCopyResp struct {
+ Exist bool `json:"exist"`
+ DriveID string `json:"drive_id"`
+ FileID string `json:"file_id"`
+}
diff --git a/drivers/aliyundrive_open/upload.go b/drivers/aliyundrive_open/upload.go
old mode 100644
new mode 100755
index c700a16a..3b224e7d
--- a/drivers/aliyundrive_open/upload.go
+++ b/drivers/aliyundrive_open/upload.go
@@ -3,14 +3,11 @@ package aliyundrive_open
import (
"bytes"
"context"
- "crypto/sha1"
"encoding/base64"
- "encoding/hex"
"fmt"
"io"
"math"
"net/http"
- "os"
"strconv"
"strings"
"time"
@@ -18,7 +15,9 @@ import (
"github.com/alist-org/alist/v3/drivers/base"
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/model"
+ "github.com/alist-org/alist/v3/pkg/http_range"
"github.com/alist-org/alist/v3/pkg/utils"
+ "github.com/avast/retry-go"
"github.com/go-resty/resty/v2"
log "github.com/sirupsen/logrus"
)
@@ -32,19 +31,19 @@ func makePartInfos(size int) []base.Json {
}
func calPartSize(fileSize int64) int64 {
- var partSize int64 = 20 * 1024 * 1024
+ var partSize int64 = 20 * utils.MB
if fileSize > partSize {
- if fileSize > 1*1024*1024*1024*1024 { // file Size over 1TB
- partSize = 5 * 1024 * 1024 * 1024 // file part size 5GB
- } else if fileSize > 768*1024*1024*1024 { // over 768GB
+ if fileSize > 1*utils.TB { // file Size over 1TB
+ partSize = 5 * utils.GB // file part size 5GB
+ } else if fileSize > 768*utils.GB { // over 768GB
partSize = 109951163 // ≈ 104.8576MB, split 1TB into 10,000 parts
- } else if fileSize > 512*1024*1024*1024 { // over 512GB
+ } else if fileSize > 512*utils.GB { // over 512GB
partSize = 82463373 // ≈ 78.6432MB
- } else if fileSize > 384*1024*1024*1024 { // over 384GB
+ } else if fileSize > 384*utils.GB { // over 384GB
partSize = 54975582 // ≈ 52.4288MB
- } else if fileSize > 256*1024*1024*1024 { // over 256GB
+ } else if fileSize > 256*utils.GB { // over 256GB
partSize = 41231687 // ≈ 39.3216MB
- } else if fileSize > 128*1024*1024*1024 { // over 128GB
+ } else if fileSize > 128*utils.GB { // over 128GB
partSize = 27487791 // ≈ 26.2144MB
}
}
@@ -65,73 +64,40 @@ func (d *AliyundriveOpen) getUploadUrl(count int, fileId, uploadId string) ([]Pa
return resp.PartInfoList, err
}
-func (d *AliyundriveOpen) uploadPart(ctx context.Context, i, count int, reader *utils.MultiReadable, resp *CreateResp, retry bool) error {
- partInfo := resp.PartInfoList[i-1]
+func (d *AliyundriveOpen) uploadPart(ctx context.Context, r io.Reader, partInfo PartInfo) error {
uploadUrl := partInfo.UploadUrl
if d.InternalUpload {
uploadUrl = strings.ReplaceAll(uploadUrl, "https://cn-beijing-data.aliyundrive.net/", "http://ccp-bj29-bj-1592982087.oss-cn-beijing-internal.aliyuncs.com/")
}
- req, err := http.NewRequest("PUT", uploadUrl, reader)
+ req, err := http.NewRequestWithContext(ctx, "PUT", uploadUrl, r)
if err != nil {
return err
}
- req = req.WithContext(ctx)
res, err := base.HttpClient.Do(req)
if err != nil {
- if retry {
- reader.Reset()
- return d.uploadPart(ctx, i, count, reader, resp, false)
- }
return err
}
res.Body.Close()
- if retry && res.StatusCode == http.StatusForbidden {
- resp.PartInfoList, err = d.getUploadUrl(count, resp.FileId, resp.UploadId)
- if err != nil {
- return err
- }
- reader.Reset()
- return d.uploadPart(ctx, i, count, reader, resp, false)
- }
if res.StatusCode != http.StatusOK && res.StatusCode != http.StatusConflict {
return fmt.Errorf("upload status: %d", res.StatusCode)
}
return nil
}
-func (d *AliyundriveOpen) normalUpload(ctx context.Context, stream model.FileStreamer, up driver.UpdateProgress, createResp CreateResp, count int, partSize int64) error {
- log.Debugf("[aliyundive_open] normal upload")
- // 2. upload
- preTime := time.Now()
- for i := 1; i <= len(createResp.PartInfoList); i++ {
- if utils.IsCanceled(ctx) {
- return ctx.Err()
- }
- err := d.uploadPart(ctx, i, count, utils.NewMultiReadable(io.LimitReader(stream, partSize)), &createResp, true)
- if err != nil {
- return err
- }
- if count > 0 {
- up(i * 100 / count)
- }
- // refresh upload url if 50 minutes passed
- if time.Since(preTime) > 50*time.Minute {
- createResp.PartInfoList, err = d.getUploadUrl(count, createResp.FileId, createResp.UploadId)
- if err != nil {
- return err
- }
- preTime = time.Now()
- }
- }
+func (d *AliyundriveOpen) completeUpload(fileId, uploadId string) (model.Obj, error) {
// 3. complete
+ var newFile File
_, err := d.request("/adrive/v1.0/openFile/complete", http.MethodPost, func(req *resty.Request) {
req.SetBody(base.Json{
"drive_id": d.DriveId,
- "file_id": createResp.FileId,
- "upload_id": createResp.UploadId,
- })
+ "file_id": fileId,
+ "upload_id": uploadId,
+ }).SetResult(&newFile)
})
- return err
+ if err != nil {
+ return nil, err
+ }
+ return fileToObj(newFile), nil
}
type ProofRange struct {
@@ -159,110 +125,146 @@ func getProofRange(input string, size int64) (*ProofRange, error) {
return pr, nil
}
-func (d *AliyundriveOpen) calProofCode(file *os.File, fileSize int64) (string, error) {
- proofRange, err := getProofRange(d.AccessToken, fileSize)
+func (d *AliyundriveOpen) calProofCode(stream model.FileStreamer) (string, error) {
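+ // proof_code is the base64 encoding of a small byte range of the file; the range is derived from the access token and the file size (see getProofRange).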
+ proofRange, err := getProofRange(d.AccessToken, stream.GetSize())
+ if err != nil {
+ return "", err
+ }
+ length := proofRange.End - proofRange.Start
+ buf := bytes.NewBuffer(make([]byte, 0, length))
+ reader, err := stream.RangeRead(http_range.Range{Start: proofRange.Start, Length: length})
if err != nil {
return "", err
}
- buf := make([]byte, proofRange.End-proofRange.Start)
- _, err = file.ReadAt(buf, proofRange.Start)
+ _, err = io.CopyN(buf, reader, length)
if err != nil {
return "", err
}
- return base64.StdEncoding.EncodeToString(buf), nil
+ return base64.StdEncoding.EncodeToString(buf.Bytes()), nil
}
-func (d *AliyundriveOpen) upload(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
+func (d *AliyundriveOpen) upload(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) (model.Obj, error) {
// 1. create
// Part Size Unit: Bytes, Default: 20MB,
// Maximum number of slices 10,000, ≈195.3125GB
var partSize = calPartSize(stream.GetSize())
+ const dateFormat = "2006-01-02T15:04:05.000Z"
+ mtimeStr := stream.ModTime().UTC().Format(dateFormat)
+ ctimeStr := stream.CreateTime().UTC().Format(dateFormat)
+
createData := base.Json{
- "drive_id": d.DriveId,
- "parent_file_id": dstDir.GetID(),
- "name": stream.GetName(),
- "type": "file",
- "check_name_mode": "ignore",
+ "drive_id": d.DriveId,
+ "parent_file_id": dstDir.GetID(),
+ "name": stream.GetName(),
+ "type": "file",
+ "check_name_mode": "ignore",
+ "local_modified_at": mtimeStr,
+ "local_created_at": ctimeStr,
}
count := int(math.Ceil(float64(stream.GetSize()) / float64(partSize)))
createData["part_info_list"] = makePartInfos(count)
// rapid upload
- rapidUpload := stream.GetSize() > 100*1024 && d.RapidUpload
+ rapidUpload := stream.GetSize() > 100*utils.KB && d.RapidUpload
if rapidUpload {
log.Debugf("[aliyundrive_open] start cal pre_hash")
// read 1024 bytes to calculate pre hash
- buf := bytes.NewBuffer(make([]byte, 0, 1024))
- _, err := io.CopyN(buf, stream, 1024)
+ reader, err := stream.RangeRead(http_range.Range{Start: 0, Length: 1024})
if err != nil {
- return err
+ return nil, err
}
- createData["size"] = stream.GetSize()
- createData["pre_hash"] = utils.HashData(utils.SHA1, buf.Bytes())
- // if support seek, seek to start
- if localFile, ok := stream.(io.Seeker); ok {
- if _, err := localFile.Seek(0, io.SeekStart); err != nil {
- return err
- }
- } else {
- // Put spliced head back to stream
- stream.SetReadCloser(struct {
- io.Reader
- io.Closer
- }{
- Reader: io.MultiReader(buf, stream.GetReadCloser()),
- Closer: stream.GetReadCloser(),
- })
+ hash, err := utils.HashReader(utils.SHA1, reader)
+ if err != nil {
+ return nil, err
}
+ createData["size"] = stream.GetSize()
+ createData["pre_hash"] = hash
}
var createResp CreateResp
_, err, e := d.requestReturnErrResp("/adrive/v1.0/openFile/create", http.MethodPost, func(req *resty.Request) {
req.SetBody(createData).SetResult(&createResp)
})
+ var tmpF model.File
if err != nil {
if e.Code != "PreHashMatched" || !rapidUpload {
- return err
+ return nil, err
}
log.Debugf("[aliyundrive_open] pre_hash matched, start rapid upload")
- // convert to local file
- file, err := utils.CreateTempFile(stream.GetReadCloser(), stream.GetSize())
- if err != nil {
- return err
- }
- _ = stream.GetReadCloser().Close()
- stream.SetReadCloser(file)
- // calculate full hash
- h := sha1.New()
- _, err = io.Copy(h, file)
- if err != nil {
- return err
+
+ hi := stream.GetHash()
+ hash := hi.GetHash(utils.SHA1)
+ if len(hash) <= 0 {
+ tmpF, err = stream.CacheFullInTempFile()
+ if err != nil {
+ return nil, err
+ }
+ hash, err = utils.HashFile(utils.SHA1, tmpF)
+ if err != nil {
+ return nil, err
+ }
+
}
+
delete(createData, "pre_hash")
createData["proof_version"] = "v1"
createData["content_hash_name"] = "sha1"
- createData["content_hash"] = hex.EncodeToString(h.Sum(nil))
- // seek to start
- if _, err = file.Seek(0, io.SeekStart); err != nil {
- return err
- }
- createData["proof_code"], err = d.calProofCode(file, stream.GetSize())
+ createData["content_hash"] = hash
+ createData["proof_code"], err = d.calProofCode(stream)
if err != nil {
- return fmt.Errorf("cal proof code error: %s", err.Error())
+ return nil, fmt.Errorf("cal proof code error: %s", err.Error())
}
_, err = d.request("/adrive/v1.0/openFile/create", http.MethodPost, func(req *resty.Request) {
req.SetBody(createData).SetResult(&createResp)
})
if err != nil {
- return err
- }
- if createResp.RapidUpload {
- log.Debugf("[aliyundrive_open] rapid upload success, file id: %s", createResp.FileId)
- return nil
+ return nil, err
}
- // failed to rapid upload, try normal upload
- if _, err = file.Seek(0, io.SeekStart); err != nil {
- return err
+ }
+
+ if !createResp.RapidUpload {
+ // 2. normal upload
+ log.Debugf("[aliyundive_open] normal upload")
+
+ preTime := time.Now()
+ var offset, length int64 = 0, partSize
+ //var length
+ for i := 0; i < len(createResp.PartInfoList); i++ {
+ if utils.IsCanceled(ctx) {
+ return nil, ctx.Err()
+ }
+ // refresh upload url if 50 minutes passed
+ if time.Since(preTime) > 50*time.Minute {
+ createResp.PartInfoList, err = d.getUploadUrl(count, createResp.FileId, createResp.UploadId)
+ if err != nil {
+ return nil, err
+ }
+ preTime = time.Now()
+ }
+ if remain := stream.GetSize() - offset; length > remain {
+ length = remain
+ }
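+ // Read only this part's byte range from the source stream; the part is then PUT to its pre-signed upload URL with up to 3 retries.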
+ //rd := utils.NewMultiReadable(io.LimitReader(stream, partSize))
+ rd, err := stream.RangeRead(http_range.Range{Start: offset, Length: length})
+ if err != nil {
+ return nil, err
+ }
+ err = retry.Do(func() error {
+ //rd.Reset()
+ return d.uploadPart(ctx, rd, createResp.PartInfoList[i])
+ },
+ retry.Attempts(3),
+ retry.DelayType(retry.BackOffDelay),
+ retry.Delay(time.Second))
+ if err != nil {
+ return nil, err
+ }
+ offset += partSize
+ up(float64(i*100) / float64(count))
}
+ } else {
+ log.Debugf("[aliyundrive_open] rapid upload success, file id: %s", createResp.FileId)
}
+
log.Debugf("[aliyundrive_open] create file success, resp: %+v", createResp)
- return d.normalUpload(ctx, stream, up, createResp, count, partSize)
+ // 3. complete
+ return d.completeUpload(createResp.FileId, createResp.UploadId)
}
diff --git a/drivers/aliyundrive_open/util.go b/drivers/aliyundrive_open/util.go
old mode 100644
new mode 100755
index 8cfb71fe..c0542cbf
--- a/drivers/aliyundrive_open/util.go
+++ b/drivers/aliyundrive_open/util.go
@@ -2,6 +2,7 @@ package aliyundrive_open
import (
"context"
+ "encoding/base64"
"errors"
"fmt"
"github.com/alist-org/alist/v3/internal/model"
@@ -9,6 +10,7 @@ import (
"github.com/alist-org/alist/v3/internal/token"
"net/http"
"strconv"
+ "strings"
"time"
"github.com/alist-org/alist/v3/drivers/base"
@@ -20,7 +22,7 @@ import (
// do others that not defined in Driver interface
-func (d *AliyundriveOpen) refreshToken() error {
+func (d *AliyundriveOpen) _refreshToken() (string, string, error) {
accountId := strconv.Itoa(d.AccountId)
accessTokenOpen := token.GetToken("AccessTokenOpen-"+accountId, 7200)
refreshTokenOpen := token.GetToken("RefreshTokenOpen-"+accountId, 0)
@@ -28,7 +30,7 @@ func (d *AliyundriveOpen) refreshToken() error {
if accessTokenOpen != "" && refreshTokenOpen != "" {
d.RefreshToken, d.AccessToken = refreshTokenOpen, accessTokenOpen
log.Println("RefreshTokenOpen已经存在")
- return nil
+ return refreshTokenOpen, accessTokenOpen, nil
}
t := time.Now()
@@ -40,7 +42,7 @@ func (d *AliyundriveOpen) refreshToken() error {
//var resp base.TokenResp
var e ErrResp
res, err := base.RestyClient.R().
- ForceContentType("application/json").
+ //ForceContentType("application/json").
SetBody(base.Json{
"client_id": d.ClientID,
"client_secret": d.ClientSecret,
@@ -51,21 +53,58 @@ func (d *AliyundriveOpen) refreshToken() error {
SetError(&e).
Post(url)
if err != nil {
- return err
+ return "", "", err
}
log.Debugf("[ali_open] refresh token response: %s", res.String())
if e.Code != "" {
- return fmt.Errorf("failed to refresh token: %s", e.Message)
+ return "", "", fmt.Errorf("failed to refresh token: %s", e.Message)
}
refresh, access := utils.Json.Get(res.Body(), "refresh_token").ToString(), utils.Json.Get(res.Body(), "access_token").ToString()
if refresh == "" {
- return errors.New("failed to refresh token: refresh token is empty")
+ return "", "", fmt.Errorf("failed to refresh token: refresh token is empty, resp: %s", res.String())
+ }
+ curSub, err := getSub(d.RefreshToken)
+ if err != nil {
+ return "", "", err
+ }
+ newSub, err := getSub(refresh)
+ if err != nil {
+ return "", "", err
+ }
+ if curSub != newSub {
+ return "", "", errors.New("failed to refresh token: sub not match")
}
- log.Debugf("[ali_open] toekn exchange: %s -> %s", d.RefreshToken, refresh)
- d.RefreshToken, d.AccessToken = refresh, access
-
d.SaveOpenToken(t)
+ return refresh, access, nil
+}
+
+func getSub(token string) (string, error) {
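+ // The refresh token is a JWT; decode its payload (the second segment) and read the "sub" claim to identify the account.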
+ segments := strings.Split(token, ".")
+ if len(segments) != 3 {
+ return "", errors.New("not a jwt token because of invalid segments")
+ }
+ bs, err := base64.RawStdEncoding.DecodeString(segments[1])
+ if err != nil {
+ return "", errors.New("failed to decode jwt token")
+ }
+ return utils.Json.Get(bs, "sub").ToString(), nil
+}
+func (d *AliyundriveOpen) refreshToken() error {
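+ // Retry the refresh a few times before giving up; on success the new token pair replaces the stored one and the storage is saved.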
+ refresh, access, err := d._refreshToken()
+ for i := 0; i < 3; i++ {
+ if err == nil {
+ break
+ } else {
+ log.Errorf("[ali_open] failed to refresh token: %s", err)
+ }
+ refresh, access, err = d._refreshToken()
+ }
+ if err != nil {
+ return err
+ }
+ log.Infof("[ali_open] token exchange: %s -> %s", d.RefreshToken, refresh)
+ d.RefreshToken, d.AccessToken = refresh, access
op.MustSaveDriverStorage(d)
return nil
}
@@ -175,3 +214,9 @@ func (d *AliyundriveOpen) getFiles(ctx context.Context, fileId string) ([]File,
}
return res, nil
}
+
+func getNowTime() (time.Time, string) {
+ nowTime := time.Now()
+ nowTimeStr := nowTime.Format("2006-01-02T15:04:05.000Z")
+ return nowTime, nowTimeStr
+}
diff --git a/drivers/aliyundrive_share/driver.go b/drivers/aliyundrive_share/driver.go
old mode 100644
new mode 100755
diff --git a/drivers/aliyundrive_share/meta.go b/drivers/aliyundrive_share/meta.go
old mode 100644
new mode 100755
diff --git a/drivers/aliyundrive_share/types.go b/drivers/aliyundrive_share/types.go
old mode 100644
new mode 100755
index 97b6b7cf..bb9be800
--- a/drivers/aliyundrive_share/types.go
+++ b/drivers/aliyundrive_share/types.go
@@ -44,6 +44,7 @@ func fileToObj(f File) *model.ObjThumb {
Name: f.Name,
Size: f.Size,
Modified: f.UpdatedAt,
+ Ctime: f.CreatedAt,
IsFolder: f.Type == "folder",
},
Thumbnail: model.Thumbnail{Thumbnail: f.Thumbnail},
diff --git a/drivers/aliyundrive_share/util.go b/drivers/aliyundrive_share/util.go
old mode 100644
new mode 100755
diff --git a/drivers/aliyundrive_share2_open/driver.go b/drivers/aliyundrive_share2_open/driver.go
old mode 100644
new mode 100755
index 3d6aaba8..4e8abcf5
--- a/drivers/aliyundrive_share2_open/driver.go
+++ b/drivers/aliyundrive_share2_open/driver.go
@@ -174,13 +174,15 @@ func (d *AliyundriveShare2Open) Other(ctx context.Context, args model.OtherArgs)
return nil, err
}
- url, err := d.getDownloadUrl(fileId)
- if url != "" {
- resp.PlayInfo.Videos = append(resp.PlayInfo.Videos, LiveTranscoding{
- TemplateId: "原画",
- Status: "finished",
- Url: url,
- })
+ if args.Data == "preview" {
+ url, _ := d.getDownloadUrl(fileId)
+ if url != "" {
+ resp.PlayInfo.Videos = append(resp.PlayInfo.Videos, LiveTranscoding{
+ TemplateId: "原画",
+ Status: "finished",
+ Url: url,
+ })
+ }
}
return resp, nil
diff --git a/drivers/aliyundrive_share2_open/meta.go b/drivers/aliyundrive_share2_open/meta.go
old mode 100644
new mode 100755
index fb9b6977..34a4c803
--- a/drivers/aliyundrive_share2_open/meta.go
+++ b/drivers/aliyundrive_share2_open/meta.go
@@ -31,7 +31,7 @@ var config = driver.Config{
func init() {
op.RegisterDriver(func() driver.Driver {
return &AliyundriveShare2Open{
- base: "https://openapi.aliyundrive.com",
+ base: "https://openapi.alipan.com",
}
})
}
diff --git a/drivers/aliyundrive_share2_open/types.go b/drivers/aliyundrive_share2_open/types.go
old mode 100644
new mode 100755
index 747e590f..d3264ff0
--- a/drivers/aliyundrive_share2_open/types.go
+++ b/drivers/aliyundrive_share2_open/types.go
@@ -1,6 +1,7 @@
package aliyundrive_share2_open
import (
+ "github.com/alist-org/alist/v3/pkg/utils"
"time"
"github.com/alist-org/alist/v3/internal/model"
@@ -67,6 +68,14 @@ type MyFile struct {
UpdateAt time.Time `json:"UpdateAt"`
}
+func (f MyFile) CreateTime() time.Time {
+ return f.UpdateAt
+}
+
+func (f MyFile) GetHash() utils.HashInfo {
+ return utils.HashInfo{}
+}
+
type VideoPreviewResponse struct {
PlayInfo VideoPreviewPlayInfo `json:"video_preview_play_info"`
}
diff --git a/drivers/aliyundrive_share2_open/util.go b/drivers/aliyundrive_share2_open/util.go
old mode 100644
new mode 100755
index 1029a54c..4a8c8436
--- a/drivers/aliyundrive_share2_open/util.go
+++ b/drivers/aliyundrive_share2_open/util.go
@@ -155,6 +155,8 @@ func (d *AliyundriveShare2Open) getDriveId() {
log.Warnf("getDriveId error: %v", err)
return
}
+ name := utils.Json.Get(res, "name").ToString()
+ log.Printf("昵称: %v", name)
d.DriveId = utils.Json.Get(res, "resource_drive_id").ToString()
if d.DriveId == "" {
d.DriveId = utils.Json.Get(res, "default_drive_id").ToString()
diff --git a/drivers/all.go b/drivers/all.go
old mode 100644
new mode 100755
index a657927b..d35a98d7
--- a/drivers/all.go
+++ b/drivers/all.go
@@ -4,6 +4,7 @@ import (
_ "github.com/alist-org/alist/v3/drivers/115"
_ "github.com/alist-org/alist/v3/drivers/115_share"
_ "github.com/alist-org/alist/v3/drivers/123"
+ _ "github.com/alist-org/alist/v3/drivers/123_link"
_ "github.com/alist-org/alist/v3/drivers/123_share"
_ "github.com/alist-org/alist/v3/drivers/139"
_ "github.com/alist-org/alist/v3/drivers/189"
@@ -18,6 +19,7 @@ import (
_ "github.com/alist-org/alist/v3/drivers/baidu_netdisk"
_ "github.com/alist-org/alist/v3/drivers/baidu_photo"
_ "github.com/alist-org/alist/v3/drivers/baidu_share"
+ _ "github.com/alist-org/alist/v3/drivers/chaoxing"
_ "github.com/alist-org/alist/v3/drivers/cloudreve"
_ "github.com/alist-org/alist/v3/drivers/crypt"
_ "github.com/alist-org/alist/v3/drivers/dropbox"
@@ -46,6 +48,7 @@ import (
_ "github.com/alist-org/alist/v3/drivers/url_tree"
_ "github.com/alist-org/alist/v3/drivers/uss"
_ "github.com/alist-org/alist/v3/drivers/virtual"
+ _ "github.com/alist-org/alist/v3/drivers/vtencent"
_ "github.com/alist-org/alist/v3/drivers/webdav"
_ "github.com/alist-org/alist/v3/drivers/weiyun"
_ "github.com/alist-org/alist/v3/drivers/wopan"
diff --git a/drivers/baidu_netdisk/driver.go b/drivers/baidu_netdisk/driver.go
old mode 100644
new mode 100755
index 3066843e..20810a76
--- a/drivers/baidu_netdisk/driver.go
+++ b/drivers/baidu_netdisk/driver.go
@@ -5,11 +5,9 @@ import (
"crypto/md5"
"encoding/hex"
"errors"
- "fmt"
"io"
"math"
"net/url"
- "os"
stdpath "path"
"strconv"
"time"
@@ -29,10 +27,9 @@ type BaiduNetdisk struct {
Addition
uploadThread int
+ vipType int // membership type: 0 regular user (4G/4M), 1 VIP (10G/16M), 2 SVIP (20G/32M)
}
-const DefaultSliceSize int64 = 4 * 1024 * 1024
-
func (d *BaiduNetdisk) Config() driver.Config {
return config
}
@@ -55,7 +52,11 @@ func (d *BaiduNetdisk) Init(ctx context.Context) error {
"method": "uinfo",
}, nil)
log.Debugf("[baidu] get uinfo: %s", string(res))
- return err
+ if err != nil {
+ return err
+ }
+ d.vipType = utils.Json.Get(res, "vip_type").ToInt()
+ return nil
}
func (d *BaiduNetdisk) Drop(ctx context.Context) error {
@@ -81,7 +82,7 @@ func (d *BaiduNetdisk) Link(ctx context.Context, file model.Obj, args model.Link
func (d *BaiduNetdisk) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) (model.Obj, error) {
var newDir File
- _, err := d.create(stdpath.Join(parentDir.GetPath(), dirName), 0, 1, "", "", &newDir)
+ _, err := d.create(stdpath.Join(parentDir.GetPath(), dirName), 0, 1, "", "", &newDir, 0, 0)
if err != nil {
return nil, err
}
@@ -147,28 +148,50 @@ func (d *BaiduNetdisk) Remove(ctx context.Context, obj model.Obj) error {
return err
}
+func (d *BaiduNetdisk) PutRapid(ctx context.Context, dstDir model.Obj, stream model.FileStreamer) (model.Obj, error) {
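+ // Rapid upload: if the stream already carries a full-file MD5, ask the server to create the file from existing blocks without transferring any data.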
+ contentMd5 := stream.GetHash().GetHash(utils.MD5)
+ if len(contentMd5) < utils.MD5.Width {
+ return nil, errors.New("invalid hash")
+ }
+
+ streamSize := stream.GetSize()
+ path := stdpath.Join(dstDir.GetPath(), stream.GetName())
+ mtime := stream.ModTime().Unix()
+ ctime := stream.CreateTime().Unix()
+ blockList, _ := utils.Json.MarshalToString([]string{contentMd5})
+
+ var newFile File
+ _, err := d.create(path, streamSize, 0, "", blockList, &newFile, mtime, ctime)
+ if err != nil {
+ return nil, err
+ }
+ return fileToObj(newFile), nil
+}
+
func (d *BaiduNetdisk) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) (model.Obj, error) {
- tempFile, err := utils.CreateTempFile(stream.GetReadCloser(), stream.GetSize())
+ // rapid upload
+ if newObj, err := d.PutRapid(ctx, dstDir, stream); err == nil {
+ return newObj, nil
+ }
+
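+ // Fall back to a normal chunked upload: cache the stream in a temp file so the full-file and per-slice MD5s can be computed.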
+ tempFile, err := stream.CacheFullInTempFile()
if err != nil {
return nil, err
}
- defer func() {
- _ = tempFile.Close()
- _ = os.Remove(tempFile.Name())
- }()
streamSize := stream.GetSize()
- count := int(math.Max(math.Ceil(float64(streamSize)/float64(DefaultSliceSize)), 1))
- lastBlockSize := streamSize % DefaultSliceSize
+ sliceSize := d.getSliceSize()
+ count := int(math.Max(math.Ceil(float64(streamSize)/float64(sliceSize)), 1))
+ lastBlockSize := streamSize % sliceSize
if streamSize > 0 && lastBlockSize == 0 {
- lastBlockSize = DefaultSliceSize
+ lastBlockSize = sliceSize
}
//cal md5 for first 256k data
const SliceSize int64 = 256 * 1024
// cal md5
blockList := make([]string, 0, count)
- byteSize := DefaultSliceSize
+ byteSize := sliceSize
fileMd5H := md5.New()
sliceMd5H := md5.New()
sliceMd5H2 := md5.New()
@@ -191,23 +214,31 @@ func (d *BaiduNetdisk) Put(ctx context.Context, dstDir model.Obj, stream model.F
contentMd5 := hex.EncodeToString(fileMd5H.Sum(nil))
sliceMd5 := hex.EncodeToString(sliceMd5H2.Sum(nil))
blockListStr, _ := utils.Json.MarshalToString(blockList)
-
- rawPath := stdpath.Join(dstDir.GetPath(), stream.GetName())
- path := encodeURIComponent(rawPath)
+ path := stdpath.Join(dstDir.GetPath(), stream.GetName())
+ mtime := stream.ModTime().Unix()
+ ctime := stream.CreateTime().Unix()
// step.1 precreate the upload
// try to resume a previously saved upload progress
precreateResp, ok := base.GetUploadProgress[*PrecreateResp](d, d.AccessToken, contentMd5)
if !ok {
- data := fmt.Sprintf("path=%s&size=%d&isdir=0&autoinit=1&rtype=3&block_list=%s&content-md5=%s&slice-md5=%s",
- path, streamSize,
- blockListStr,
- contentMd5, sliceMd5)
params := map[string]string{
"method": "precreate",
}
- log.Debugf("[baidu_netdisk] precreate data: %s", data)
- _, err = d.post("/xpan/file", params, data, &precreateResp)
+ form := map[string]string{
+ "path": path,
+ "size": strconv.FormatInt(streamSize, 10),
+ "isdir": "0",
+ "autoinit": "1",
+ "rtype": "3",
+ "block_list": blockListStr,
+ "content-md5": contentMd5,
+ "slice-md5": sliceMd5,
+ }
+ joinTime(form, ctime, mtime)
+
+ log.Debugf("[baidu_netdisk] precreate data: %s", form)
+ _, err = d.postForm("/xpan/file", params, form, &precreateResp)
if err != nil {
return nil, err
}
@@ -230,7 +261,7 @@ func (d *BaiduNetdisk) Put(ctx context.Context, dstDir model.Obj, stream model.F
break
}
- i, partseq, offset, byteSize := i, partseq, int64(partseq)*DefaultSliceSize, DefaultSliceSize
+ i, partseq, offset, byteSize := i, partseq, int64(partseq)*sliceSize, sliceSize
if partseq+1 == count {
byteSize = lastBlockSize
}
@@ -247,7 +278,7 @@ func (d *BaiduNetdisk) Put(ctx context.Context, dstDir model.Obj, stream model.F
if err != nil {
return err
}
- up(int(threadG.Success()) * 100 / len(precreateResp.BlockList))
+ up(float64(threadG.Success()) * 100 / float64(len(precreateResp.BlockList)))
precreateResp.BlockList[i] = -1
return nil
})
@@ -263,12 +294,13 @@ func (d *BaiduNetdisk) Put(ctx context.Context, dstDir model.Obj, stream model.F
// step.3 create the file
var newFile File
- _, err = d.create(rawPath, streamSize, 0, precreateResp.Uploadid, blockListStr, &newFile)
+ _, err = d.create(path, streamSize, 0, precreateResp.Uploadid, blockListStr, &newFile, mtime, ctime)
if err != nil {
return nil, err
}
return fileToObj(newFile), nil
}
+
func (d *BaiduNetdisk) uploadSlice(ctx context.Context, params map[string]string, fileName string, file io.Reader) error {
res, err := base.RestyClient.R().
SetContext(ctx).
diff --git a/drivers/baidu_netdisk/meta.go b/drivers/baidu_netdisk/meta.go
old mode 100644
new mode 100755
diff --git a/drivers/baidu_netdisk/types.go b/drivers/baidu_netdisk/types.go
old mode 100644
new mode 100755
index 4effe70f..cbec0bcf
--- a/drivers/baidu_netdisk/types.go
+++ b/drivers/baidu_netdisk/types.go
@@ -40,11 +40,11 @@ type File struct {
Isdir int `json:"isdir"`
// list resp
- //ServerCtime int64 `json:"server_ctime"`
+ ServerCtime int64 `json:"server_ctime"`
ServerMtime int64 `json:"server_mtime"`
- //ServerAtime int64 `json:"server_atime"`
- //LocalCtime int64 `json:"local_ctime"`
- //LocalMtime int64 `json:"local_mtime"`
+ LocalMtime int64 `json:"local_mtime"`
+ LocalCtime int64 `json:"local_ctime"`
+ //ServerAtime int64 `json:"server_atime"`
// only create and precreate resp
Ctime int64 `json:"ctime"`
@@ -55,8 +55,11 @@ func fileToObj(f File) *model.ObjThumb {
if f.ServerFilename == "" {
f.ServerFilename = path.Base(f.Path)
}
- if f.ServerMtime == 0 {
- f.ServerMtime = int64(f.Mtime)
+ if f.LocalCtime == 0 {
+ f.LocalCtime = f.Ctime
+ }
+ if f.LocalMtime == 0 {
+ f.LocalMtime = f.Mtime
}
return &model.ObjThumb{
Object: model.Object{
@@ -64,8 +67,12 @@ func fileToObj(f File) *model.ObjThumb {
Path: f.Path,
Name: f.ServerFilename,
Size: f.Size,
- Modified: time.Unix(f.ServerMtime, 0),
+ Modified: time.Unix(f.LocalMtime, 0),
+ Ctime: time.Unix(f.LocalCtime, 0),
IsFolder: f.Isdir == 1,
+
+ // The MD5 returned directly by the list API is wrong, so it is not recorded here
+ // HashInfo: utils.NewHashInfo(utils.MD5, f.Md5),
},
Thumbnail: model.Thumbnail{Thumbnail: f.Thumbs.Url3},
}
diff --git a/drivers/baidu_netdisk/util.go b/drivers/baidu_netdisk/util.go
old mode 100644
new mode 100755
index 81b798e5..6c51156c
--- a/drivers/baidu_netdisk/util.go
+++ b/drivers/baidu_netdisk/util.go
@@ -1,11 +1,10 @@
package baidu_netdisk
import (
+ "errors"
"fmt"
"net/http"
- "net/url"
"strconv"
- "strings"
"time"
"github.com/alist-org/alist/v3/drivers/base"
@@ -22,7 +21,7 @@ import (
func (d *BaiduNetdisk) refreshToken() error {
err := d._refreshToken()
- if err != nil && err == errs.EmptyToken {
+ if err != nil && errors.Is(err, errs.EmptyToken) {
err = d._refreshToken()
}
return err
@@ -74,21 +73,16 @@ func (d *BaiduNetdisk) request(furl string, method string, callback base.ReqCall
log.Info("refreshing baidu_netdisk token.")
err2 := d.refreshToken()
if err2 != nil {
- return err2
+ return retry.Unrecoverable(err2)
}
}
-
- err2 := fmt.Errorf("req: [%s] ,errno: %d, refer to https://pan.baidu.com/union/doc/", furl, errno)
- if !utils.SliceContains([]int{2}, errno) {
- err2 = retry.Unrecoverable(err2)
- }
- return err2
+ return fmt.Errorf("req: [%s] ,errno: %d, refer to https://pan.baidu.com/union/doc/", furl, errno)
}
result = res.Body()
return nil
},
retry.LastErrorOnly(true),
- retry.Attempts(5),
+ retry.Attempts(3),
retry.Delay(time.Second),
retry.DelayType(retry.BackOffDelay))
return result, err
@@ -100,10 +94,10 @@ func (d *BaiduNetdisk) get(pathname string, params map[string]string, resp inter
}, resp)
}
-func (d *BaiduNetdisk) post(pathname string, params map[string]string, data interface{}, resp interface{}) ([]byte, error) {
+func (d *BaiduNetdisk) postForm(pathname string, params map[string]string, form map[string]string, resp interface{}) ([]byte, error) {
return d.request("https://pan.baidu.com/rest/2.0"+pathname, http.MethodPost, func(req *resty.Request) {
req.SetQueryParams(params)
- req.SetBody(data)
+ req.SetFormData(form)
}, resp)
}
@@ -158,6 +152,9 @@ func (d *BaiduNetdisk) linkOfficial(file model.Obj, args model.LinkArgs) (*model
//if res.StatusCode() == 302 {
u = res.Header().Get("location")
//}
+
+ updateObjMd5(file, "pan.baidu.com", u)
+
return &model.Link{
URL: u,
Header: http.Header{
@@ -180,6 +177,9 @@ func (d *BaiduNetdisk) linkCrack(file model.Obj, args model.LinkArgs) (*model.Li
if err != nil {
return nil, err
}
+
+ updateObjMd5(file, d.CustomCrackUA, resp.Info[0].Dlink)
+
return &model.Link{
URL: resp.Info[0].Dlink,
Header: http.Header{
@@ -194,23 +194,73 @@ func (d *BaiduNetdisk) manage(opera string, filelist any) ([]byte, error) {
"opera": opera,
}
marshal, _ := utils.Json.MarshalToString(filelist)
- data := fmt.Sprintf("async=0&filelist=%s&ondup=fail", marshal)
- return d.post("/xpan/file", params, data, nil)
+ return d.postForm("/xpan/file", params, map[string]string{
+ "async": "0",
+ "filelist": marshal,
+ "ondup": "fail",
+ }, nil)
}
-func (d *BaiduNetdisk) create(path string, size int64, isdir int, uploadid, block_list string, resp any) ([]byte, error) {
+func (d *BaiduNetdisk) create(path string, size int64, isdir int, uploadid, block_list string, resp any, mtime, ctime int64) ([]byte, error) {
params := map[string]string{
"method": "create",
}
- data := fmt.Sprintf("path=%s&size=%d&isdir=%d&rtype=3", encodeURIComponent(path), size, isdir)
+ form := map[string]string{
+ "path": path,
+ "size": strconv.FormatInt(size, 10),
+ "isdir": strconv.Itoa(isdir),
+ "rtype": "3",
+ }
+ if mtime != 0 && ctime != 0 {
+ joinTime(form, ctime, mtime)
+ }
+
if uploadid != "" {
- data += fmt.Sprintf("&uploadid=%s&block_list=%s", uploadid, block_list)
+ form["uploadid"] = uploadid
}
- return d.post("/xpan/file", params, data, resp)
+ if block_list != "" {
+ form["block_list"] = block_list
+ }
+ return d.postForm("/xpan/file", params, form, resp)
}
-func encodeURIComponent(str string) string {
- r := url.QueryEscape(str)
- r = strings.ReplaceAll(r, "+", "%20")
- return r
+func joinTime(form map[string]string, ctime, mtime int64) {
+ form["local_mtime"] = strconv.FormatInt(mtime, 10)
+ form["local_ctime"] = strconv.FormatInt(ctime, 10)
}
+
+func updateObjMd5(obj model.Obj, userAgent, u string) {
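+ // Issue a HEAD request against the download URL and record the Content-Md5 header on the object, since the MD5 from the list API is unreliable.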
+ object := model.GetRawObject(obj)
+ if object != nil {
+ req, _ := http.NewRequest(http.MethodHead, u, nil)
+ req.Header.Add("User-Agent", userAgent)
+ resp, _ := base.HttpClient.Do(req)
+ if resp != nil {
+ contentMd5 := resp.Header.Get("Content-Md5")
+ object.HashInfo = utils.NewHashInfo(utils.MD5, contentMd5)
+ }
+ }
+}
+
+const (
+ DefaultSliceSize int64 = 4 * utils.MB
+ VipSliceSize = 16 * utils.MB
+ SVipSliceSize = 32 * utils.MB
+)
+
+func (d *BaiduNetdisk) getSliceSize() int64 {
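+ // Pick the upload slice size allowed by the account's membership level (see vipType).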
+ switch d.vipType {
+ case 1:
+ return VipSliceSize
+ case 2:
+ return SVipSliceSize
+ default:
+ return DefaultSliceSize
+ }
+}
+
+// func encodeURIComponent(str string) string {
+// r := url.QueryEscape(str)
+// r = strings.ReplaceAll(r, "+", "%20")
+// return r
+// }
diff --git a/drivers/baidu_photo/driver.go b/drivers/baidu_photo/driver.go
old mode 100644
new mode 100755
index 3ff3bc6e..c29bc110
--- a/drivers/baidu_photo/driver.go
+++ b/drivers/baidu_photo/driver.go
@@ -8,7 +8,6 @@ import (
"fmt"
"io"
"math"
- "os"
"regexp"
"strconv"
"strings"
@@ -228,15 +227,14 @@ func (d *BaiduPhoto) Put(ctx context.Context, dstDir model.Obj, stream model.Fil
return nil, fmt.Errorf("file size cannot be zero")
}
+ // TODO:
+ // no rapid-upload (instant transfer) method has been found yet
+
// Need the full file MD5, so the stream must support io.Seek
- tempFile, err := utils.CreateTempFile(stream.GetReadCloser(), stream.GetSize())
+ tempFile, err := stream.CacheFullInTempFile()
if err != nil {
return nil, err
}
- defer func() {
- _ = tempFile.Close()
- _ = os.Remove(tempFile.Name())
- }()
const DEFAULT int64 = 1 << 22
const SliceSize int64 = 1 << 18
@@ -331,7 +329,7 @@ func (d *BaiduPhoto) Put(ctx context.Context, dstDir model.Obj, stream model.Fil
if err != nil {
return err
}
- up(int(threadG.Success()) * 100 / len(precreateResp.BlockList))
+ up(float64(threadG.Success()) * 100 / float64(len(precreateResp.BlockList)))
precreateResp.BlockList[i] = -1
return nil
})
diff --git a/drivers/baidu_photo/help.go b/drivers/baidu_photo/help.go
old mode 100644
new mode 100755
index d689f81d..40588ee9
--- a/drivers/baidu_photo/help.go
+++ b/drivers/baidu_photo/help.go
@@ -61,12 +61,12 @@ func moveFileToAlbumFile(file *File, album *Album, uk int64) *AlbumFile {
func renameAlbum(album *Album, newName string) *Album {
return &Album{
- AlbumID: album.AlbumID,
- Tid: album.Tid,
- JoinTime: album.JoinTime,
- CreateTime: album.CreateTime,
- Title: newName,
- Mtime: time.Now().Unix(),
+ AlbumID: album.AlbumID,
+ Tid: album.Tid,
+ JoinTime: album.JoinTime,
+ CreationTime: album.CreationTime,
+ Title: newName,
+ Mtime: time.Now().Unix(),
}
}
diff --git a/drivers/baidu_photo/meta.go b/drivers/baidu_photo/meta.go
old mode 100644
new mode 100755
diff --git a/drivers/baidu_photo/types.go b/drivers/baidu_photo/types.go
old mode 100644
new mode 100755
index 7ac66570..2bbacd30
--- a/drivers/baidu_photo/types.go
+++ b/drivers/baidu_photo/types.go
@@ -4,6 +4,8 @@ import (
"fmt"
"time"
+ "github.com/alist-org/alist/v3/pkg/utils"
+
"github.com/alist-org/alist/v3/internal/model"
)
@@ -51,22 +53,17 @@ type (
Ctime int64 `json:"ctime"` // creation time, in seconds
Mtime int64 `json:"mtime"` // modification time, in seconds
Thumburl []string `json:"thumburl"`
-
- parseTime *time.Time
+ Md5 string `json:"md5"`
}
)
-func (c *File) GetSize() int64 { return c.Size }
-func (c *File) GetName() string { return getFileName(c.Path) }
-func (c *File) ModTime() time.Time {
- if c.parseTime == nil {
- c.parseTime = toTime(c.Mtime)
- }
- return *c.parseTime
-}
-func (c *File) IsDir() bool { return false }
-func (c *File) GetID() string { return "" }
-func (c *File) GetPath() string { return "" }
+func (c *File) GetSize() int64 { return c.Size }
+func (c *File) GetName() string { return getFileName(c.Path) }
+func (c *File) CreateTime() time.Time { return time.Unix(c.Ctime, 0) }
+func (c *File) ModTime() time.Time { return time.Unix(c.Mtime, 0) }
+func (c *File) IsDir() bool { return false }
+func (c *File) GetID() string { return "" }
+func (c *File) GetPath() string { return "" }
func (c *File) Thumb() string {
if len(c.Thumburl) > 0 {
return c.Thumburl[0]
@@ -74,6 +71,10 @@ func (c *File) Thumb() string {
return ""
}
+func (c *File) GetHash() utils.HashInfo {
+ return utils.NewHashInfo(utils.MD5, c.Md5)
+}
+
/* Album section */
type (
AlbumListResp struct {
@@ -84,12 +85,12 @@ type (
}
Album struct {
- AlbumID string `json:"album_id"`
- Tid int64 `json:"tid"`
- Title string `json:"title"`
- JoinTime int64 `json:"join_time"`
- CreateTime int64 `json:"create_time"`
- Mtime int64 `json:"mtime"`
+ AlbumID string `json:"album_id"`
+ Tid int64 `json:"tid"`
+ Title string `json:"title"`
+ JoinTime int64 `json:"join_time"`
+ CreationTime int64 `json:"create_time"`
+ Mtime int64 `json:"mtime"`
parseTime *time.Time
}
@@ -109,17 +110,17 @@ type (
}
)
-func (a *Album) GetSize() int64 { return 0 }
-func (a *Album) GetName() string { return a.Title }
-func (a *Album) ModTime() time.Time {
- if a.parseTime == nil {
- a.parseTime = toTime(a.Mtime)
- }
- return *a.parseTime
+func (a *Album) GetHash() utils.HashInfo {
+ return utils.HashInfo{}
}
-func (a *Album) IsDir() bool { return true }
-func (a *Album) GetID() string { return "" }
-func (a *Album) GetPath() string { return "" }
+
+func (a *Album) GetSize() int64 { return 0 }
+func (a *Album) GetName() string { return a.Title }
+func (a *Album) CreateTime() time.Time { return time.Unix(a.CreationTime, 0) }
+func (a *Album) ModTime() time.Time { return time.Unix(a.Mtime, 0) }
+func (a *Album) IsDir() bool { return true }
+func (a *Album) GetID() string { return "" }
+func (a *Album) GetPath() string { return "" }
type (
CopyFileResp struct {
diff --git a/drivers/baidu_photo/utils.go b/drivers/baidu_photo/utils.go
old mode 100644
new mode 100755
diff --git a/drivers/baidu_share/driver.go b/drivers/baidu_share/driver.go
old mode 100644
new mode 100755
diff --git a/drivers/baidu_share/meta.go b/drivers/baidu_share/meta.go
old mode 100644
new mode 100755
diff --git a/drivers/baidu_share/types.go b/drivers/baidu_share/types.go
old mode 100644
new mode 100755
diff --git a/drivers/baidu_share/util.go b/drivers/baidu_share/util.go
old mode 100644
new mode 100755
diff --git a/drivers/base/client.go b/drivers/base/client.go
old mode 100644
new mode 100755
diff --git a/drivers/base/types.go b/drivers/base/types.go
old mode 100644
new mode 100755
diff --git a/drivers/base/upload.go b/drivers/base/upload.go
old mode 100644
new mode 100755
diff --git a/drivers/base/util.go b/drivers/base/util.go
old mode 100644
new mode 100755
diff --git a/drivers/chaoxing/driver.go b/drivers/chaoxing/driver.go
new file mode 100755
index 00000000..143235fa
--- /dev/null
+++ b/drivers/chaoxing/driver.go
@@ -0,0 +1,297 @@
+package chaoxing
+
+import (
+ "bytes"
+ "context"
+ "encoding/json"
+ "errors"
+ "fmt"
+ "io"
+ "mime/multipart"
+ "net/http"
+ "net/url"
+ "strings"
+ "time"
+
+ "github.com/alist-org/alist/v3/internal/driver"
+ "github.com/alist-org/alist/v3/internal/errs"
+ "github.com/alist-org/alist/v3/internal/model"
+ "github.com/alist-org/alist/v3/internal/op"
+ "github.com/alist-org/alist/v3/pkg/cron"
+ "github.com/alist-org/alist/v3/pkg/utils"
+ "github.com/go-resty/resty/v2"
+ "google.golang.org/appengine/log"
+)
+
+type ChaoXing struct {
+ model.Storage
+ Addition
+ cron *cron.Cron
+ config driver.Config
+ conf Conf
+}
+
+func (d *ChaoXing) Config() driver.Config {
+ return d.config
+}
+
+func (d *ChaoXing) GetAddition() driver.Additional {
+ return &d.Addition
+}
+
+func (d *ChaoXing) refreshCookie() error {
+ cookie, err := d.Login()
+ if err != nil {
+ d.Status = err.Error()
+ op.MustSaveDriverStorage(d)
+ return nil
+ }
+ d.Addition.Cookie = cookie
+ op.MustSaveDriverStorage(d)
+ return nil
+}
+
+func (d *ChaoXing) Init(ctx context.Context) error {
+ err := d.refreshCookie()
+ if err != nil {
+ log.Errorf(ctx, err.Error())
+ }
+ d.cron = cron.NewCron(time.Hour * 12)
+ d.cron.Do(func() {
+ err = d.refreshCookie()
+ if err != nil {
+ log.Errorf(ctx, err.Error())
+ }
+ })
+ return nil
+}
+
+func (d *ChaoXing) Drop(ctx context.Context) error {
+ d.cron.Stop()
+ return nil
+}
+
+func (d *ChaoXing) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
+ files, err := d.GetFiles(dir.GetID())
+ if err != nil {
+ return nil, err
+ }
+ return utils.SliceConvert(files, func(src File) (model.Obj, error) {
+ return fileToObj(src), nil
+ })
+}
+
+func (d *ChaoXing) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
+ var resp DownResp
+ ua := d.conf.ua
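+ // Object IDs appear to be encoded as "<recId>$<fileId>"; the file-status endpoint only needs the fileId part.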
+ fileId := strings.Split(file.GetID(), "$")[1]
+ _, err := d.requestDownload("/screen/note_note/files/status/"+fileId, http.MethodPost, func(req *resty.Request) {
+ req.SetHeader("User-Agent", ua)
+ }, &resp)
+ if err != nil {
+ return nil, err
+ }
+ u := resp.Download
+ return &model.Link{
+ URL: u,
+ Header: http.Header{
+ "Cookie": []string{d.Cookie},
+ "Referer": []string{d.conf.referer},
+ "User-Agent": []string{ua},
+ },
+ Concurrency: 2,
+ PartSize: 10 * utils.MB,
+ }, nil
+}
+
+func (d *ChaoXing) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) error {
+ query := map[string]string{
+ "bbsid": d.Addition.Bbsid,
+ "name": dirName,
+ "pid": parentDir.GetID(),
+ }
+ var resp ListFileResp
+ _, err := d.request("/pc/resource/addResourceFolder", http.MethodGet, func(req *resty.Request) {
+ req.SetQueryParams(query)
+ }, &resp)
+ if err != nil {
+ return err
+ }
+ if resp.Result != 1 {
+ msg := fmt.Sprintf("error:%s", resp.Msg)
+ return errors.New(msg)
+ }
+ return nil
+}
+
+func (d *ChaoXing) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
+ query := map[string]string{
+ "bbsid": d.Addition.Bbsid,
+ "folderIds": srcObj.GetID(),
+ "targetId": dstDir.GetID(),
+ }
+ if !srcObj.IsDir() {
+ query = map[string]string{
+ "bbsid": d.Addition.Bbsid,
+ "recIds": strings.Split(srcObj.GetID(), "$")[0],
+ "targetId": dstDir.GetID(),
+ }
+ }
+ var resp ListFileResp
+ _, err := d.request("/pc/resource/moveResource", http.MethodGet, func(req *resty.Request) {
+ req.SetQueryParams(query)
+ }, &resp)
+ if err != nil {
+ return err
+ }
+ if !resp.Status {
+ msg := fmt.Sprintf("error:%s", resp.Msg)
+ return errors.New(msg)
+ }
+ return nil
+}
+
+func (d *ChaoXing) Rename(ctx context.Context, srcObj model.Obj, newName string) error {
+ query := map[string]string{
+ "bbsid": d.Addition.Bbsid,
+ "folderId": srcObj.GetID(),
+ "name": newName,
+ }
+ path := "/pc/resource/updateResourceFolderName"
+ if !srcObj.IsDir() {
+ // path = "/pc/resource/updateResourceFileName"
+ // query = map[string]string{
+ // "bbsid": d.Addition.Bbsid,
+ // "recIds": strings.Split(srcObj.GetID(), "$")[0],
+ // "name": newName,
+ // }
+ return errors.New("此网盘不支持修改文件名")
+ }
+ var resp ListFileResp
+ _, err := d.request(path, http.MethodGet, func(req *resty.Request) {
+ req.SetQueryParams(query)
+ }, &resp)
+ if err != nil {
+ return err
+ }
+ if resp.Result != 1 {
+ msg := fmt.Sprintf("error:%s", resp.Msg)
+ return errors.New(msg)
+ }
+ return nil
+}
+
+func (d *ChaoXing) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
+ // TODO copy obj, optional
+ return errs.NotImplement
+}
+
+func (d *ChaoXing) Remove(ctx context.Context, obj model.Obj) error {
+ query := map[string]string{
+ "bbsid": d.Addition.Bbsid,
+ "folderIds": obj.GetID(),
+ }
+ path := "/pc/resource/deleteResourceFolder"
+ var resp ListFileResp
+ if !obj.IsDir() {
+ path = "/pc/resource/deleteResourceFile"
+ query = map[string]string{
+ "bbsid": d.Addition.Bbsid,
+ "recIds": strings.Split(obj.GetID(), "$")[0],
+ }
+ }
+ _, err := d.request(path, http.MethodGet, func(req *resty.Request) {
+ req.SetQueryParams(query)
+ }, &resp)
+ if err != nil {
+ return err
+ }
+ if resp.Result != 1 {
+ msg := fmt.Sprintf("error:%s", resp.Msg)
+ return errors.New(msg)
+ }
+ return nil
+}
+
+func (d *ChaoXing) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
+ var resp UploadDataRsp
+ _, err := d.request("https://noteyd.chaoxing.com/pc/files/getUploadConfig", http.MethodGet, func(req *resty.Request) {
+ }, &resp)
+ if err != nil {
+ return err
+ }
+ if resp.Result != 1 {
+ return errors.New("get upload data error")
+ }
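+ // build a multipart body containing the file content plus the _token and puid returned by getUploadConfig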
+ body := &bytes.Buffer{}
+ writer := multipart.NewWriter(body)
+ filePart, err := writer.CreateFormFile("file", stream.GetName())
+ if err != nil {
+ return err
+ }
+ _, err = io.Copy(filePart, stream)
+ if err != nil {
+ return err
+ }
+ err = writer.WriteField("_token", resp.Msg.Token)
+ if err != nil {
+ return err
+ }
+ err = writer.WriteField("puid", fmt.Sprintf("%d", resp.Msg.Puid))
+ if err != nil {
+ fmt.Println("Error writing param2 to request body:", err)
+ return err
+ }
+ err = writer.Close()
+ if err != nil {
+ return err
+ }
+ req, err := http.NewRequest("POST", "https://pan-yz.chaoxing.com/upload", body)
+ if err != nil {
+ return err
+ }
+ req.Header.Set("Content-Type", writer.FormDataContentType())
+ req.Header.Set("Content-Length", fmt.Sprintf("%d", body.Len()))
+ resps, err := http.DefaultClient.Do(req)
+ if err != nil {
+ return err
+ }
+ defer resps.Body.Close()
+ bodys, err := io.ReadAll(resps.Body)
+ if err != nil {
+ return err
+ }
+ var fileRsp UploadFileDataRsp
+ err = json.Unmarshal(bodys, &fileRsp)
+ if err != nil {
+ return err
+ }
+ if fileRsp.Msg != "success" {
+ return errors.New(fileRsp.Msg)
+ }
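+ // register the uploaded object under the target folder via addResource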
+ uploadDoneParam := UploadDoneParam{Key: fileRsp.ObjectID, Cataid: "100000019", Param: fileRsp.Data}
+ params, err := json.Marshal(uploadDoneParam)
+ if err != nil {
+ return err
+ }
+ query := map[string]string{
+ "bbsid": d.Addition.Bbsid,
+ "pid": dstDir.GetID(),
+ "type": "yunpan",
+ "params": url.QueryEscape("[" + string(params) + "]"),
+ }
+ var respd ListFileResp
+ _, err = d.request("/pc/resource/addResource", http.MethodGet, func(req *resty.Request) {
+ req.SetQueryParams(query)
+ }, &respd)
+ if err != nil {
+ return err
+ }
+ if respd.Result != 1 {
+ msg := fmt.Sprintf("error:%v", resp.Msg)
+ return errors.New(msg)
+ }
+ return nil
+}
+
+var _ driver.Driver = (*ChaoXing)(nil)
diff --git a/drivers/chaoxing/meta.go b/drivers/chaoxing/meta.go
new file mode 100755
index 00000000..c0500629
--- /dev/null
+++ b/drivers/chaoxing/meta.go
@@ -0,0 +1,47 @@
+package chaoxing
+
+import (
+ "github.com/alist-org/alist/v3/internal/driver"
+ "github.com/alist-org/alist/v3/internal/op"
+)
+
+// This driver mounts the ChaoXing group drive and must be accessed through the AList proxy.
+// Log in to ChaoXing, open your personal space, go to Groups, create a new group and open it;
+// the group URL contains the bbsid parameter. Single files are limited to 2 GB and there is no total capacity limit.
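+// For example, a (hypothetical) group URL such as
+// https://groupweb.chaoxing.com/pc/resource/index?bbsid=1234567&... carries the bbsid value to configure below.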
+type Addition struct {
+ // ChaoXing username and password
+ UserName string `json:"user_name" required:"true"`
+ Password string `json:"password" required:"true"`
+ // taken from the URL of the group you created
+ Bbsid string `json:"bbsid" required:"true"`
+ driver.RootID
+ // optional; the driver logs in and fills it automatically when left empty
+ Cookie string `json:"cookie"`
+}
+
+type Conf struct {
+ ua string
+ referer string
+ api string
+ DowloadApi string
+}
+
+func init() {
+ op.RegisterDriver(func() driver.Driver {
+ return &ChaoXing{
+ config: driver.Config{
+ Name: "ChaoXingGroupDrive",
+ OnlyProxy: true,
+ OnlyLocal: false,
+ DefaultRoot: "-1",
+ NoOverwriteUpload: true,
+ },
+ conf: Conf{
+ ua: "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) quark-cloud-drive/2.5.20 Chrome/100.0.4896.160 Electron/18.3.5.4-b478491100 Safari/537.36 Channel/pckk_other_ch",
+ referer: "https://chaoxing.com/",
+ api: "https://groupweb.chaoxing.com",
+ DowloadApi: "https://noteyd.chaoxing.com",
+ },
+ }
+ })
+}
diff --git a/drivers/chaoxing/types.go b/drivers/chaoxing/types.go
new file mode 100755
index 00000000..777d4043
--- /dev/null
+++ b/drivers/chaoxing/types.go
@@ -0,0 +1,262 @@
+package chaoxing
+
+import (
+ "fmt"
+ "time"
+
+ "github.com/alist-org/alist/v3/internal/model"
+)
+
+type Resp struct {
+ Result int `json:"result"`
+}
+
+type UserAuth struct {
+ GroupAuth struct {
+ AddData int `json:"addData"`
+ AddDataFolder int `json:"addDataFolder"`
+ AddLebel int `json:"addLebel"`
+ AddManager int `json:"addManager"`
+ AddMem int `json:"addMem"`
+ AddTopicFolder int `json:"addTopicFolder"`
+ AnonymousAddReply int `json:"anonymousAddReply"`
+ AnonymousAddTopic int `json:"anonymousAddTopic"`
+ BatchOperation int `json:"batchOperation"`
+ DelData int `json:"delData"`
+ DelDataFolder int `json:"delDataFolder"`
+ DelMem int `json:"delMem"`
+ DelTopicFolder int `json:"delTopicFolder"`
+ Dismiss int `json:"dismiss"`
+ ExamEnc string `json:"examEnc"`
+ GroupChat int `json:"groupChat"`
+ IsShowCircleChatButton int `json:"isShowCircleChatButton"`
+ IsShowCircleCloudButton int `json:"isShowCircleCloudButton"`
+ IsShowCompanyButton int `json:"isShowCompanyButton"`
+ Join int `json:"join"`
+ MemberShowRankSet int `json:"memberShowRankSet"`
+ ModifyDataFolder int `json:"modifyDataFolder"`
+ ModifyExpose int `json:"modifyExpose"`
+ ModifyName int `json:"modifyName"`
+ ModifyShowPic int `json:"modifyShowPic"`
+ ModifyTopicFolder int `json:"modifyTopicFolder"`
+ ModifyVisibleState int `json:"modifyVisibleState"`
+ OnlyMgrScoreSet int `json:"onlyMgrScoreSet"`
+ Quit int `json:"quit"`
+ SendNotice int `json:"sendNotice"`
+ ShowActivityManage int `json:"showActivityManage"`
+ ShowActivitySet int `json:"showActivitySet"`
+ ShowAttentionSet int `json:"showAttentionSet"`
+ ShowAutoClearStatus int `json:"showAutoClearStatus"`
+ ShowBarcode int `json:"showBarcode"`
+ ShowChatRoomSet int `json:"showChatRoomSet"`
+ ShowCircleActivitySet int `json:"showCircleActivitySet"`
+ ShowCircleSet int `json:"showCircleSet"`
+ ShowCmem int `json:"showCmem"`
+ ShowDataFolder int `json:"showDataFolder"`
+ ShowDelReason int `json:"showDelReason"`
+ ShowForward int `json:"showForward"`
+ ShowGroupChat int `json:"showGroupChat"`
+ ShowGroupChatSet int `json:"showGroupChatSet"`
+ ShowGroupSquareSet int `json:"showGroupSquareSet"`
+ ShowLockAddSet int `json:"showLockAddSet"`
+ ShowManager int `json:"showManager"`
+ ShowManagerIdentitySet int `json:"showManagerIdentitySet"`
+ ShowNeedDelReasonSet int `json:"showNeedDelReasonSet"`
+ ShowNotice int `json:"showNotice"`
+ ShowOnlyManagerReplySet int `json:"showOnlyManagerReplySet"`
+ ShowRank int `json:"showRank"`
+ ShowRank2 int `json:"showRank2"`
+ ShowRecycleBin int `json:"showRecycleBin"`
+ ShowReplyByClass int `json:"showReplyByClass"`
+ ShowReplyNeedCheck int `json:"showReplyNeedCheck"`
+ ShowSignbanSet int `json:"showSignbanSet"`
+ ShowSpeechSet int `json:"showSpeechSet"`
+ ShowTopicCheck int `json:"showTopicCheck"`
+ ShowTopicNeedCheck int `json:"showTopicNeedCheck"`
+ ShowTransferSet int `json:"showTransferSet"`
+ } `json:"groupAuth"`
+ OperationAuth struct {
+ Add int `json:"add"`
+ AddTopicToFolder int `json:"addTopicToFolder"`
+ ChoiceSet int `json:"choiceSet"`
+ DelTopicFromFolder int `json:"delTopicFromFolder"`
+ Delete int `json:"delete"`
+ Reply int `json:"reply"`
+ ScoreSet int `json:"scoreSet"`
+ TopSet int `json:"topSet"`
+ Update int `json:"update"`
+ } `json:"operationAuth"`
+}
+
+type File struct {
+ Cataid int `json:"cataid"`
+ Cfid int `json:"cfid"`
+ Content struct {
+ Cfid int `json:"cfid"`
+ Pid int `json:"pid"`
+ FolderName string `json:"folderName"`
+ ShareType int `json:"shareType"`
+ Preview string `json:"preview"`
+ Filetype string `json:"filetype"`
+ PreviewURL string `json:"previewUrl"`
+ IsImg bool `json:"isImg"`
+ ParentPath string `json:"parentPath"`
+ Icon string `json:"icon"`
+ Suffix string `json:"suffix"`
+ Duration int `json:"duration"`
+ Pantype string `json:"pantype"`
+ Puid int `json:"puid"`
+ Filepath string `json:"filepath"`
+ Crc string `json:"crc"`
+ Isfile bool `json:"isfile"`
+ Residstr string `json:"residstr"`
+ ObjectID string `json:"objectId"`
+ Extinfo string `json:"extinfo"`
+ Thumbnail string `json:"thumbnail"`
+ Creator int `json:"creator"`
+ ResTypeValue int `json:"resTypeValue"`
+ UploadDateFormat string `json:"uploadDateFormat"`
+ DisableOpt bool `json:"disableOpt"`
+ DownPath string `json:"downPath"`
+ Sort int `json:"sort"`
+ Topsort int `json:"topsort"`
+ Restype string `json:"restype"`
+ Size int `json:"size"`
+ UploadDate string `json:"uploadDate"`
+ FileSize string `json:"fileSize"`
+ Name string `json:"name"`
+ FileID string `json:"fileId"`
+ } `json:"content"`
+ CreatorID int `json:"creatorId"`
+ DesID string `json:"des_id"`
+ ID int `json:"id"`
+ Inserttime int64 `json:"inserttime"`
+ Key string `json:"key"`
+ Norder int `json:"norder"`
+ OwnerID int `json:"ownerId"`
+ OwnerType int `json:"ownerType"`
+ Path string `json:"path"`
+ Rid int `json:"rid"`
+ Status int `json:"status"`
+ Topsign int `json:"topsign"`
+}
+
+type ListFileResp struct {
+ Msg string `json:"msg"`
+ Result int `json:"result"`
+ Status bool `json:"status"`
+ UserAuth UserAuth `json:"userAuth"`
+ List []File `json:"list"`
+}
+
+type DownResp struct {
+ Msg string `json:"msg"`
+ Duration int `json:"duration"`
+ Download string `json:"download"`
+ FileStatus string `json:"fileStatus"`
+ URL string `json:"url"`
+ Status bool `json:"status"`
+}
+
+type UploadDataRsp struct {
+ Result int `json:"result"`
+ Msg struct {
+ Puid int `json:"puid"`
+ Token string `json:"token"`
+ } `json:"msg"`
+}
+
+type UploadFileDataRsp struct {
+ Result bool `json:"result"`
+ Msg string `json:"msg"`
+ Crc string `json:"crc"`
+ ObjectID string `json:"objectId"`
+ Resid int64 `json:"resid"`
+ Puid int `json:"puid"`
+ Data struct {
+ DisableOpt bool `json:"disableOpt"`
+ Resid int64 `json:"resid"`
+ Crc string `json:"crc"`
+ Puid int `json:"puid"`
+ Isfile bool `json:"isfile"`
+ Pantype string `json:"pantype"`
+ Size int `json:"size"`
+ Name string `json:"name"`
+ ObjectID string `json:"objectId"`
+ Restype string `json:"restype"`
+ UploadDate time.Time `json:"uploadDate"`
+ ModifyDate time.Time `json:"modifyDate"`
+ UploadDateFormat string `json:"uploadDateFormat"`
+ Residstr string `json:"residstr"`
+ Suffix string `json:"suffix"`
+ Preview string `json:"preview"`
+ Thumbnail string `json:"thumbnail"`
+ Creator int `json:"creator"`
+ Duration int `json:"duration"`
+ IsImg bool `json:"isImg"`
+ PreviewURL string `json:"previewUrl"`
+ Filetype string `json:"filetype"`
+ Filepath string `json:"filepath"`
+ Sort int `json:"sort"`
+ Topsort int `json:"topsort"`
+ ResTypeValue int `json:"resTypeValue"`
+ Extinfo string `json:"extinfo"`
+ } `json:"data"`
+}
+
+type UploadDoneParam struct {
+ Cataid string `json:"cataid"`
+ Key string `json:"key"`
+ Param struct {
+ DisableOpt bool `json:"disableOpt"`
+ Resid int64 `json:"resid"`
+ Crc string `json:"crc"`
+ Puid int `json:"puid"`
+ Isfile bool `json:"isfile"`
+ Pantype string `json:"pantype"`
+ Size int `json:"size"`
+ Name string `json:"name"`
+ ObjectID string `json:"objectId"`
+ Restype string `json:"restype"`
+ UploadDate time.Time `json:"uploadDate"`
+ ModifyDate time.Time `json:"modifyDate"`
+ UploadDateFormat string `json:"uploadDateFormat"`
+ Residstr string `json:"residstr"`
+ Suffix string `json:"suffix"`
+ Preview string `json:"preview"`
+ Thumbnail string `json:"thumbnail"`
+ Creator int `json:"creator"`
+ Duration int `json:"duration"`
+ IsImg bool `json:"isImg"`
+ PreviewURL string `json:"previewUrl"`
+ Filetype string `json:"filetype"`
+ Filepath string `json:"filepath"`
+ Sort int `json:"sort"`
+ Topsort int `json:"topsort"`
+ ResTypeValue int `json:"resTypeValue"`
+ Extinfo string `json:"extinfo"`
+ } `json:"param"`
+}
+
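+// fileToObj converts an API entry into a model.Object; folders keep the numeric ID, files use the "<recId>$<fileId>" form.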
+func fileToObj(f File) *model.Object {
+ if len(f.Content.FolderName) > 0 {
+ return &model.Object{
+ ID: fmt.Sprintf("%d", f.ID),
+ Name: f.Content.FolderName,
+ Size: 0,
+ Modified: time.UnixMilli(f.Inserttime),
+ IsFolder: true,
+ }
+ }
+ parsedTime, err := time.Parse("2006-01-02 15:04", f.Content.UploadDate)
+ if err != nil {
+ parsedTime = time.Now()
+ }
+ return &model.Object{
+ ID: fmt.Sprintf("%d$%s", f.ID, f.Content.FileID),
+ Name: f.Content.Name,
+ Size: int64(f.Content.Size),
+ Modified: parsedTime,
+ IsFolder: false,
+ }
+}
diff --git a/drivers/chaoxing/util.go b/drivers/chaoxing/util.go
new file mode 100755
index 00000000..fdbd6653
--- /dev/null
+++ b/drivers/chaoxing/util.go
@@ -0,0 +1,179 @@
+package chaoxing
+
+import (
+ "bytes"
+ "crypto/aes"
+ "crypto/cipher"
+ "encoding/base64"
+ "errors"
+ "fmt"
+ "mime/multipart"
+ "net/http"
+ "strings"
+
+ "github.com/alist-org/alist/v3/drivers/base"
+ "github.com/go-resty/resty/v2"
+)
+
+func (d *ChaoXing) requestDownload(pathname string, method string, callback base.ReqCallback, resp interface{}) ([]byte, error) {
+ u := d.conf.DowloadApi + pathname
+ req := base.RestyClient.R()
+ req.SetHeaders(map[string]string{
+ "Cookie": d.Cookie,
+ "Accept": "application/json, text/plain, */*",
+ "Referer": d.conf.referer,
+ })
+ if callback != nil {
+ callback(req)
+ }
+ if resp != nil {
+ req.SetResult(resp)
+ }
+ var e Resp
+ req.SetError(&e)
+ res, err := req.Execute(method, u)
+ if err != nil {
+ return nil, err
+ }
+ return res.Body(), nil
+}
+
+func (d *ChaoXing) request(pathname string, method string, callback base.ReqCallback, resp interface{}) ([]byte, error) {
+ u := d.conf.api + pathname
+ if strings.Contains(pathname, "getUploadConfig") {
+ u = pathname
+ }
+ req := base.RestyClient.R()
+ req.SetHeaders(map[string]string{
+ "Cookie": d.Cookie,
+ "Accept": "application/json, text/plain, */*",
+ "Referer": d.conf.referer,
+ })
+ if callback != nil {
+ callback(req)
+ }
+ if resp != nil {
+ req.SetResult(resp)
+ }
+ var e Resp
+ req.SetError(&e)
+ res, err := req.Execute(method, u)
+ if err != nil {
+ return nil, err
+ }
+ return res.Body(), nil
+}
+
+func (d *ChaoXing) GetFiles(parent string) ([]File, error) {
+ files := make([]File, 0)
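+ // the list endpoint is queried twice (recType "1" and "2") and both result sets are merged into one listing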
+ query := map[string]string{
+ "bbsid": d.Addition.Bbsid,
+ "folderId": parent,
+ "recType": "1",
+ }
+ var resp ListFileResp
+ _, err := d.request("/pc/resource/getResourceList", http.MethodGet, func(req *resty.Request) {
+ req.SetQueryParams(query)
+ }, &resp)
+ if err != nil {
+ return nil, err
+ }
+ if resp.Result != 1 {
+ msg := fmt.Sprintf("error code is:%d", resp.Result)
+ return nil, errors.New(msg)
+ }
+ if len(resp.List) > 0 {
+ files = append(files, resp.List...)
+ }
+ querys := map[string]string{
+ "bbsid": d.Addition.Bbsid,
+ "folderId": parent,
+ "recType": "2",
+ }
+ var resps ListFileResp
+ _, err = d.request("/pc/resource/getResourceList", http.MethodGet, func(req *resty.Request) {
+ req.SetQueryParams(querys)
+ }, &resps)
+ if err != nil {
+ return nil, err
+ }
+ if len(resps.List) > 0 {
+ files = append(files, resps.List...)
+ }
+ return files, nil
+}
+
+func EncryptByAES(message, key string) (string, error) {
+ aesKey := []byte(key)
+ plainText := []byte(message)
+ block, err := aes.NewCipher(aesKey)
+ if err != nil {
+ return "", err
+ }
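+ // CBC mode with the first aes.BlockSize bytes of the key as the IV; the plaintext gets PKCS#7-style padding below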
+ iv := aesKey[:aes.BlockSize]
+ mode := cipher.NewCBCEncrypter(block, iv)
+ padding := aes.BlockSize - len(plainText)%aes.BlockSize
+ paddedText := append(plainText, bytes.Repeat([]byte{byte(padding)}, padding)...)
+ ciphertext := make([]byte, len(paddedText))
+ mode.CryptBlocks(ciphertext, paddedText)
+ encrypted := base64.StdEncoding.EncodeToString(ciphertext)
+ return encrypted, nil
+}
+
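+// CookiesToString joins cookies into a single "name=value; name=value" Cookie header value.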
+func CookiesToString(cookies []*http.Cookie) string {
+ var cookieStr string
+ for _, cookie := range cookies {
+ cookieStr += cookie.Name + "=" + cookie.Value + "; "
+ }
+ if len(cookieStr) > 2 {
+ cookieStr = cookieStr[:len(cookieStr)-2]
+ }
+ return cookieStr
+}
+
+func (d *ChaoXing) Login() (string, error) {
+ transferKey := "u2oh6Vu^HWe4_AES"
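+ // the login form expects the username and password AES-encrypted with this fixed transfer key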
+ body := &bytes.Buffer{}
+ writer := multipart.NewWriter(body)
+ uname, err := EncryptByAES(d.Addition.UserName, transferKey)
+ if err != nil {
+ return "", err
+ }
+ password, err := EncryptByAES(d.Addition.Password, transferKey)
+ if err != nil {
+ return "", err
+ }
+ err = writer.WriteField("uname", uname)
+ if err != nil {
+ return "", err
+ }
+ err = writer.WriteField("password", password)
+ if err != nil {
+ return "", err
+ }
+ err = writer.WriteField("t", "true")
+ if err != nil {
+ return "", err
+ }
+ err = writer.Close()
+ if err != nil {
+ return "", err
+ }
+ // Create the request
+ req, err := http.NewRequest("POST", "https://passport2.chaoxing.com/fanyalogin", body)
+ if err != nil {
+ return "", err
+ }
+ req.Header.Set("Content-Type", writer.FormDataContentType())
+ req.Header.Set("Content-Length", fmt.Sprintf("%d", body.Len()))
+ resp, err := http.DefaultClient.Do(req)
+ if err != nil {
+ return "", err
+ }
+ defer resp.Body.Close()
+ return CookiesToString(resp.Cookies()), nil
+
+}
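
A minimal standalone sketch of the AES-CBC + PKCS#7 scheme implemented by EncryptByAES above, assuming the same IV-taken-from-the-key layout; the 16-byte key and the sample credential below are placeholders, not real values.

package main

import (
	"bytes"
	"crypto/aes"
	"crypto/cipher"
	"encoding/base64"
	"fmt"
)

// encrypt mirrors EncryptByAES: PKCS#7 padding, CBC mode, IV = first block of the key.
func encrypt(message, key string) (string, error) {
	k := []byte(key)
	block, err := aes.NewCipher(k)
	if err != nil {
		return "", err
	}
	pad := aes.BlockSize - len(message)%aes.BlockSize
	plain := append([]byte(message), bytes.Repeat([]byte{byte(pad)}, pad)...)
	out := make([]byte, len(plain))
	cipher.NewCBCEncrypter(block, k[:aes.BlockSize]).CryptBlocks(out, plain)
	return base64.StdEncoding.EncodeToString(out), nil
}

// decrypt reverses the scheme and strips the PKCS#7 padding again.
func decrypt(enc, key string) (string, error) {
	k := []byte(key)
	data, err := base64.StdEncoding.DecodeString(enc)
	if err != nil {
		return "", err
	}
	block, err := aes.NewCipher(k)
	if err != nil {
		return "", err
	}
	plain := make([]byte, len(data))
	cipher.NewCBCDecrypter(block, k[:aes.BlockSize]).CryptBlocks(plain, data)
	pad := int(plain[len(plain)-1])
	return string(plain[:len(plain)-pad]), nil
}

func main() {
	key := "0123456789abcdef" // hypothetical 16-byte key
	enc, _ := encrypt("user@example.com", key)
	dec, _ := decrypt(enc, key)
	fmt.Println(enc, dec) // dec round-trips back to the input
}
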
diff --git a/drivers/cloudreve/driver.go b/drivers/cloudreve/driver.go
old mode 100644
new mode 100755
index 22407d0c..49c2d5f0
--- a/drivers/cloudreve/driver.go
+++ b/drivers/cloudreve/driver.go
@@ -49,7 +49,19 @@ func (d *Cloudreve) List(ctx context.Context, dir model.Obj, args model.ListArgs
}
return utils.SliceConvert(r.Objects, func(src Object) (model.Obj, error) {
- return objectToObj(src), nil
+ thumb, err := d.GetThumb(src)
+ if err != nil {
+ return nil, err
+ }
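+ // folder sizes need an extra object-property request, so they are only fetched when the option is enabled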
+ if src.Type == "dir" && d.EnableThumbAndFolderSize {
+ var dprop DirectoryProp
+ err = d.request(http.MethodGet, "/object/property/"+src.Id+"?is_folder=true", nil, &dprop)
+ if err != nil {
+ return nil, err
+ }
+ src.Size = dprop.Size
+ }
+ return objectToObj(src, thumb), nil
})
}
@@ -115,7 +127,7 @@ func (d *Cloudreve) Remove(ctx context.Context, obj model.Obj) error {
}
func (d *Cloudreve) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
- if stream.GetReadCloser() == http.NoBody {
+ if io.ReadCloser(stream) == http.NoBody {
return d.create(ctx, dstDir, stream)
}
var r DirectoryResp
diff --git a/drivers/cloudreve/meta.go b/drivers/cloudreve/meta.go
old mode 100644
new mode 100755
index 3c37c6d0..92c0b9fb
--- a/drivers/cloudreve/meta.go
+++ b/drivers/cloudreve/meta.go
@@ -9,10 +9,12 @@ type Addition struct {
// Usually one of two
driver.RootPath
// define other
- Address string `json:"address" required:"true"`
- Username string `json:"username"`
- Password string `json:"password"`
- Cookie string `json:"cookie"`
+ Address string `json:"address" required:"true"`
+ Username string `json:"username"`
+ Password string `json:"password"`
+ Cookie string `json:"cookie"`
+ CustomUA string `json:"custom_ua"`
+ EnableThumbAndFolderSize bool `json:"enable_thumb_and_folder_size"`
}
var config = driver.Config{
diff --git a/drivers/cloudreve/types.go b/drivers/cloudreve/types.go
old mode 100644
new mode 100755
index 114afd3c..241d993e
--- a/drivers/cloudreve/types.go
+++ b/drivers/cloudreve/types.go
@@ -44,13 +44,20 @@ type Object struct {
SourceEnabled bool `json:"source_enabled"`
}
-func objectToObj(f Object) *model.Object {
- return &model.Object{
- ID: f.Id,
- Name: f.Name,
- Size: int64(f.Size),
- Modified: f.Date,
- IsFolder: f.Type == "dir",
+type DirectoryProp struct {
+ Size int `json:"size"`
+}
+
+func objectToObj(f Object, t model.Thumbnail) *model.ObjThumb {
+ return &model.ObjThumb{
+ Object: model.Object{
+ ID: f.Id,
+ Name: f.Name,
+ Size: int64(f.Size),
+ Modified: f.Date,
+ IsFolder: f.Type == "dir",
+ },
+ Thumbnail: t,
}
}
diff --git a/drivers/cloudreve/util.go b/drivers/cloudreve/util.go
old mode 100644
new mode 100755
index 6a13ac7a..284e3289
--- a/drivers/cloudreve/util.go
+++ b/drivers/cloudreve/util.go
@@ -22,15 +22,18 @@ const loginPath = "/user/session"
func (d *Cloudreve) request(method string, path string, callback base.ReqCallback, out interface{}) error {
u := d.Address + "/api/v3" + path
+ ua := d.CustomUA
+ if ua == "" {
+ ua = base.UserAgent
+ }
req := base.RestyClient.R()
req.SetHeaders(map[string]string{
"Cookie": "cloudreve-session=" + d.Cookie,
"Accept": "application/json, text/plain, */*",
- "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/108.0.0.0 Safari/537.36",
+ "User-Agent": ua,
})
var r Resp
-
req.SetResult(&r)
if callback != nil {
@@ -146,3 +149,26 @@ func convertSrc(obj model.Obj) map[string]interface{} {
m["items"] = items
return m
}
+
+func (d *Cloudreve) GetThumb(file Object) (model.Thumbnail, error) {
+ if !d.Addition.EnableThumbAndFolderSize {
+ return model.Thumbnail{}, nil
+ }
+ ua := d.CustomUA
+ if ua == "" {
+ ua = base.UserAgent
+ }
+ req := base.NoRedirectClient.R()
+ req.SetHeaders(map[string]string{
+ "Cookie": "cloudreve-session=" + d.Cookie,
+ "Accept": "image/webp,image/apng,image/svg+xml,image/*,*/*;q=0.8",
+ "User-Agent": ua,
+ })
+ resp, err := req.Execute(http.MethodGet, d.Address+"/api/v3/file/thumb/"+file.Id)
+ if err != nil {
+ return model.Thumbnail{}, err
+ }
+ return model.Thumbnail{
+ Thumbnail: resp.Header().Get("Location"),
+ }, nil
+}
diff --git a/drivers/crypt/driver.go b/drivers/crypt/driver.go
old mode 100644
new mode 100755
index a2d5a17c..2873465a
--- a/drivers/crypt/driver.go
+++ b/drivers/crypt/driver.go
@@ -3,8 +3,8 @@ package crypt
import (
"context"
"fmt"
+ "github.com/alist-org/alist/v3/internal/stream"
"io"
- "net/http"
stdpath "path"
"regexp"
"strings"
@@ -13,10 +13,10 @@ import (
"github.com/alist-org/alist/v3/internal/errs"
"github.com/alist-org/alist/v3/internal/fs"
"github.com/alist-org/alist/v3/internal/model"
- "github.com/alist-org/alist/v3/internal/net"
"github.com/alist-org/alist/v3/internal/op"
"github.com/alist-org/alist/v3/pkg/http_range"
"github.com/alist-org/alist/v3/pkg/utils"
+ "github.com/alist-org/alist/v3/server/common"
rcCrypt "github.com/rclone/rclone/backend/crypt"
"github.com/rclone/rclone/fs/config/configmap"
"github.com/rclone/rclone/fs/config/obscure"
@@ -55,6 +55,8 @@ func (d *Crypt) Init(ctx context.Context) error {
if !isCryptExt(d.EncryptedSuffix) {
return fmt.Errorf("EncryptedSuffix is Illegal")
}
+ d.FileNameEncoding = utils.GetNoneEmpty(d.FileNameEncoding, "base64")
+ d.EncryptedSuffix = utils.GetNoneEmpty(d.EncryptedSuffix, ".bin")
op.MustSaveDriverStorage(d)
@@ -72,7 +74,7 @@ func (d *Crypt) Init(ctx context.Context) error {
"password2": p2,
"filename_encryption": d.FileNameEnc,
"directory_name_encryption": d.DirNameEnc,
- "filename_encoding": "base64",
+ "filename_encoding": d.FileNameEncoding,
"suffix": d.EncryptedSuffix,
"pass_bad_blocks": "",
}
@@ -82,7 +84,6 @@ func (d *Crypt) Init(ctx context.Context) error {
}
d.cipher = c
- //c, err := rcCrypt.newCipher(rcCrypt.NameEncryptionStandard, "", "", true, nil)
return nil
}
@@ -123,11 +124,16 @@ func (d *Crypt) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([
//filter illegal files
continue
}
+ if !d.ShowHidden && strings.HasPrefix(name, ".") {
+ continue
+ }
objRes := model.Object{
Name: name,
Size: 0,
Modified: obj.ModTime(),
IsFolder: obj.IsDir(),
+ Ctime: obj.CreateTime(),
+ // discarding hash as it's encrypted
}
result = append(result, &objRes)
} else {
@@ -142,13 +148,21 @@ func (d *Crypt) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([
//filter illegal files
continue
}
+ if !d.ShowHidden && strings.HasPrefix(name, ".") {
+ continue
+ }
objRes := model.Object{
Name: name,
Size: size,
Modified: obj.ModTime(),
IsFolder: obj.IsDir(),
+ Ctime: obj.CreateTime(),
+ // discarding hash as it's encrypted
}
- if !ok {
+ if d.Thumbnail && thumb == "" {
+ thumb = utils.EncodePath(common.GetApiUrl(nil)+stdpath.Join("/d", args.ReqPath, ".thumbnails", name+".webp"), true)
+ }
+ if !ok && !d.Thumbnail {
result = append(result, &objRes)
} else {
objWithThumb := model.ObjThumb{
@@ -232,70 +246,53 @@ func (d *Crypt) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (
return nil, err
}
- if remoteLink.RangeReadCloser.RangeReader == nil && remoteLink.ReadSeekCloser == nil && len(remoteLink.URL) == 0 {
+ if remoteLink.RangeReadCloser == nil && remoteLink.MFile == nil && len(remoteLink.URL) == 0 {
return nil, fmt.Errorf("the remote storage driver need to be enhanced to support encrytion")
}
remoteFileSize := remoteFile.GetSize()
- remoteClosers := utils.NewClosers()
+ remoteClosers := utils.EmptyClosers()
rangeReaderFunc := func(ctx context.Context, underlyingOffset, underlyingLength int64) (io.ReadCloser, error) {
length := underlyingLength
if underlyingLength >= 0 && underlyingOffset+underlyingLength >= remoteFileSize {
length = -1
}
- if remoteLink.RangeReadCloser.RangeReader != nil {
- //remoteRangeReader, err :=
- remoteReader, err := remoteLink.RangeReadCloser.RangeReader(http_range.Range{Start: underlyingOffset, Length: length})
- remoteClosers.Add(remoteLink.RangeReadCloser.Closers)
+ rrc := remoteLink.RangeReadCloser
+ if len(remoteLink.URL) > 0 {
+
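+ // wrap a plain URL link in a RangeReadCloser so both link types share the same read path below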
+ rangedRemoteLink := &model.Link{
+ URL: remoteLink.URL,
+ Header: remoteLink.Header,
+ }
+ var converted, err = stream.GetRangeReadCloserFromLink(remoteFileSize, rangedRemoteLink)
if err != nil {
return nil, err
}
- return remoteReader, nil
+ rrc = converted
}
- if remoteLink.ReadSeekCloser != nil {
- _, err := remoteLink.ReadSeekCloser.Seek(underlyingOffset, io.SeekStart)
+ if rrc != nil {
+ //remoteRangeReader, err :=
+ remoteReader, err := rrc.RangeRead(ctx, http_range.Range{Start: underlyingOffset, Length: length})
+ remoteClosers.AddClosers(rrc.GetClosers())
if err != nil {
return nil, err
}
- //remoteClosers.Add(remoteLink.ReadSeekCloser)
- //keep reuse same ReadSeekCloser and close at last.
- return io.NopCloser(remoteLink.ReadSeekCloser), nil
+ return remoteReader, nil
}
- if len(remoteLink.URL) > 0 {
- rangedRemoteLink := &model.Link{
- URL: remoteLink.URL,
- Header: remoteLink.Header,
- }
- response, err := RequestRangedHttp(args.HttpReq, rangedRemoteLink, underlyingOffset, length)
- //remoteClosers.Add(response.Body)
+ if remoteLink.MFile != nil {
+ _, err := remoteLink.MFile.Seek(underlyingOffset, io.SeekStart)
if err != nil {
- return nil, fmt.Errorf("remote storage http request failure,status: %d err:%s", response.StatusCode, err)
- }
- if underlyingOffset == 0 && length == -1 || response.StatusCode == http.StatusPartialContent {
- return response.Body, nil
- } else if response.StatusCode == http.StatusOK {
- log.Warnf("remote http server not supporting range request, expect low perfromace!")
- readCloser, err := net.GetRangedHttpReader(response.Body, underlyingOffset, length)
- if err != nil {
- return nil, err
- }
- return readCloser, nil
+ return nil, err
}
-
- return response.Body, nil
+ // reuse the same MFile and let remoteClosers close it at the end
+ remoteClosers.Add(remoteLink.MFile)
+ return io.NopCloser(remoteLink.MFile), nil
}
- //if remoteLink.Data != nil {
- // log.Warnf("remote storage not supporting range request, expect low perfromace!")
- // readCloser, err := net.GetRangedHttpReader(remoteLink.Data, underlyingOffset, length)
- // remoteCloser = remoteLink.Data
- // if err != nil {
- // return nil, err
- // }
- // return readCloser, nil
- //}
+
return nil, errs.NotSupport
}
- resultRangeReader := func(httpRange http_range.Range) (io.ReadCloser, error) {
+ resultRangeReader := func(ctx context.Context, httpRange http_range.Range) (io.ReadCloser, error) {
readSeeker, err := d.cipher.DecryptDataSeek(ctx, rangeReaderFunc, httpRange.Start, httpRange.Length)
if err != nil {
return nil, err
@@ -306,7 +303,7 @@ func (d *Crypt) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (
resultRangeReadCloser := &model.RangeReadCloser{RangeReader: resultRangeReader, Closers: remoteClosers}
resultLink := &model.Link{
Header: remoteLink.Header,
- RangeReadCloser: *resultRangeReadCloser,
+ RangeReadCloser: resultRangeReadCloser,
Expiration: remoteLink.Expiration,
}
@@ -370,32 +367,32 @@ func (d *Crypt) Remove(ctx context.Context, obj model.Obj) error {
return op.Remove(ctx, d.remoteStorage, remoteActualPath)
}
-func (d *Crypt) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
+func (d *Crypt) Put(ctx context.Context, dstDir model.Obj, streamer model.FileStreamer, up driver.UpdateProgress) error {
dstDirActualPath, err := d.getActualPathForRemote(dstDir.GetPath(), true)
if err != nil {
return fmt.Errorf("failed to convert path to remote path: %w", err)
}
- in := stream.GetReadCloser()
// Encrypt the data into wrappedIn
- wrappedIn, err := d.cipher.EncryptData(in)
+ wrappedIn, err := d.cipher.EncryptData(streamer)
if err != nil {
return fmt.Errorf("failed to EncryptData: %w", err)
}
- streamOut := &model.FileStream{
+ // a seekable stream is not provided here, since rapid upload cannot work for encrypted data
+ streamOut := &stream.FileStream{
Obj: &model.Object{
- ID: stream.GetID(),
- Path: stream.GetPath(),
- Name: d.cipher.EncryptFileName(stream.GetName()),
- Size: d.cipher.EncryptedSize(stream.GetSize()),
- Modified: stream.ModTime(),
- IsFolder: stream.IsDir(),
+ ID: streamer.GetID(),
+ Path: streamer.GetPath(),
+ Name: d.cipher.EncryptFileName(streamer.GetName()),
+ Size: d.cipher.EncryptedSize(streamer.GetSize()),
+ Modified: streamer.ModTime(),
+ IsFolder: streamer.IsDir(),
},
- ReadCloser: io.NopCloser(wrappedIn),
+ Reader: wrappedIn,
Mimetype: "application/octet-stream",
- WebPutAsTask: stream.NeedStore(),
- Old: stream.GetOld(),
+ WebPutAsTask: streamer.NeedStore(),
+ Exist: streamer.GetExist(),
}
err = op.Put(ctx, d.remoteStorage, dstDirActualPath, streamOut, up, false)
if err != nil {
diff --git a/drivers/crypt/meta.go b/drivers/crypt/meta.go
old mode 100644
new mode 100755
index 68eab6b4..eec62a15
--- a/drivers/crypt/meta.go
+++ b/drivers/crypt/meta.go
@@ -15,16 +15,15 @@ type Addition struct {
DirNameEnc string `json:"directory_name_encryption" type:"select" required:"true" options:"false,true" default:"false"`
RemotePath string `json:"remote_path" required:"true" help:"This is where the encrypted data stores"`
- Password string `json:"password" required:"true" confidential:"true" help:"the main password"`
- Salt string `json:"salt" confidential:"true" help:"If you don't know what is salt, treat it as a second password'. Optional but recommended"`
- EncryptedSuffix string `json:"encrypted_suffix" required:"true" default:".bin" help:"encrypted files will have this suffix"`
-}
+ Password string `json:"password" required:"true" confidential:"true" help:"the main password"`
+ Salt string `json:"salt" confidential:"true" help:"If you don't know what a salt is, treat it as a second password. Optional but recommended"`
+ EncryptedSuffix string `json:"encrypted_suffix" required:"true" default:".bin" help:"for advanced users only! encrypted files will have this suffix"`
+ FileNameEncoding string `json:"filename_encoding" type:"select" required:"true" options:"base64,base32,base32768" default:"base64" help:"for advanced users only!"`
+
+ Thumbnail bool `json:"thumbnail" required:"true" default:"false" help:"enable thumbnail which pre-generated under .thumbnails folder"`
-/*// inMemory contains decrypted confidential info and other temp data. will not persist these info anywhere
-type inMemory struct {
- password string
- salt string
-}*/
+ ShowHidden bool `json:"show_hidden" default:"true" required:"false" help:"show hidden directories and files"`
+}
var config = driver.Config{
Name: "Crypt",
diff --git a/drivers/crypt/types.go b/drivers/crypt/types.go
old mode 100644
new mode 100755
diff --git a/drivers/crypt/util.go b/drivers/crypt/util.go
old mode 100644
new mode 100755
index f4246756..3e55fb37
--- a/drivers/crypt/util.go
+++ b/drivers/crypt/util.go
@@ -1,24 +1,13 @@
package crypt
import (
- "net/http"
stdpath "path"
"path/filepath"
"strings"
- "github.com/alist-org/alist/v3/internal/model"
- "github.com/alist-org/alist/v3/internal/net"
"github.com/alist-org/alist/v3/internal/op"
- "github.com/alist-org/alist/v3/pkg/http_range"
)
-func RequestRangedHttp(r *http.Request, link *model.Link, offset, length int64) (*http.Response, error) {
- header := net.ProcessHeader(http.Header{}, link.Header)
- header = http_range.ApplyRangeToHttpHeader(http_range.Range{Start: offset, Length: length}, header)
-
- return net.RequestHttp("GET", header, link.URL)
-}
-
// will give the best guessing based on the path
func guessPath(path string) (isFolder, secondTry bool) {
if strings.HasSuffix(path, "/") {
diff --git a/drivers/dropbox/driver.go b/drivers/dropbox/driver.go
old mode 100644
new mode 100755
index 7559d645..95148b94
--- a/drivers/dropbox/driver.go
+++ b/drivers/dropbox/driver.go
@@ -203,7 +203,7 @@ func (d *Dropbox) Put(ctx context.Context, dstDir model.Obj, stream model.FileSt
_ = res.Body.Close()
if count > 0 {
- up((i + 1) * 100 / count)
+ up(float64(i+1) * 100 / float64(count))
}
offset += byteSize
diff --git a/drivers/dropbox/meta.go b/drivers/dropbox/meta.go
old mode 100644
new mode 100755
diff --git a/drivers/dropbox/types.go b/drivers/dropbox/types.go
old mode 100644
new mode 100755
diff --git a/drivers/dropbox/util.go b/drivers/dropbox/util.go
old mode 100644
new mode 100755
diff --git a/drivers/ftp/driver.go b/drivers/ftp/driver.go
old mode 100644
new mode 100755
index 03606bc9..70fbabdc
--- a/drivers/ftp/driver.go
+++ b/drivers/ftp/driver.go
@@ -64,9 +64,9 @@ func (d *FTP) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*m
return nil, err
}
- r := NewFTPFileReader(d.conn, file.GetPath())
+ r := NewFileReader(d.conn, file.GetPath(), file.GetSize())
link := &model.Link{
- ReadSeekCloser: r,
+ MFile: r,
}
return link, nil
}
diff --git a/drivers/ftp/meta.go b/drivers/ftp/meta.go
old mode 100644
new mode 100755
diff --git a/drivers/ftp/types.go b/drivers/ftp/types.go
old mode 100644
new mode 100755
diff --git a/drivers/ftp/util.go b/drivers/ftp/util.go
old mode 100644
new mode 100755
index 3abffd93..196d874c
--- a/drivers/ftp/util.go
+++ b/drivers/ftp/util.go
@@ -4,6 +4,7 @@ import (
"io"
"os"
"sync"
+ "sync/atomic"
"time"
"github.com/jlaffaye/ftp"
@@ -30,43 +31,59 @@ func (d *FTP) login() error {
return nil
}
-// An FTP file reader that implements io.ReadSeekCloser for seeking.
-type FTPFileReader struct {
- conn *ftp.ServerConn
- resp *ftp.Response
- offset int64
- mu sync.Mutex
- path string
+// FileReader is an FTP file reader implementing Read, ReadAt, Seek and Close, so it can be used as the MFile of a link.
+type FileReader struct {
+ conn *ftp.ServerConn
+ resp *ftp.Response
+ offset atomic.Int64
+ readAtOffset int64
+ mu sync.Mutex
+ path string
+ size int64
}
-func NewFTPFileReader(conn *ftp.ServerConn, path string) *FTPFileReader {
- return &FTPFileReader{
+func NewFileReader(conn *ftp.ServerConn, path string, size int64) *FileReader {
+ return &FileReader{
conn: conn,
path: path,
+ size: size,
}
}
-func (r *FTPFileReader) Read(buf []byte) (n int, err error) {
+func (r *FileReader) Read(buf []byte) (n int, err error) {
+ n, err = r.ReadAt(buf, r.offset.Load())
+ r.offset.Add(int64(n))
+ return
+}
+
+func (r *FileReader) ReadAt(buf []byte, off int64) (n int, err error) {
+ if off < 0 {
+ return -1, os.ErrInvalid
+ }
r.mu.Lock()
defer r.mu.Unlock()
+ if off != r.readAtOffset {
+ // the data connection has to be restarted to read from a different offset
+ _ = r.resp.Close()
+ r.resp = nil
+ }
+
if r.resp == nil {
- r.resp, err = r.conn.RetrFrom(r.path, uint64(r.offset))
+ r.resp, err = r.conn.RetrFrom(r.path, uint64(off))
+ r.readAtOffset = off
if err != nil {
return 0, err
}
}
n, err = r.resp.Read(buf)
- r.offset += int64(n)
+ r.readAtOffset += int64(n)
return
}
-func (r *FTPFileReader) Seek(offset int64, whence int) (int64, error) {
- r.mu.Lock()
- defer r.mu.Unlock()
-
- oldOffset := r.offset
+func (r *FileReader) Seek(offset int64, whence int) (int64, error) {
+ oldOffset := r.offset.Load()
var newOffset int64
switch whence {
case io.SeekStart:
@@ -74,11 +91,7 @@ func (r *FTPFileReader) Seek(offset int64, whence int) (int64, error) {
case io.SeekCurrent:
newOffset = oldOffset + offset
case io.SeekEnd:
- size, err := r.conn.FileSize(r.path)
- if err != nil {
- return oldOffset, err
- }
- newOffset = offset + int64(size)
+ return r.size, nil
default:
return -1, os.ErrInvalid
}
@@ -91,17 +104,11 @@ func (r *FTPFileReader) Seek(offset int64, whence int) (int64, error) {
// offset not changed, so return directly
return oldOffset, nil
}
- r.offset = newOffset
-
- if r.resp != nil {
- // close the existing ftp data connection, otherwise the next read will be blocked
- _ = r.resp.Close() // we do not care about whether it returns an error
- r.resp = nil
- }
+ r.offset.Store(newOffset)
return newOffset, nil
}
-func (r *FTPFileReader) Close() error {
+func (r *FileReader) Close() error {
if r.resp != nil {
return r.resp.Close()
}
diff --git a/drivers/google_drive/driver.go b/drivers/google_drive/driver.go
old mode 100644
new mode 100755
index cf573d93..dccdcea9
--- a/drivers/google_drive/driver.go
+++ b/drivers/google_drive/driver.go
@@ -112,7 +112,7 @@ func (d *GoogleDrive) Remove(ctx context.Context, obj model.Obj) error {
}
func (d *GoogleDrive) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
- obj := stream.GetOld()
+ obj := stream.GetExist()
var (
e Error
url string
@@ -158,7 +158,7 @@ func (d *GoogleDrive) Put(ctx context.Context, dstDir model.Obj, stream model.Fi
putUrl := res.Header().Get("location")
if stream.GetSize() < d.ChunkSize*1024*1024 {
_, err = d.request(putUrl, http.MethodPut, func(req *resty.Request) {
- req.SetHeader("Content-Length", strconv.FormatInt(stream.GetSize(), 10)).SetBody(stream.GetReadCloser())
+ req.SetHeader("Content-Length", strconv.FormatInt(stream.GetSize(), 10)).SetBody(stream)
}, nil)
} else {
err = d.chunkUpload(ctx, stream, putUrl)
diff --git a/drivers/google_drive/meta.go b/drivers/google_drive/meta.go
old mode 100644
new mode 100755
diff --git a/drivers/google_drive/types.go b/drivers/google_drive/types.go
old mode 100644
new mode 100755
index 796c1321..07545932
--- a/drivers/google_drive/types.go
+++ b/drivers/google_drive/types.go
@@ -5,6 +5,7 @@ import (
"time"
"github.com/alist-org/alist/v3/internal/model"
+ "github.com/alist-org/alist/v3/pkg/utils"
log "github.com/sirupsen/logrus"
)
@@ -23,12 +24,17 @@ type File struct {
Name string `json:"name"`
MimeType string `json:"mimeType"`
ModifiedTime time.Time `json:"modifiedTime"`
+ CreatedTime time.Time `json:"createdTime"`
Size string `json:"size"`
ThumbnailLink string `json:"thumbnailLink"`
ShortcutDetails struct {
TargetId string `json:"targetId"`
TargetMimeType string `json:"targetMimeType"`
} `json:"shortcutDetails"`
+
+ MD5Checksum string `json:"md5Checksum"`
+ SHA1Checksum string `json:"sha1Checksum"`
+ SHA256Checksum string `json:"sha256Checksum"`
}
func fileToObj(f File) *model.ObjThumb {
@@ -39,10 +45,18 @@ func fileToObj(f File) *model.ObjThumb {
ID: f.Id,
Name: f.Name,
Size: size,
+ Ctime: f.CreatedTime,
Modified: f.ModifiedTime,
IsFolder: f.MimeType == "application/vnd.google-apps.folder",
+ HashInfo: utils.NewHashInfoByMap(map[*utils.HashType]string{
+ utils.MD5: f.MD5Checksum,
+ utils.SHA1: f.SHA1Checksum,
+ utils.SHA256: f.SHA256Checksum,
+ }),
+ },
+ Thumbnail: model.Thumbnail{
+ Thumbnail: f.ThumbnailLink,
},
- Thumbnail: model.Thumbnail{},
}
if f.MimeType == "application/vnd.google-apps.shortcut" {
obj.ID = f.ShortcutDetails.TargetId
diff --git a/drivers/google_drive/util.go b/drivers/google_drive/util.go
old mode 100644
new mode 100755
index 0168b21c..0d380112
--- a/drivers/google_drive/util.go
+++ b/drivers/google_drive/util.go
@@ -5,14 +5,14 @@ import (
"crypto/x509"
"encoding/pem"
"fmt"
- "io"
- "io/ioutil"
"net/http"
"os"
"regexp"
"strconv"
"time"
+ "github.com/alist-org/alist/v3/pkg/http_range"
+
"github.com/alist-org/alist/v3/drivers/base"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/pkg/utils"
@@ -43,7 +43,7 @@ func (d *GoogleDrive) refreshToken() error {
gdsaFileThis := d.RefreshToken
if gdsaFile.IsDir() {
if len(d.ServiceAccountFileList) <= 0 {
- gdsaReadDir, gdsaDirErr := ioutil.ReadDir(d.RefreshToken)
+ gdsaReadDir, gdsaDirErr := os.ReadDir(d.RefreshToken)
if gdsaDirErr != nil {
log.Error("read dir fail")
return gdsaDirErr
@@ -75,7 +75,7 @@ func (d *GoogleDrive) refreshToken() error {
}
}
- gdsaFileThisContent, err := ioutil.ReadFile(gdsaFileThis)
+ gdsaFileThisContent, err := os.ReadFile(gdsaFileThis)
if err != nil {
return err
}
@@ -195,7 +195,7 @@ func (d *GoogleDrive) getFiles(id string) ([]File, error) {
}
query := map[string]string{
"orderBy": orderBy,
- "fields": "files(id,name,mimeType,size,modifiedTime,thumbnailLink,shortcutDetails),nextPageToken",
+ "fields": "files(id,name,mimeType,size,modifiedTime,createdTime,thumbnailLink,shortcutDetails,md5Checksum,sha1Checksum,sha256Checksum),nextPageToken",
"pageSize": "1000",
"q": fmt.Sprintf("'%s' in parents and trashed = false", id),
//"includeItemsFromAllDrives": "true",
@@ -216,25 +216,29 @@ func (d *GoogleDrive) getFiles(id string) ([]File, error) {
func (d *GoogleDrive) chunkUpload(ctx context.Context, stream model.FileStreamer, url string) error {
var defaultChunkSize = d.ChunkSize * 1024 * 1024
- var finish int64 = 0
- for finish < stream.GetSize() {
+ var offset int64 = 0
+ for offset < stream.GetSize() {
if utils.IsCanceled(ctx) {
return ctx.Err()
}
- chunkSize := stream.GetSize() - finish
+ chunkSize := stream.GetSize() - offset
if chunkSize > defaultChunkSize {
chunkSize = defaultChunkSize
}
- _, err := d.request(url, http.MethodPut, func(req *resty.Request) {
+ reader, err := stream.RangeRead(http_range.Range{Start: offset, Length: chunkSize})
+ if err != nil {
+ return err
+ }
+ _, err = d.request(url, http.MethodPut, func(req *resty.Request) {
req.SetHeaders(map[string]string{
"Content-Length": strconv.FormatInt(chunkSize, 10),
- "Content-Range": fmt.Sprintf("bytes %d-%d/%d", finish, finish+chunkSize-1, stream.GetSize()),
- }).SetBody(io.LimitReader(stream.GetReadCloser(), chunkSize)).SetContext(ctx)
+ "Content-Range": fmt.Sprintf("bytes %d-%d/%d", offset, offset+chunkSize-1, stream.GetSize()),
+ }).SetBody(reader).SetContext(ctx)
}, nil)
if err != nil {
return err
}
- finish += chunkSize
+ offset += chunkSize
}
return nil
}
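
A minimal sketch of the Content-Range arithmetic that chunkUpload above relies on; the 23-byte file size and 10-byte chunk size are made-up values.

package main

import "fmt"

func main() {
	size, chunk := int64(23), int64(10)
	for offset := int64(0); offset < size; {
		length := chunk
		if size-offset < chunk {
			length = size - offset
		}
		// matches the header built in chunkUpload: bytes <first>-<last>/<total>
		fmt.Printf("Content-Range: bytes %d-%d/%d\n", offset, offset+length-1, size)
		offset += length
	}
}
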
diff --git a/drivers/google_photo/driver.go b/drivers/google_photo/driver.go
old mode 100644
new mode 100755
index aab3b5d9..b54132ef
--- a/drivers/google_photo/driver.go
+++ b/drivers/google_photo/driver.go
@@ -124,7 +124,7 @@ func (d *GooglePhoto) Put(ctx context.Context, dstDir model.Obj, stream model.Fi
}
resp, err := d.request(postUrl, http.MethodPost, func(req *resty.Request) {
- req.SetBody(stream.GetReadCloser()).SetContext(ctx)
+ req.SetBody(stream).SetContext(ctx)
}, nil, postHeaders)
if err != nil {
diff --git a/drivers/google_photo/meta.go b/drivers/google_photo/meta.go
old mode 100644
new mode 100755
diff --git a/drivers/google_photo/types.go b/drivers/google_photo/types.go
old mode 100644
new mode 100755
index 1a53ae09..497af0bf
--- a/drivers/google_photo/types.go
+++ b/drivers/google_photo/types.go
@@ -26,7 +26,7 @@ type MediaItem struct {
CoverPhotoBaseUrl string `json:"coverPhotoBaseUrl,omitempty"`
MimeType string `json:"mimeType,omitempty"`
FileName string `json:"filename,omitempty"`
- MediaMetadata MediaMetadata `json:"mediaMetadata,omitempty"`
+ MediaMetadata MediaMetadata `json:"mediaMetadata,omitempty"`
}
type MediaMetadata struct {
@@ -44,7 +44,7 @@ type Video struct {
}
func fileToObj(f MediaItem) *model.ObjThumb {
- if !reflect.DeepEqual(f.MediaMetadata, MediaMetadata{}){
+ if !reflect.DeepEqual(f.MediaMetadata, MediaMetadata{}) {
return &model.ObjThumb{
Object: model.Object{
ID: f.Id,
diff --git a/drivers/google_photo/util.go b/drivers/google_photo/util.go
old mode 100644
new mode 100755
index fbbff9ab..b7bcc525
--- a/drivers/google_photo/util.go
+++ b/drivers/google_photo/util.go
@@ -11,9 +11,9 @@ import (
// do others that not defined in Driver interface
const (
- FETCH_ALL = "all"
- FETCH_ALBUMS = "albums"
- FETCH_ROOT = "root"
+ FETCH_ALL = "all"
+ FETCH_ALBUMS = "albums"
+ FETCH_ROOT = "root"
FETCH_SHARE_ALBUMS = "share_albums"
)
@@ -89,15 +89,15 @@ func (d *GooglePhoto) getFiles(id string) ([]MediaItem, error) {
func (d *GooglePhoto) getFakeRoot() ([]MediaItem, error) {
return []MediaItem{
{
- Id: FETCH_ALL,
+ Id: FETCH_ALL,
Title: "全部媒体",
},
{
- Id: FETCH_ALBUMS,
+ Id: FETCH_ALBUMS,
Title: "全部影集",
},
{
- Id: FETCH_SHARE_ALBUMS,
+ Id: FETCH_SHARE_ALBUMS,
Title: "共享影集",
},
}, nil
@@ -131,7 +131,7 @@ func (d *GooglePhoto) getMedias(albumId string) ([]MediaItem, error) {
map[string]string{
"fields": "mediaItems(id,baseUrl,mimeType,mediaMetadata,filename),nextPageToken",
"pageSize": "100",
- "albumId": albumId,
+ "albumId": albumId,
"pageToken": "first",
}, http.MethodPost)
}
@@ -163,7 +163,7 @@ func (d *GooglePhoto) getMedia(id string) (MediaItem, error) {
return resp, nil
}
-func (d *GooglePhoto) fetchItems(url string, query map[string]string, method string) ([]MediaItem, error){
+func (d *GooglePhoto) fetchItems(url string, query map[string]string, method string) ([]MediaItem, error) {
res := make([]MediaItem, 0)
for query["pageToken"] != "" {
if query["pageToken"] == "first" {
diff --git a/drivers/ipfs_api/driver.go b/drivers/ipfs_api/driver.go
old mode 100644
new mode 100755
diff --git a/drivers/ipfs_api/meta.go b/drivers/ipfs_api/meta.go
old mode 100644
new mode 100755
diff --git a/drivers/lanzou/driver.go b/drivers/lanzou/driver.go
old mode 100644
new mode 100755
diff --git a/drivers/lanzou/help.go b/drivers/lanzou/help.go
old mode 100644
new mode 100755
index 89f8e071..31a558e9
--- a/drivers/lanzou/help.go
+++ b/drivers/lanzou/help.go
@@ -118,13 +118,102 @@ var findKVReg = regexp.MustCompile(`'(.+?)':('?([^' },]*)'?)`) // 拆分kv
// 根据key查询js变量
func findJSVarFunc(key, data string) string {
- values := regexp.MustCompile(`var ` + key + ` = '(.+?)';`).FindStringSubmatch(data)
+ var values []string
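+ // sasign can be defined more than once in the page; when exactly three matches exist, the second one holds the usable value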
+ if key != "sasign" {
+ values = regexp.MustCompile(`var ` + key + ` = '(.+?)';`).FindStringSubmatch(data)
+ } else {
+ matches := regexp.MustCompile(`var `+key+` = '(.+?)';`).FindAllStringSubmatch(data, -1)
+ if len(matches) == 3 {
+ values = matches[1]
+ } else if len(matches) > 0 {
+ values = matches[0]
+ }
+ }
if len(values) == 0 {
return ""
}
return values[1]
}
+var findFunction = regexp.MustCompile(`(?ims)^function[^{]+`)
+var findFunctionAll = regexp.MustCompile(`(?is)function[^{]+`)
+
+// find the start/end positions of all function definitions
+func findJSFunctionIndex(data string, all bool) [][2]int {
+ findFunction := findFunction
+ if all {
+ findFunction = findFunctionAll
+ }
+
+ indexs := findFunction.FindAllStringIndex(data, -1)
+ fIndexs := make([][2]int, 0, len(indexs))
+
+ for _, index := range indexs {
+ if len(index) != 2 {
+ continue
+ }
+ count, data := 0, data[index[1]:]
+ for ii, v := range data {
+ if v == ' ' && count == 0 {
+ continue
+ }
+ if v == '{' {
+ count++
+ }
+
+ if v == '}' {
+ count--
+ }
+ if count == 0 {
+ fIndexs = append(fIndexs, [2]int{index[0], index[1] + ii + 1})
+ break
+ }
+ }
+ }
+ return fIndexs
+}
+
+// remove top-level JS function definitions from the HTML
+func removeJSGlobalFunction(html string) string {
+ indexs := findJSFunctionIndex(html, false)
+ block := make([]string, len(indexs))
+ for i, next := len(indexs)-1, len(html); i >= 0; i-- {
+ index := indexs[i]
+ block[i] = html[index[1]:next]
+ next = index[0]
+ }
+ return strings.Join(block, "")
+}
+
+// get a JS function body by its name
+func getJSFunctionByName(html string, name string) (string, error) {
+ indexs := findJSFunctionIndex(html, true)
+ for _, index := range indexs {
+ data := html[index[0]:index[1]]
+ if regexp.MustCompile(`function\s+` + name + `[()\s]+{`).MatchString(data) {
+ return data, nil
+ }
+ }
+ return "", fmt.Errorf("not find %s function", name)
+}
+
+// parse the JSON embedded in the HTML, picking the longest candidate
+func htmlJsonToMap2(html string) (map[string]string, error) {
+ datas := findDataReg.FindAllStringSubmatch(html, -1)
+ var sData string
+ for _, data := range datas {
+ if len(data[1]) > len(sData) {
+ sData = data[1]
+ }
+ }
+ if sData == "" {
+ return nil, fmt.Errorf("not find data")
+ }
+ return jsonToMap(sData, html), nil
+}
+
// 解析html中的JSON
func htmlJsonToMap(html string) (map[string]string, error) {
datas := findDataReg.FindStringSubmatch(html)
diff --git a/drivers/lanzou/meta.go b/drivers/lanzou/meta.go
old mode 100644
new mode 100755
diff --git a/drivers/lanzou/types.go b/drivers/lanzou/types.go
old mode 100644
new mode 100755
index 2e2daf46..d03838dd
--- a/drivers/lanzou/types.go
+++ b/drivers/lanzou/types.go
@@ -3,6 +3,8 @@ package lanzou
import (
"errors"
"fmt"
+ "github.com/alist-org/alist/v3/internal/model"
+ "github.com/alist-org/alist/v3/pkg/utils"
"time"
)
@@ -18,6 +20,9 @@ type RespInfo[T any] struct {
Info T `json:"info"`
}
+var _ model.Obj = (*FileOrFolder)(nil)
+var _ model.Obj = (*FileOrFolderByShareUrl)(nil)
+
type FileOrFolder struct {
Name string `json:"name"`
//Onof string `json:"onof"` // 是否存在提取码
@@ -49,6 +54,14 @@ type FileOrFolder struct {
shareInfo *FileShare `json:"-"`
}
+func (f *FileOrFolder) CreateTime() time.Time {
+ return f.ModTime()
+}
+
+func (f *FileOrFolder) GetHash() utils.HashInfo {
+ return utils.HashInfo{}
+}
+
func (f *FileOrFolder) GetID() string {
if f.IsDir() {
return f.FolID
@@ -130,6 +143,14 @@ type FileOrFolderByShareUrl struct {
repairFlag bool `json:"-"`
}
+func (f *FileOrFolderByShareUrl) CreateTime() time.Time {
+ return f.ModTime()
+}
+
+func (f *FileOrFolderByShareUrl) GetHash() utils.HashInfo {
+ return utils.HashInfo{}
+}
+
func (f *FileOrFolderByShareUrl) GetID() string { return f.ID }
func (f *FileOrFolderByShareUrl) GetName() string { return f.NameAll }
func (f *FileOrFolderByShareUrl) GetPath() string { return "" }
diff --git a/drivers/lanzou/util.go b/drivers/lanzou/util.go
old mode 100644
new mode 100755
index 6e2f05cc..8aeba811
--- a/drivers/lanzou/util.go
+++ b/drivers/lanzou/util.go
@@ -258,7 +258,7 @@ var sizeFindReg = regexp.MustCompile(`(?i)大小\W*([0-9.]+\s*[bkm]+)`)
var timeFindReg = regexp.MustCompile(`\d+\s*[秒天分小][钟时]?前|[昨前]天|\d{4}-\d{2}-\d{2}`)
// 查找分享文件夹子文件夹ID和名称
-var findSubFolaerReg = regexp.MustCompile(`(?i)(?:folderlink|mbxfolder).+href="/(.+?)"(?:.+filename")?>(.+?)<`)
+var findSubFolderReg = regexp.MustCompile(`(?i)(?:folderlink|mbxfolder).+href="/(.+?)"(?:.+filename")?>(.+?)<`)
// 获取下载页面链接
var findDownPageParamReg = regexp.MustCompile(`
diff --git a/drivers/mega/driver.go b/drivers/mega/driver.go
if httpRange.Length >= 0 && httpRange.Start+httpRange.Length >= size {
length = -1
}
var down *mega.Download
err := utils.Retry(3, time.Second, func() (err error) {
- down, err = d.c.NewDownload(node.Node)
+ down, err = d.c.NewDownload(node.n)
return err
})
if err != nil {
@@ -97,37 +98,37 @@ func (d *Mega) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*
return readers.NewLimitedReadCloser(oo, length), nil
}
- resultRangeReadCloser := &model.RangeReadCloser{RangeReader: resultRangeReader, Closers: &finalClosers}
+ resultRangeReadCloser := &model.RangeReadCloser{RangeReader: resultRangeReader, Closers: finalClosers}
resultLink := &model.Link{
- RangeReadCloser: *resultRangeReadCloser,
+ RangeReadCloser: resultRangeReadCloser,
}
return resultLink, nil
}
- return nil, fmt.Errorf("unable to convert dir to mega node")
+ return nil, fmt.Errorf("unable to convert dir to mega n")
}
func (d *Mega) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) error {
if parentNode, ok := parentDir.(*MegaNode); ok {
- _, err := d.c.CreateDir(dirName, parentNode.Node)
+ _, err := d.c.CreateDir(dirName, parentNode.n)
return err
}
- return fmt.Errorf("unable to convert dir to mega node")
+ return fmt.Errorf("unable to convert dir to mega n")
}
func (d *Mega) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
if srcNode, ok := srcObj.(*MegaNode); ok {
if dstNode, ok := dstDir.(*MegaNode); ok {
- return d.c.Move(srcNode.Node, dstNode.Node)
+ return d.c.Move(srcNode.n, dstNode.n)
}
}
- return fmt.Errorf("unable to convert dir to mega node")
+ return fmt.Errorf("unable to convert dir to mega n")
}
func (d *Mega) Rename(ctx context.Context, srcObj model.Obj, newName string) error {
if srcNode, ok := srcObj.(*MegaNode); ok {
- return d.c.Rename(srcNode.Node, newName)
+ return d.c.Rename(srcNode.n, newName)
}
- return fmt.Errorf("unable to convert dir to mega node")
+ return fmt.Errorf("unable to convert dir to mega n")
}
func (d *Mega) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
@@ -136,14 +137,14 @@ func (d *Mega) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
func (d *Mega) Remove(ctx context.Context, obj model.Obj) error {
if node, ok := obj.(*MegaNode); ok {
- return d.c.Delete(node.Node, false)
+ return d.c.Delete(node.n, false)
}
- return fmt.Errorf("unable to convert dir to mega node")
+ return fmt.Errorf("unable to convert dir to mega n")
}
func (d *Mega) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
if dstNode, ok := dstDir.(*MegaNode); ok {
- u, err := d.c.NewUpload(dstNode.Node, stream.GetName(), stream.GetSize())
+ u, err := d.c.NewUpload(dstNode.n, stream.GetName(), stream.GetSize())
if err != nil {
return err
}
@@ -169,13 +170,13 @@ func (d *Mega) Put(ctx context.Context, dstDir model.Obj, stream model.FileStrea
if err != nil {
return err
}
- up(id * 100 / u.Chunks())
+ up(float64(id) * 100 / float64(u.Chunks()))
}
_, err = u.Finish()
return err
}
- return fmt.Errorf("unable to convert dir to mega node")
+ return fmt.Errorf("unable to convert dir to mega n")
}
//func (d *Mega) Other(ctx context.Context, args model.OtherArgs) (interface{}, error) {
diff --git a/drivers/mega/meta.go b/drivers/mega/meta.go
old mode 100644
new mode 100755
diff --git a/drivers/mega/types.go b/drivers/mega/types.go
old mode 100644
new mode 100755
index 4e7b3a9a..3046d449
--- a/drivers/mega/types.go
+++ b/drivers/mega/types.go
@@ -1,6 +1,7 @@
package mega
import (
+ "github.com/alist-org/alist/v3/pkg/utils"
"time"
"github.com/alist-org/alist/v3/internal/model"
@@ -8,29 +9,36 @@ import (
)
type MegaNode struct {
- *mega.Node
+ n *mega.Node
}
-//func (m *MegaNode) GetSize() int64 {
-// //TODO implement me
-// panic("implement me")
-//}
-//
-//func (m *MegaNode) GetName() string {
-// //TODO implement me
-// panic("implement me")
-//}
+func (m *MegaNode) GetSize() int64 {
+ return m.n.GetSize()
+}
+
+func (m *MegaNode) GetName() string {
+ return m.n.GetName()
+}
+
+func (m *MegaNode) CreateTime() time.Time {
+ return m.n.GetTimeStamp()
+}
+
+func (m *MegaNode) GetHash() utils.HashInfo {
+ // Mega uses MD5, but the original file hash cannot be retrieved because files are stored encrypted in the cloud
+ return utils.HashInfo{}
+}
func (m *MegaNode) ModTime() time.Time {
- return m.GetTimeStamp()
+ return m.n.GetTimeStamp()
}
func (m *MegaNode) IsDir() bool {
- return m.GetType() == mega.FOLDER || m.GetType() == mega.ROOT
+ return m.n.GetType() == mega.FOLDER || m.n.GetType() == mega.ROOT
}
func (m *MegaNode) GetID() string {
- return m.GetHash()
+ return m.n.GetHash()
}
func (m *MegaNode) GetPath() string {
diff --git a/drivers/mega/util.go b/drivers/mega/util.go
old mode 100644
new mode 100755
diff --git a/drivers/mopan/driver.go b/drivers/mopan/driver.go
old mode 100644
new mode 100755
index 55879005..f3bb4e74
--- a/drivers/mopan/driver.go
+++ b/drivers/mopan/driver.go
@@ -6,8 +6,8 @@ import (
"fmt"
"io"
"net/http"
- "os"
"strconv"
+ "strings"
"time"
"github.com/alist-org/alist/v3/drivers/base"
@@ -18,6 +18,7 @@ import (
"github.com/alist-org/alist/v3/pkg/utils"
"github.com/avast/retry-go"
"github.com/foxxorcat/mopan-sdk-go"
+ log "github.com/sirupsen/logrus"
)
type MoPan struct {
@@ -54,6 +55,16 @@ func (d *MoPan) Init(ctx context.Context) error {
return err
}
d.userID = info.UserID
+ log.Debugf("[mopan] Phone: %s UserCloudStorageRelations: %+v", d.Phone, data.UserCloudStorageRelations)
+ cloudCircleApp, _ := d.client.QueryAllCloudCircleApp()
+ log.Debugf("[mopan] Phone: %s CloudCircleApp: %+v", d.Phone, cloudCircleApp)
+ if d.RootFolderID == "" {
+ for _, userCloudStorage := range data.UserCloudStorageRelations {
+ if userCloudStorage.Path == "/文件" {
+ d.RootFolderID = userCloudStorage.FolderID
+ }
+ }
+ }
return nil
}
d.client = mopan.NewMoClientWithRestyClient(base.NewRestyClient()).
@@ -94,6 +105,7 @@ func (d *MoPan) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([
break
}
+ log.Debugf("[mopan] Phone: %s folder: %+v", d.Phone, data.FileListAO.FolderList)
files = append(files, utils.MustSliceConvert(data.FileListAO.FolderList, folderToObj)...)
files = append(files, utils.MustSliceConvert(data.FileListAO.FileList, fileToObj)...)
}
@@ -106,6 +118,18 @@ func (d *MoPan) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (
return nil, err
}
+ data.DownloadUrl = strings.Replace(strings.ReplaceAll(data.DownloadUrl, "&", "&"), "http://", "https://", 1)
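+ // follow a single 302 redirect manually so the client gets the final download URL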
+ res, err := base.NoRedirectClient.R().SetDoNotParseResponse(true).SetContext(ctx).Get(data.DownloadUrl)
+ if err != nil {
+ return nil, err
+ }
+ defer func() {
+ _ = res.RawBody().Close()
+ }()
+ if res.StatusCode() == 302 {
+ data.DownloadUrl = res.Header().Get("location")
+ }
+
return &model.Link{
URL: data.DownloadUrl,
}, nil
@@ -219,13 +243,12 @@ func (d *MoPan) Remove(ctx context.Context, obj model.Obj) error {
}
func (d *MoPan) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) (model.Obj, error) {
- file, err := utils.CreateTempFile(stream, stream.GetSize())
+ file, err := stream.CacheFullInTempFile()
if err != nil {
return nil, err
}
defer func() {
_ = file.Close()
- _ = os.Remove(file.Name())
}()
// step.1
@@ -252,7 +275,7 @@ func (d *MoPan) Put(ctx context.Context, dstDir model.Obj, stream model.FileStre
}
if !initUpdload.FileDataExists {
- fmt.Println(d.client.CloudDiskStartBusiness())
+ utils.Log.Error(d.client.CloudDiskStartBusiness())
threadG, upCtx := errgroup.NewGroupWithContext(ctx, d.uploadThread,
retry.Attempts(3),
@@ -288,7 +311,7 @@ func (d *MoPan) Put(ctx context.Context, dstDir model.Obj, stream model.FileStre
if resp.StatusCode != http.StatusOK {
return fmt.Errorf("upload err,code=%d", resp.StatusCode)
}
- up(100 * int(threadG.Success()) / len(parts))
+ up(100 * float64(threadG.Success()) / float64(len(parts)))
initUpdload.PartInfos[i] = ""
return nil
})
diff --git a/drivers/mopan/meta.go b/drivers/mopan/meta.go
old mode 100644
new mode 100755
index ecea3bbd..e6583fc1
--- a/drivers/mopan/meta.go
+++ b/drivers/mopan/meta.go
@@ -9,7 +9,7 @@ type Addition struct {
Phone string `json:"phone" required:"true"`
Password string `json:"password" required:"true"`
- RootFolderID string `json:"root_folder_id" default:"-11" required:"true" help:"be careful when using the -11 value, some operations may cause system errors"`
+ RootFolderID string `json:"root_folder_id" default:""`
CloudID string `json:"cloud_id"`
diff --git a/drivers/mopan/types.go b/drivers/mopan/types.go
old mode 100644
new mode 100755
diff --git a/drivers/mopan/util.go b/drivers/mopan/util.go
old mode 100644
new mode 100755
index b6460559..e6b20f9a
--- a/drivers/mopan/util.go
+++ b/drivers/mopan/util.go
@@ -4,6 +4,7 @@ import (
"time"
"github.com/alist-org/alist/v3/internal/model"
+ "github.com/alist-org/alist/v3/pkg/utils"
"github.com/foxxorcat/mopan-sdk-go"
)
@@ -14,6 +15,8 @@ func fileToObj(f mopan.File) model.Obj {
Name: f.Name,
Size: int64(f.Size),
Modified: time.Time(f.LastOpTime),
+ Ctime: time.Time(f.CreateDate),
+ HashInfo: utils.NewHashInfo(utils.MD5, f.Md5),
},
Thumbnail: model.Thumbnail{
Thumbnail: f.Icon.SmallURL,
@@ -26,6 +29,7 @@ func folderToObj(f mopan.Folder) model.Obj {
ID: string(f.ID),
Name: f.Name,
Modified: time.Time(f.LastOpTime),
+ Ctime: time.Time(f.CreateDate),
IsFolder: true,
}
}
@@ -37,6 +41,7 @@ func CloneObj(o model.Obj, newID, newName string) model.Obj {
Name: newName,
IsFolder: true,
Modified: o.ModTime(),
+ Ctime: o.CreateTime(),
}
}
@@ -50,6 +55,8 @@ func CloneObj(o model.Obj, newID, newName string) model.Obj {
Name: newName,
Size: o.GetSize(),
Modified: o.ModTime(),
+ Ctime: o.CreateTime(),
+ HashInfo: o.GetHash(),
},
Thumbnail: model.Thumbnail{
Thumbnail: thumb,
diff --git a/drivers/onedrive/driver.go b/drivers/onedrive/driver.go
old mode 100644
new mode 100755
index c63a6dbd..50e129d9
--- a/drivers/onedrive/driver.go
+++ b/drivers/onedrive/driver.go
@@ -4,6 +4,7 @@ import (
"context"
"fmt"
"net/http"
+ "net/url"
"path"
"github.com/alist-org/alist/v3/drivers/base"
@@ -57,8 +58,17 @@ func (d *Onedrive) Link(ctx context.Context, file model.Obj, args model.LinkArgs
if f.File == nil {
return nil, errs.NotFile
}
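+ // substitute the configured custom host into the download URL, leaving the rest of the link untouched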
+ u := f.Url
+ if d.CustomHost != "" {
+ _u, err := url.Parse(f.Url)
+ if err != nil {
+ return nil, err
+ }
+ _u.Host = d.CustomHost
+ u = _u.String()
+ }
return &model.Link{
- URL: f.Url,
+ URL: u,
}, nil
}
diff --git a/drivers/onedrive/meta.go b/drivers/onedrive/meta.go
old mode 100644
new mode 100755
index f948e1a5..a60e5f33
--- a/drivers/onedrive/meta.go
+++ b/drivers/onedrive/meta.go
@@ -15,6 +15,7 @@ type Addition struct {
RefreshToken string `json:"refresh_token" required:"true"`
SiteId string `json:"site_id"`
ChunkSize int64 `json:"chunk_size" type:"number" default:"5"`
+ CustomHost string `json:"custom_host" help:"Custom host for onedrive download link"`
}
var config = driver.Config{
diff --git a/drivers/onedrive/types.go b/drivers/onedrive/types.go
old mode 100644
new mode 100755
diff --git a/drivers/onedrive/util.go b/drivers/onedrive/util.go
old mode 100644
new mode 100755
index b17a0361..a0c6fa8f
--- a/drivers/onedrive/util.go
+++ b/drivers/onedrive/util.go
@@ -196,13 +196,14 @@ func (d *Onedrive) upBig(ctx context.Context, dstDir model.Obj, stream model.Fil
if err != nil {
return err
}
- if res.StatusCode != 201 && res.StatusCode != 202 {
+ // https://learn.microsoft.com/zh-cn/onedrive/developer/rest-api/api/driveitem_createuploadsession
+ if res.StatusCode != 201 && res.StatusCode != 202 && res.StatusCode != 200 {
data, _ := io.ReadAll(res.Body)
res.Body.Close()
return errors.New(string(data))
}
res.Body.Close()
- up(int(finish * 100 / stream.GetSize()))
+ up(float64(finish) * 100 / float64(stream.GetSize()))
}
return nil
}
diff --git a/drivers/onedrive_app/driver.go b/drivers/onedrive_app/driver.go
old mode 100644
new mode 100755
index ac6f232e..84ff878a
--- a/drivers/onedrive_app/driver.go
+++ b/drivers/onedrive_app/driver.go
@@ -4,6 +4,7 @@ import (
"context"
"fmt"
"net/http"
+ "net/url"
"path"
"github.com/alist-org/alist/v3/drivers/base"
@@ -57,8 +58,17 @@ func (d *OnedriveAPP) Link(ctx context.Context, file model.Obj, args model.LinkA
if f.File == nil {
return nil, errs.NotFile
}
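+ // substitute the configured custom host into the download URL, leaving the rest of the link untouched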
+ u := f.Url
+ if d.CustomHost != "" {
+ _u, err := url.Parse(f.Url)
+ if err != nil {
+ return nil, err
+ }
+ _u.Host = d.CustomHost
+ u = _u.String()
+ }
return &model.Link{
- URL: f.Url,
+ URL: u,
}, nil
}
diff --git a/drivers/onedrive_app/meta.go b/drivers/onedrive_app/meta.go
old mode 100644
new mode 100755
index 21ae4f15..0499f503
--- a/drivers/onedrive_app/meta.go
+++ b/drivers/onedrive_app/meta.go
@@ -13,6 +13,7 @@ type Addition struct {
TenantID string `json:"tenant_id"`
Email string `json:"email"`
ChunkSize int64 `json:"chunk_size" type:"number" default:"5"`
+ CustomHost string `json:"custom_host" help:"Custom host for onedrive download link"`
}
var config = driver.Config{
diff --git a/drivers/onedrive_app/types.go b/drivers/onedrive_app/types.go
old mode 100644
new mode 100755
diff --git a/drivers/onedrive_app/util.go b/drivers/onedrive_app/util.go
old mode 100644
new mode 100755
index 51160e1a..28b34837
--- a/drivers/onedrive_app/util.go
+++ b/drivers/onedrive_app/util.go
@@ -71,8 +71,8 @@ func (d *OnedriveAPP) _accessToken() error {
"grant_type": "client_credentials",
"client_id": d.ClientID,
"client_secret": d.ClientSecret,
- "resource": "https://graph.microsoft.com/",
- "scope": "https://graph.microsoft.com/.default",
+ "resource": onedriveHostMap[d.Region].Api + "/",
+ "scope": onedriveHostMap[d.Region].Api + "/.default",
}).Post(url)
if err != nil {
return err
@@ -187,13 +187,14 @@ func (d *OnedriveAPP) upBig(ctx context.Context, dstDir model.Obj, stream model.
if err != nil {
return err
}
- if res.StatusCode != 201 && res.StatusCode != 202 {
+ // https://learn.microsoft.com/zh-cn/onedrive/developer/rest-api/api/driveitem_createuploadsession
+ if res.StatusCode != 201 && res.StatusCode != 202 && res.StatusCode != 200 {
data, _ := io.ReadAll(res.Body)
res.Body.Close()
return errors.New(string(data))
}
res.Body.Close()
- up(int(finish * 100 / stream.GetSize()))
+ up(float64(finish) * 100 / float64(stream.GetSize()))
}
return nil
}
diff --git a/drivers/pikpak/driver.go b/drivers/pikpak/driver.go
old mode 100644
new mode 100755
index a86a7539..52ca15c7
--- a/drivers/pikpak/driver.go
+++ b/drivers/pikpak/driver.go
@@ -3,15 +3,14 @@ package pikpak
import (
"context"
"fmt"
- "io"
"net/http"
- "os"
"strings"
"github.com/alist-org/alist/v3/drivers/base"
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/pkg/utils"
+ hash_extend "github.com/alist-org/alist/v3/pkg/utils/hash"
"github.com/aws/aws-sdk-go/aws"
"github.com/aws/aws-sdk-go/aws/credentials"
"github.com/aws/aws-sdk-go/aws/session"
@@ -124,23 +123,20 @@ func (d *PikPak) Remove(ctx context.Context, obj model.Obj) error {
}
func (d *PikPak) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
- tempFile, err := utils.CreateTempFile(stream.GetReadCloser(), stream.GetSize())
- if err != nil {
- return err
- }
- defer func() {
- _ = tempFile.Close()
- _ = os.Remove(tempFile.Name())
- }()
- // cal gcid
- sha1Str, err := getGcid(tempFile, stream.GetSize())
- if err != nil {
- return err
- }
- _, err = tempFile.Seek(0, io.SeekStart)
- if err != nil {
- return err
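+ // prefer a GCID hash already carried by the stream; cache to a temp file and hash it only when the value is missing or truncated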
+ hi := stream.GetHash()
+ sha1Str := hi.GetHash(hash_extend.GCID)
+ if len(sha1Str) < hash_extend.GCID.Width {
+ tFile, err := stream.CacheFullInTempFile()
+ if err != nil {
+ return err
+ }
+
+ sha1Str, err = utils.HashFile(hash_extend.GCID, tFile, stream.GetSize())
+ if err != nil {
+ return err
+ }
}
+
var resp UploadTaskData
res, err := d.request("https://api-drive.mypikpak.com/drive/v1/files", http.MethodPost, func(req *resty.Request) {
req.SetBody(base.Json{
@@ -179,7 +175,7 @@ func (d *PikPak) Put(ctx context.Context, dstDir model.Obj, stream model.FileStr
input := &s3manager.UploadInput{
Bucket: &params.Bucket,
Key: &params.Key,
- Body: tempFile,
+ Body: stream,
}
_, err = uploader.UploadWithContext(ctx, input)
return err
diff --git a/drivers/pikpak/meta.go b/drivers/pikpak/meta.go
old mode 100644
new mode 100755
diff --git a/drivers/pikpak/types.go b/drivers/pikpak/types.go
old mode 100644
new mode 100755
index b909cae0..489a1efe
--- a/drivers/pikpak/types.go
+++ b/drivers/pikpak/types.go
@@ -5,6 +5,8 @@ import (
"time"
"github.com/alist-org/alist/v3/internal/model"
+ "github.com/alist-org/alist/v3/pkg/utils"
+ hash_extend "github.com/alist-org/alist/v3/pkg/utils/hash"
)
type RespErr struct {
@@ -21,7 +23,9 @@ type File struct {
Id string `json:"id"`
Kind string `json:"kind"`
Name string `json:"name"`
+ CreatedTime time.Time `json:"created_time"`
ModifiedTime time.Time `json:"modified_time"`
+ Hash string `json:"hash"`
Size string `json:"size"`
ThumbnailLink string `json:"thumbnail_link"`
WebContentLink string `json:"web_content_link"`
@@ -35,8 +39,10 @@ func fileToObj(f File) *model.ObjThumb {
ID: f.Id,
Name: f.Name,
Size: size,
+ Ctime: f.CreatedTime,
Modified: f.ModifiedTime,
IsFolder: f.Kind == "drive#folder",
+ HashInfo: utils.NewHashInfo(hash_extend.GCID, f.Hash),
},
Thumbnail: model.Thumbnail{
Thumbnail: f.ThumbnailLink,
diff --git a/drivers/pikpak/util.go b/drivers/pikpak/util.go
old mode 100644
new mode 100755
diff --git a/drivers/pikpak_share/driver.go b/drivers/pikpak_share/driver.go
old mode 100644
new mode 100755
diff --git a/drivers/pikpak_share/meta.go b/drivers/pikpak_share/meta.go
old mode 100644
new mode 100755
diff --git a/drivers/pikpak_share/types.go b/drivers/pikpak_share/types.go
old mode 100644
new mode 100755
index 144a05a8..e446ab2b
--- a/drivers/pikpak_share/types.go
+++ b/drivers/pikpak_share/types.go
@@ -13,8 +13,8 @@ type RespErr struct {
}
type ShareResp struct {
- ShareStatus string `json:"share_status"`
- ShareStatusText string `json:"share_status_text"`
+ ShareStatus string `json:"share_status"`
+ ShareStatusText string `json:"share_status_text"`
FileInfo File `json:"file_info"`
Files []File `json:"files"`
NextPageToken string `json:"next_page_token"`
diff --git a/drivers/pikpak_share/util.go b/drivers/pikpak_share/util.go
old mode 100644
new mode 100755
diff --git a/drivers/quark_uc/driver.go b/drivers/quark_uc/driver.go
old mode 100644
new mode 100755
index 4969af5a..291189ce
--- a/drivers/quark_uc/driver.go
+++ b/drivers/quark_uc/driver.go
@@ -7,7 +7,6 @@ import (
"encoding/hex"
"io"
"net/http"
- "os"
"time"
"github.com/alist-org/alist/v3/drivers/base"
@@ -75,7 +74,7 @@ func (d *QuarkOrUC) Link(ctx context.Context, file model.Obj, args model.LinkArg
"User-Agent": []string{ua},
},
Concurrency: 2,
- PartSize: 10 * 1024 * 1024,
+ PartSize: 10 * utils.MB,
}, nil
}
@@ -136,13 +135,12 @@ func (d *QuarkOrUC) Remove(ctx context.Context, obj model.Obj) error {
}
func (d *QuarkOrUC) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
- tempFile, err := utils.CreateTempFile(stream.GetReadCloser(), stream.GetSize())
+ tempFile, err := stream.CacheFullInTempFile()
if err != nil {
return err
}
defer func() {
_ = tempFile.Close()
- _ = os.Remove(tempFile.Name())
}()
m := md5.New()
_, err = io.Copy(m, tempFile)
@@ -211,7 +209,7 @@ func (d *QuarkOrUC) Put(ctx context.Context, dstDir model.Obj, stream model.File
}
md5s = append(md5s, m)
partNumber++
- up(int(100 * (total - left) / total))
+ up(100 * float64(total-left) / float64(total))
}
err = d.upCommit(pre, md5s)
if err != nil {
diff --git a/drivers/quark_uc/meta.go b/drivers/quark_uc/meta.go
old mode 100644
new mode 100755
diff --git a/drivers/quark_uc/types.go b/drivers/quark_uc/types.go
old mode 100644
new mode 100755
diff --git a/drivers/quark_uc/util.go b/drivers/quark_uc/util.go
old mode 100644
new mode 100755
diff --git a/drivers/s3/driver.go b/drivers/s3/driver.go
old mode 100644
new mode 100755
index e888ecf8..c8099ee4
--- a/drivers/s3/driver.go
+++ b/drivers/s3/driver.go
@@ -10,6 +10,8 @@ import (
"strings"
"time"
+ "github.com/alist-org/alist/v3/internal/stream"
+
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/model"
"github.com/aws/aws-sdk-go/aws/session"
@@ -96,14 +98,14 @@ func (d *S3) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*mo
func (d *S3) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) error {
return d.Put(ctx, &model.Object{
Path: stdpath.Join(parentDir.GetPath(), dirName),
- }, &model.FileStream{
+ }, &stream.FileStream{
Obj: &model.Object{
Name: getPlaceholderName(d.Placeholder),
Modified: time.Now(),
},
- ReadCloser: io.NopCloser(bytes.NewReader([]byte{})),
- Mimetype: "application/octet-stream",
- }, func(int) {})
+ Reader: io.NopCloser(bytes.NewReader([]byte{})),
+ Mimetype: "application/octet-stream",
+ }, func(float64) {})
}
func (d *S3) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
diff --git a/drivers/s3/meta.go b/drivers/s3/meta.go
old mode 100644
new mode 100755
diff --git a/drivers/s3/types.go b/drivers/s3/types.go
old mode 100644
new mode 100755
diff --git a/drivers/s3/util.go b/drivers/s3/util.go
old mode 100644
new mode 100755
diff --git a/drivers/seafile/driver.go b/drivers/seafile/driver.go
old mode 100644
new mode 100755
diff --git a/drivers/seafile/meta.go b/drivers/seafile/meta.go
old mode 100644
new mode 100755
diff --git a/drivers/seafile/types.go b/drivers/seafile/types.go
old mode 100644
new mode 100755
diff --git a/drivers/seafile/util.go b/drivers/seafile/util.go
old mode 100644
new mode 100755
diff --git a/drivers/sftp/driver.go b/drivers/sftp/driver.go
old mode 100644
new mode 100755
index cf67ae0e..77f51984
--- a/drivers/sftp/driver.go
+++ b/drivers/sftp/driver.go
@@ -56,7 +56,7 @@ func (d *SFTP) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*
return nil, err
}
link := &model.Link{
- ReadSeekCloser: remoteFile,
+ MFile: remoteFile,
}
return link, nil
}
diff --git a/drivers/sftp/meta.go b/drivers/sftp/meta.go
old mode 100644
new mode 100755
diff --git a/drivers/sftp/types.go b/drivers/sftp/types.go
old mode 100644
new mode 100755
diff --git a/drivers/sftp/util.go b/drivers/sftp/util.go
old mode 100644
new mode 100755
diff --git a/drivers/smb/driver.go b/drivers/smb/driver.go
old mode 100644
new mode 100755
index 7e1be57d..9632f24e
--- a/drivers/smb/driver.go
+++ b/drivers/smb/driver.go
@@ -61,6 +61,7 @@ func (d *SMB) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]m
Modified: f.ModTime(),
Size: f.Size(),
IsFolder: f.IsDir(),
+ Ctime: f.(*smb2.FileStat).CreationTime,
},
}
files = append(files, &file)
@@ -79,7 +80,7 @@ func (d *SMB) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*m
return nil, err
}
link := &model.Link{
- ReadSeekCloser: remoteFile,
+ MFile: remoteFile,
}
d.updateLastConnTime()
return link, nil
diff --git a/drivers/smb/meta.go b/drivers/smb/meta.go
old mode 100644
new mode 100755
diff --git a/drivers/smb/types.go b/drivers/smb/types.go
old mode 100644
new mode 100755
diff --git a/drivers/smb/util.go b/drivers/smb/util.go
old mode 100644
new mode 100755
diff --git a/drivers/teambition/driver.go b/drivers/teambition/driver.go
old mode 100644
new mode 100755
index d4fcc401..c75d2ac0
--- a/drivers/teambition/driver.go
+++ b/drivers/teambition/driver.go
@@ -3,12 +3,12 @@ package teambition
import (
"context"
"errors"
+ "github.com/alist-org/alist/v3/pkg/utils"
"net/http"
"github.com/alist-org/alist/v3/drivers/base"
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/model"
- "github.com/alist-org/alist/v3/pkg/utils"
"github.com/go-resty/resty/v2"
)
@@ -128,11 +128,23 @@ func (d *Teambition) Put(ctx context.Context, dstDir model.Obj, stream model.Fil
if d.UseS3UploadMethod {
return d.newUpload(ctx, dstDir, stream, up)
}
- res, err := d.request("/api/v2/users/me", http.MethodGet, nil, nil)
- if err != nil {
- return err
+ var (
+ token string
+ err error
+ )
+ if d.isInternational() {
+ res, err := d.request("/projects", http.MethodGet, nil, nil)
+ if err != nil {
+ return err
+ }
+ token = getBetweenStr(string(res), "strikerAuth\":\"", "\",\"phoneForLogin")
+ } else {
+ res, err := d.request("/api/v2/users/me", http.MethodGet, nil, nil)
+ if err != nil {
+ return err
+ }
+ token = utils.Json.Get(res, "strikerAuth").ToString()
}
- token := utils.Json.Get(res, "strikerAuth").ToString()
var newFile *FileUpload
if stream.GetSize() <= 20971520 {
// post upload
diff --git a/drivers/teambition/help.go b/drivers/teambition/help.go
new file mode 100755
index 00000000..8581c3e8
--- /dev/null
+++ b/drivers/teambition/help.go
@@ -0,0 +1,18 @@
+package teambition
+
+import "strings"
+
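+// getBetweenStr returns the part of str between the first occurrence of start and the following occurrence of end, or "" if either marker is missing.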
+func getBetweenStr(str, start, end string) string {
+ n := strings.Index(str, start)
+ if n == -1 {
+ return ""
+ }
+ n = n + len(start)
+ str = string([]byte(str)[n:])
+ m := strings.Index(str, end)
+ if m == -1 {
+ return ""
+ }
+ str = string([]byte(str)[:m])
+ return str
+}
diff --git a/drivers/teambition/meta.go b/drivers/teambition/meta.go
old mode 100644
new mode 100755
diff --git a/drivers/teambition/types.go b/drivers/teambition/types.go
old mode 100644
new mode 100755
diff --git a/drivers/teambition/util.go b/drivers/teambition/util.go
old mode 100644
new mode 100755
index 04f222de..79de7007
--- a/drivers/teambition/util.go
+++ b/drivers/teambition/util.go
@@ -126,19 +126,20 @@ func (d *Teambition) upload(ctx context.Context, file model.FileStreamer, token
prefix = "us-tcs"
}
var newFile FileUpload
- _, err := base.RestyClient.R().
+ res, err := base.RestyClient.R().
SetContext(ctx).
SetResult(&newFile).SetHeader("Authorization", token).
SetMultipartFormData(map[string]string{
- "name": file.GetName(),
- "type": file.GetMimetype(),
- "size": strconv.FormatInt(file.GetSize(), 10),
- //"lastModifiedDate": "",
+ "name": file.GetName(),
+ "type": file.GetMimetype(),
+ "size": strconv.FormatInt(file.GetSize(), 10),
+ "lastModifiedDate": time.Now().Format("Mon Jan 02 2006 15:04:05 GMT+0800 (中国标准时间)"),
}).SetMultipartField("file", file.GetName(), file.GetMimetype(), file).
Post(fmt.Sprintf("https://%s.teambition.net/upload", prefix))
if err != nil {
return nil, err
}
+ log.Debugf("[teambition] upload response: %s", res.String())
return &newFile, nil
}
@@ -189,7 +190,7 @@ func (d *Teambition) chunkUpload(ctx context.Context, file model.FileStreamer, t
if err != nil {
return nil, err
}
- up(i * 100 / newChunk.Chunks)
+ up(float64(i) * 100 / float64(newChunk.Chunks))
}
_, err = base.RestyClient.R().SetHeader("Authorization", token).Post(
fmt.Sprintf("https://%s.teambition.net/upload/chunk/%s",
diff --git a/drivers/template/driver.go b/drivers/template/driver.go
old mode 100644
new mode 100755
index bc22b928..439f57f3
--- a/drivers/template/driver.go
+++ b/drivers/template/driver.go
@@ -41,24 +41,24 @@ func (d *Template) Link(ctx context.Context, file model.Obj, args model.LinkArgs
return nil, errs.NotImplement
}
-func (d *Template) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) error {
+func (d *Template) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) (model.Obj, error) {
// TODO create folder, optional
- return errs.NotImplement
+ return nil, errs.NotImplement
}
-func (d *Template) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
+func (d *Template) Move(ctx context.Context, srcObj, dstDir model.Obj) (model.Obj, error) {
// TODO move obj, optional
- return errs.NotImplement
+ return nil, errs.NotImplement
}
-func (d *Template) Rename(ctx context.Context, srcObj model.Obj, newName string) error {
+func (d *Template) Rename(ctx context.Context, srcObj model.Obj, newName string) (model.Obj, error) {
// TODO rename obj, optional
- return errs.NotImplement
+ return nil, errs.NotImplement
}
-func (d *Template) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
+func (d *Template) Copy(ctx context.Context, srcObj, dstDir model.Obj) (model.Obj, error) {
// TODO copy obj, optional
- return errs.NotImplement
+ return nil, errs.NotImplement
}
func (d *Template) Remove(ctx context.Context, obj model.Obj) error {
@@ -66,9 +66,9 @@ func (d *Template) Remove(ctx context.Context, obj model.Obj) error {
return errs.NotImplement
}
-func (d *Template) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
+func (d *Template) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) (model.Obj, error) {
// TODO upload file, optional
- return errs.NotImplement
+ return nil, errs.NotImplement
}
//func (d *Template) Other(ctx context.Context, args model.OtherArgs) (interface{}, error) {
diff --git a/drivers/template/meta.go b/drivers/template/meta.go
old mode 100644
new mode 100755
diff --git a/drivers/template/types.go b/drivers/template/types.go
old mode 100644
new mode 100755
diff --git a/drivers/template/util.go b/drivers/template/util.go
old mode 100644
new mode 100755
diff --git a/drivers/terabox/driver.go b/drivers/terabox/driver.go
old mode 100644
new mode 100755
index 4c4ad8b5..c9662fce
--- a/drivers/terabox/driver.go
+++ b/drivers/terabox/driver.go
@@ -1,4 +1,4 @@
-package terbox
+package terabox
import (
"bytes"
@@ -6,16 +6,16 @@ import (
"crypto/md5"
"encoding/hex"
"fmt"
- "github.com/alist-org/alist/v3/drivers/base"
- "github.com/alist-org/alist/v3/pkg/utils"
- log "github.com/sirupsen/logrus"
"io"
"math"
- "os"
stdpath "path"
"strconv"
"strings"
+ "github.com/alist-org/alist/v3/drivers/base"
+ "github.com/alist-org/alist/v3/pkg/utils"
+ log "github.com/sirupsen/logrus"
+
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/model"
)
@@ -23,6 +23,7 @@ import (
type Terabox struct {
model.Storage
Addition
+ JsToken string
}
func (d *Terabox) Config() driver.Config {
@@ -116,14 +117,10 @@ func (d *Terabox) Remove(ctx context.Context, obj model.Obj) error {
}
func (d *Terabox) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
- tempFile, err := utils.CreateTempFile(stream.GetReadCloser(), stream.GetSize())
+ tempFile, err := stream.CacheFullInTempFile()
if err != nil {
return err
}
- defer func() {
- _ = tempFile.Close()
- _ = os.Remove(tempFile.Name())
- }()
var Default int64 = 4 * 1024 * 1024
defaultByteData := make([]byte, Default)
count := int(math.Ceil(float64(stream.GetSize()) / float64(Default)))
@@ -170,6 +167,9 @@ func (d *Terabox) Put(ctx context.Context, dstDir model.Obj, stream model.FileSt
return err
}
log.Debugf("%+v", precreateResp)
+ if precreateResp.Errno != 0 {
+ return fmt.Errorf("[terabox] failed to precreate file, errno: %d", precreateResp.Errno)
+ }
if precreateResp.ReturnType == 2 {
return nil
}
@@ -213,7 +213,7 @@ func (d *Terabox) Put(ctx context.Context, dstDir model.Obj, stream model.FileSt
}
log.Debugln(res.String())
if len(precreateResp.BlockList) > 0 {
- up(i * 100 / len(precreateResp.BlockList))
+ up(float64(i) * 100 / float64(len(precreateResp.BlockList)))
}
}
_, err = d.create(rawPath, stream.GetSize(), 0, precreateResp.Uploadid, block_list_str)
diff --git a/drivers/terabox/meta.go b/drivers/terabox/meta.go
old mode 100644
new mode 100755
index 797244b2..63ae5856
--- a/drivers/terabox/meta.go
+++ b/drivers/terabox/meta.go
@@ -1,4 +1,4 @@
-package terbox
+package terabox
import (
"github.com/alist-org/alist/v3/internal/driver"
@@ -7,7 +7,8 @@ import (
type Addition struct {
driver.RootPath
- Cookie string `json:"cookie" required:"true"`
+ Cookie string `json:"cookie" required:"true"`
+ //JsToken string `json:"js_token" type:"string" required:"true"`
DownloadAPI string `json:"download_api" type:"select" options:"official,crack" default:"official"`
OrderBy string `json:"order_by" type:"select" options:"name,time,size" default:"name"`
OrderDirection string `json:"order_direction" type:"select" options:"asc,desc" default:"asc"`
diff --git a/drivers/terabox/types.go b/drivers/terabox/types.go
old mode 100644
new mode 100755
index 25bd99c6..890d5305
--- a/drivers/terabox/types.go
+++ b/drivers/terabox/types.go
@@ -1,9 +1,10 @@
-package terbox
+package terabox
import (
- "github.com/alist-org/alist/v3/internal/model"
"strconv"
"time"
+
+ "github.com/alist-org/alist/v3/internal/model"
)
type File struct {
diff --git a/drivers/terabox/util.go b/drivers/terabox/util.go
old mode 100644
new mode 100755
index 9b1f5fd5..0a4e7879
--- a/drivers/terabox/util.go
+++ b/drivers/terabox/util.go
@@ -1,10 +1,11 @@
-package terbox
+package terabox
import (
"encoding/base64"
"fmt"
"net/http"
"net/url"
+ "regexp"
"strconv"
"strings"
"time"
@@ -15,7 +16,39 @@ import (
"github.com/go-resty/resty/v2"
)
-func (d *Terabox) request(furl string, method string, callback base.ReqCallback, resp interface{}) ([]byte, error) {
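+// getStrBetween extracts the first regexp capture between start and end in raw, or "" if there is no match.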
+func getStrBetween(raw, start, end string) string {
+ regexPattern := fmt.Sprintf(`%s(.*?)%s`, regexp.QuoteMeta(start), regexp.QuoteMeta(end))
+ regex := regexp.MustCompile(regexPattern)
+ matches := regex.FindStringSubmatch(raw)
+ if len(matches) < 2 {
+ return ""
+ }
+ mid := matches[1]
+ return mid
+}
+
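+// resetJsToken reloads the Terabox main page with the stored cookie and pulls a fresh jsToken out of the URL-encoded bootstrap script embedded in the HTML.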
+func (d *Terabox) resetJsToken() error {
+ u := "https://www.terabox.com/main"
+ res, err := base.RestyClient.R().SetHeaders(map[string]string{
+ "Cookie": d.Cookie,
+ "Accept": "application/json, text/plain, */*",
+ "Referer": "https://www.terabox.com/",
+ "User-Agent": base.UserAgent,
+ "X-Requested-With": "XMLHttpRequest",
+ }).Get(u)
+ if err != nil {
+ return err
+ }
+ html := res.String()
+ jsToken := getStrBetween(html, "`function%20fn%28a%29%7Bwindow.jsToken%20%3D%20a%7D%3Bfn%28%22", "%22%29`")
+ if jsToken == "" {
+ return fmt.Errorf("jsToken not found, html: %s", html)
+ }
+ d.JsToken = jsToken
+ return nil
+}
+
+func (d *Terabox) request(furl string, method string, callback base.ReqCallback, resp interface{}, noRetry ...bool) ([]byte, error) {
req := base.RestyClient.R()
req.SetHeaders(map[string]string{
"Cookie": d.Cookie,
@@ -24,10 +57,13 @@ func (d *Terabox) request(furl string, method string, callback base.ReqCallback,
"User-Agent": base.UserAgent,
"X-Requested-With": "XMLHttpRequest",
})
- req.SetQueryParam("app_id", "250528")
- req.SetQueryParam("web", "1")
- req.SetQueryParam("channel", "dubox")
- req.SetQueryParam("clienttype", "0")
+ req.SetQueryParams(map[string]string{
+ "app_id": "250528",
+ "web": "1",
+ "channel": "dubox",
+ "clienttype": "0",
+ "jsToken": d.JsToken,
+ })
if callback != nil {
callback(req)
}
@@ -38,6 +74,17 @@ func (d *Terabox) request(furl string, method string, callback base.ReqCallback,
if err != nil {
return nil, err
}
+ errno := utils.Json.Get(res.Body(), "errno").ToInt()
+ if errno == 4000023 {
+ // errno 4000023 indicates a stale jsToken: refresh it and retry the request once
+ err = d.resetJsToken()
+ if err != nil {
+ return nil, err
+ }
+ if !utils.IsBool(noRetry...) {
+ return d.request(furl, method, callback, resp, true)
+ }
+ }
return res.Body(), nil
}
@@ -186,7 +233,7 @@ func (d *Terabox) manage(opera string, filelist interface{}) ([]byte, error) {
if err != nil {
return nil, err
}
- data := fmt.Sprintf("async=0&filelist=%s&ondup=newcopy", string(marshal))
+ data := fmt.Sprintf("async=0&filelist=%s&ondup=newcopy", encodeURIComponent(string(marshal)))
return d.post("/api/filemanager", params, data, nil)
}
diff --git a/drivers/thunder/driver.go b/drivers/thunder/driver.go
old mode 100644
new mode 100755
index 8b91b5a9..cac6733f
--- a/drivers/thunder/driver.go
+++ b/drivers/thunder/driver.go
@@ -3,9 +3,7 @@ package thunder
import (
"context"
"fmt"
- "io"
"net/http"
- "os"
"strings"
"github.com/alist-org/alist/v3/drivers/base"
@@ -14,6 +12,7 @@ import (
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/internal/op"
"github.com/alist-org/alist/v3/pkg/utils"
+ hash_extend "github.com/alist-org/alist/v3/pkg/utils/hash"
"github.com/aws/aws-sdk-go/aws"
"github.com/aws/aws-sdk-go/aws/credentials"
"github.com/aws/aws-sdk-go/aws/session"
@@ -333,25 +332,22 @@ func (xc *XunLeiCommon) Remove(ctx context.Context, obj model.Obj) error {
}
func (xc *XunLeiCommon) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
- tempFile, err := utils.CreateTempFile(stream.GetReadCloser(), stream.GetSize())
- if err != nil {
- return err
- }
- defer func() {
- _ = tempFile.Close()
- _ = os.Remove(tempFile.Name())
- }()
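+ // reuse the stream's GCID hash when present; otherwise cache the upload to a temp file and compute it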
+ hi := stream.GetHash()
+ gcid := hi.GetHash(hash_extend.GCID)
+ if len(gcid) < hash_extend.GCID.Width {
+ tFile, err := stream.CacheFullInTempFile()
+ if err != nil {
+ return err
+ }
- gcid, err := getGcid(tempFile, stream.GetSize())
- if err != nil {
- return err
- }
- if _, err := tempFile.Seek(0, io.SeekStart); err != nil {
- return err
+ gcid, err = utils.HashFile(hash_extend.GCID, tFile, stream.GetSize())
+ if err != nil {
+ return err
+ }
}
var resp UploadTaskResponse
- _, err = xc.Request(FILE_API_URL, http.MethodPost, func(r *resty.Request) {
+ _, err := xc.Request(FILE_API_URL, http.MethodPost, func(r *resty.Request) {
r.SetContext(ctx)
r.SetBody(&base.Json{
"kind": FILE,
@@ -381,7 +377,7 @@ func (xc *XunLeiCommon) Put(ctx context.Context, dstDir model.Obj, stream model.
Bucket: aws.String(param.Bucket),
Key: aws.String(param.Key),
Expires: aws.Time(param.Expiration),
- Body: tempFile,
+ Body: stream,
})
return err
}
diff --git a/drivers/thunder/meta.go b/drivers/thunder/meta.go
old mode 100644
new mode 100755
diff --git a/drivers/thunder/types.go b/drivers/thunder/types.go
old mode 100644
new mode 100755
index 0c60dc56..7c223673
--- a/drivers/thunder/types.go
+++ b/drivers/thunder/types.go
@@ -4,6 +4,10 @@ import (
"fmt"
"strconv"
"time"
+
+ "github.com/alist-org/alist/v3/internal/model"
+ "github.com/alist-org/alist/v3/pkg/utils"
+ hash_extend "github.com/alist-org/alist/v3/pkg/utils/hash"
)
type ErrResp struct {
@@ -84,6 +88,8 @@ type Link struct {
Type string `json:"type"`
}
+var _ model.Obj = (*Files)(nil)
+
type Files struct {
Kind string `json:"kind"`
ID string `json:"id"`
@@ -100,39 +106,39 @@ type Files struct {
ModifiedTime time.Time `json:"modified_time"`
IconLink string `json:"icon_link"`
ThumbnailLink string `json:"thumbnail_link"`
- //Md5Checksum string `json:"md5_checksum"`
- //Hash string `json:"hash"`
- Links map[string]Link `json:"links"`
- Phase string `json:"phase"`
- Audit struct {
- Status string `json:"status"`
- Message string `json:"message"`
- Title string `json:"title"`
- } `json:"audit"`
+ // Md5Checksum string `json:"md5_checksum"`
+ Hash string `json:"hash"`
+ // Links map[string]Link `json:"links"`
+ // Phase string `json:"phase"`
+ // Audit struct {
+ // Status string `json:"status"`
+ // Message string `json:"message"`
+ // Title string `json:"title"`
+ // } `json:"audit"`
Medias []struct {
- Category string `json:"category"`
- IconLink string `json:"icon_link"`
- IsDefault bool `json:"is_default"`
- IsOrigin bool `json:"is_origin"`
- IsVisible bool `json:"is_visible"`
- Link Link `json:"link"`
- MediaID string `json:"media_id"`
- MediaName string `json:"media_name"`
- NeedMoreQuota bool `json:"need_more_quota"`
- Priority int `json:"priority"`
- RedirectLink string `json:"redirect_link"`
- ResolutionName string `json:"resolution_name"`
- Video struct {
- AudioCodec string `json:"audio_codec"`
- BitRate int `json:"bit_rate"`
- Duration int `json:"duration"`
- FrameRate int `json:"frame_rate"`
- Height int `json:"height"`
- VideoCodec string `json:"video_codec"`
- VideoType string `json:"video_type"`
- Width int `json:"width"`
- } `json:"video"`
- VipTypes []string `json:"vip_types"`
+ //Category string `json:"category"`
+ //IconLink string `json:"icon_link"`
+ //IsDefault bool `json:"is_default"`
+ //IsOrigin bool `json:"is_origin"`
+ //IsVisible bool `json:"is_visible"`
+ Link Link `json:"link"`
+ //MediaID string `json:"media_id"`
+ //MediaName string `json:"media_name"`
+ //NeedMoreQuota bool `json:"need_more_quota"`
+ //Priority int `json:"priority"`
+ //RedirectLink string `json:"redirect_link"`
+ //ResolutionName string `json:"resolution_name"`
+ // Video struct {
+ // AudioCodec string `json:"audio_codec"`
+ // BitRate int `json:"bit_rate"`
+ // Duration int `json:"duration"`
+ // FrameRate int `json:"frame_rate"`
+ // Height int `json:"height"`
+ // VideoCodec string `json:"video_codec"`
+ // VideoType string `json:"video_type"`
+ // Width int `json:"width"`
+ // } `json:"video"`
+ // VipTypes []string `json:"vip_types"`
} `json:"medias"`
Trashed bool `json:"trashed"`
DeleteTime string `json:"delete_time"`
@@ -146,13 +152,18 @@ type Files struct {
//Collection interface{} `json:"collection"`
}
-func (c *Files) GetSize() int64 { size, _ := strconv.ParseInt(c.Size, 10, 64); return size }
-func (c *Files) GetName() string { return c.Name }
-func (c *Files) ModTime() time.Time { return c.ModifiedTime }
-func (c *Files) IsDir() bool { return c.Kind == FOLDER }
-func (c *Files) GetID() string { return c.ID }
-func (c *Files) GetPath() string { return "" }
-func (c *Files) Thumb() string { return c.ThumbnailLink }
+func (c *Files) GetHash() utils.HashInfo {
+ return utils.NewHashInfo(hash_extend.GCID, c.Hash)
+}
+
+func (c *Files) GetSize() int64 { size, _ := strconv.ParseInt(c.Size, 10, 64); return size }
+func (c *Files) GetName() string { return c.Name }
+func (c *Files) CreateTime() time.Time { return c.CreatedTime }
+func (c *Files) ModTime() time.Time { return c.ModifiedTime }
+func (c *Files) IsDir() bool { return c.Kind == FOLDER }
+func (c *Files) GetID() string { return c.ID }
+func (c *Files) GetPath() string { return "" }
+func (c *Files) Thumb() string { return c.ThumbnailLink }
/*
* Upload
diff --git a/drivers/thunder/util.go b/drivers/thunder/util.go
old mode 100644
new mode 100755
diff --git a/drivers/trainbit/driver.go b/drivers/trainbit/driver.go
old mode 100644
new mode 100755
index 63bd0627..795b2fb8
--- a/drivers/trainbit/driver.go
+++ b/drivers/trainbit/driver.go
@@ -5,7 +5,6 @@ import (
"encoding/json"
"fmt"
"io"
- "math"
"net/http"
"net/url"
"strings"
@@ -128,7 +127,7 @@ func (d *Trainbit) Put(ctx context.Context, dstDir model.Obj, stream model.FileS
stream,
func(byteNum int) {
total += int64(byteNum)
- up(int(math.Round(float64(total) / float64(stream.GetSize()) * 100)))
+ up(float64(total) / float64(stream.GetSize()) * 100)
},
}
req, err := http.NewRequest(http.MethodPost, endpoint.String(), progressReader)
diff --git a/drivers/trainbit/meta.go b/drivers/trainbit/meta.go
old mode 100644
new mode 100755
index 59c09d77..4009268f
--- a/drivers/trainbit/meta.go
+++ b/drivers/trainbit/meta.go
@@ -8,18 +8,18 @@ import (
type Addition struct {
driver.RootID
AUSHELLPORTAL string `json:"AUSHELLPORTAL" required:"true"`
- ApiKey string `json:"apikey" required:"true"`
+ ApiKey string `json:"apikey" required:"true"`
}
var config = driver.Config{
- Name: "Trainbit",
- LocalSort: false,
- OnlyLocal: false,
- OnlyProxy: false,
- NoCache: false,
- NoUpload: false,
- NeedMs: false,
- DefaultRoot: "0_000",
+ Name: "Trainbit",
+ LocalSort: false,
+ OnlyLocal: false,
+ OnlyProxy: false,
+ NoCache: false,
+ NoUpload: false,
+ NeedMs: false,
+ DefaultRoot: "0_000",
}
func init() {
diff --git a/drivers/trainbit/types.go b/drivers/trainbit/types.go
old mode 100644
new mode 100755
index 4de1a0ab..1b30f4c5
--- a/drivers/trainbit/types.go
+++ b/drivers/trainbit/types.go
@@ -1 +1 @@
-package trainbit
\ No newline at end of file
+package trainbit
diff --git a/drivers/trainbit/util.go b/drivers/trainbit/util.go
old mode 100644
new mode 100755
diff --git a/drivers/url_tree/driver.go b/drivers/url_tree/driver.go
old mode 100644
new mode 100755
diff --git a/drivers/url_tree/meta.go b/drivers/url_tree/meta.go
old mode 100644
new mode 100755
diff --git a/drivers/url_tree/types.go b/drivers/url_tree/types.go
old mode 100644
new mode 100755
diff --git a/drivers/url_tree/urls_test.go b/drivers/url_tree/urls_test.go
old mode 100644
new mode 100755
diff --git a/drivers/url_tree/util.go b/drivers/url_tree/util.go
old mode 100644
new mode 100755
diff --git a/drivers/uss/driver.go b/drivers/uss/driver.go
old mode 100644
new mode 100755
index 2ca19c0a..447515d8
--- a/drivers/uss/driver.go
+++ b/drivers/uss/driver.go
@@ -80,7 +80,11 @@ func (d *USS) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*m
downExp := time.Hour * time.Duration(d.SignURLExpire)
expireAt := time.Now().Add(downExp).Unix()
upd := url.QueryEscape(path.Base(file.GetPath()))
- signStr := strings.Join([]string{d.OperatorPassword, fmt.Sprint(expireAt), fmt.Sprintf("/%s", key)}, "&")
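+ // sign with the anti-theft chain token when configured, otherwise fall back to the operator password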
+ tokenOrPassword := d.AntiTheftChainToken
+ if tokenOrPassword == "" {
+ tokenOrPassword = d.OperatorPassword
+ }
+ signStr := strings.Join([]string{tokenOrPassword, fmt.Sprint(expireAt), fmt.Sprintf("/%s", key)}, "&")
upt := utils.GetMD5EncodeStr(signStr)[12:20] + fmt.Sprint(expireAt)
link := fmt.Sprintf("%s?_upd=%s&_upt=%s", u, upd, upt)
return &model.Link{URL: link}, nil
diff --git a/drivers/uss/meta.go b/drivers/uss/meta.go
old mode 100644
new mode 100755
index 4ed7c96e..388df589
--- a/drivers/uss/meta.go
+++ b/drivers/uss/meta.go
@@ -7,10 +7,11 @@ import (
type Addition struct {
driver.RootPath
- Bucket string `json:"bucket" required:"true"`
- Endpoint string `json:"endpoint" required:"true"`
- OperatorName string `json:"operator_name" required:"true"`
- OperatorPassword string `json:"operator_password" required:"true"`
+ Bucket string `json:"bucket" required:"true"`
+ Endpoint string `json:"endpoint" required:"true"`
+ OperatorName string `json:"operator_name" required:"true"`
+ OperatorPassword string `json:"operator_password" required:"true"`
+ AntiTheftChainToken string `json:"anti_theft_chain_token" required:"false" default:""`
//CustomHost string `json:"custom_host"` // removed: Endpoint and CustomHost serve the same purpose
SignURLExpire int `json:"sign_url_expire" type:"number" default:"4"`
}
diff --git a/drivers/uss/types.go b/drivers/uss/types.go
old mode 100644
new mode 100755
diff --git a/drivers/uss/util.go b/drivers/uss/util.go
old mode 100644
new mode 100755
diff --git a/drivers/virtual/driver.go b/drivers/virtual/driver.go
old mode 100644
new mode 100755
index e76f9536..d5d37e6b
--- a/drivers/virtual/driver.go
+++ b/drivers/virtual/driver.go
@@ -34,61 +34,79 @@ func (d *Virtual) GetAddition() driver.Additional {
func (d *Virtual) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
var res []model.Obj
for i := 0; i < d.NumFile; i++ {
- res = append(res, &model.Object{
- Name: random.String(10),
- Size: random.RangeInt64(d.MinFileSize, d.MaxFileSize),
- IsFolder: false,
- Modified: time.Now(),
- })
+ res = append(res, d.genObj(false))
}
for i := 0; i < d.NumFolder; i++ {
- res = append(res, &model.Object{
- Name: random.String(10),
- Size: 0,
- IsFolder: true,
- Modified: time.Now(),
- })
+ res = append(res, d.genObj(true))
}
return res, nil
}
-type nopReadSeekCloser struct {
+type DummyMFile struct {
io.Reader
}
-func (nopReadSeekCloser) Seek(offset int64, whence int) (int64, error) {
+func (f DummyMFile) Read(p []byte) (n int, err error) {
+ return f.Reader.Read(p)
+}
+
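+// ReadAt deliberately ignores the offset: the backing reader produces random bytes, so positional reads carry no meaning for this dummy file.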
+func (f DummyMFile) ReadAt(p []byte, off int64) (n int, err error) {
+ return f.Reader.Read(p)
+}
+
+func (f DummyMFile) Close() error {
+ return nil
+}
+
+func (DummyMFile) Seek(offset int64, whence int) (int64, error) {
return offset, nil
}
-func (nopReadSeekCloser) Close() error { return nil }
func (d *Virtual) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
return &model.Link{
- ReadSeekCloser: nopReadSeekCloser{io.LimitReader(random.Rand, file.GetSize())},
+ MFile: DummyMFile{Reader: random.Rand},
}, nil
}
-func (d *Virtual) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) error {
- return nil
+func (d *Virtual) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) (model.Obj, error) {
+ dir := &model.Object{
+ Name: dirName,
+ Size: 0,
+ IsFolder: true,
+ Modified: time.Now(),
+ }
+ return dir, nil
}
-func (d *Virtual) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
- return nil
+func (d *Virtual) Move(ctx context.Context, srcObj, dstDir model.Obj) (model.Obj, error) {
+ return srcObj, nil
}
-func (d *Virtual) Rename(ctx context.Context, srcObj model.Obj, newName string) error {
- return nil
+func (d *Virtual) Rename(ctx context.Context, srcObj model.Obj, newName string) (model.Obj, error) {
+ obj := &model.Object{
+ Name: newName,
+ Size: srcObj.GetSize(),
+ IsFolder: srcObj.IsDir(),
+ Modified: time.Now(),
+ }
+ return obj, nil
}
-func (d *Virtual) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
- return nil
+func (d *Virtual) Copy(ctx context.Context, srcObj, dstDir model.Obj) (model.Obj, error) {
+ return srcObj, nil
}
func (d *Virtual) Remove(ctx context.Context, obj model.Obj) error {
return nil
}
-func (d *Virtual) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
- return nil
+func (d *Virtual) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) (model.Obj, error) {
+ file := &model.Object{
+ Name: stream.GetName(),
+ Size: stream.GetSize(),
+ Modified: time.Now(),
+ }
+ return file, nil
}
var _ driver.Driver = (*Virtual)(nil)
diff --git a/drivers/virtual/meta.go b/drivers/virtual/meta.go
old mode 100644
new mode 100755
diff --git a/drivers/virtual/util.go b/drivers/virtual/util.go
new file mode 100755
index 00000000..5ed8314c
--- /dev/null
+++ b/drivers/virtual/util.go
@@ -0,0 +1,22 @@
+package virtual
+
+import (
+ "time"
+
+ "github.com/alist-org/alist/v3/internal/model"
+ "github.com/alist-org/alist/v3/pkg/utils/random"
+)
+
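+// genObj fabricates a random directory entry; when dir is false it becomes a file with a random size between MinFileSize and MaxFileSize.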
+func (d *Virtual) genObj(dir bool) model.Obj {
+ obj := &model.Object{
+ Name: random.String(10),
+ Size: 0,
+ IsFolder: true,
+ Modified: time.Now(),
+ }
+ if !dir {
+ obj.Size = random.RangeInt64(d.MinFileSize, d.MaxFileSize)
+ obj.IsFolder = false
+ }
+ return obj
+}
diff --git a/drivers/vtencent/drive.go b/drivers/vtencent/drive.go
new file mode 100755
index 00000000..67643143
--- /dev/null
+++ b/drivers/vtencent/drive.go
@@ -0,0 +1,208 @@
+package vtencent
+
+import (
+ "context"
+ "encoding/json"
+ "fmt"
+ "net/http"
+ "strconv"
+ "time"
+
+ "github.com/alist-org/alist/v3/drivers/base"
+ "github.com/alist-org/alist/v3/internal/driver"
+ "github.com/alist-org/alist/v3/internal/errs"
+ "github.com/alist-org/alist/v3/internal/model"
+ "github.com/alist-org/alist/v3/internal/op"
+ "github.com/alist-org/alist/v3/pkg/cron"
+ "github.com/alist-org/alist/v3/pkg/utils"
+ "github.com/go-resty/resty/v2"
+)
+
+type Vtencent struct {
+ model.Storage
+ Addition
+ cron *cron.Cron
+ config driver.Config
+ conf Conf
+}
+
+func (d *Vtencent) Config() driver.Config {
+ return d.config
+}
+
+func (d *Vtencent) GetAddition() driver.Additional {
+ return &d.Addition
+}
+
+func (d *Vtencent) Init(ctx context.Context) error {
+ tfUid, err := d.LoadUser()
+ if err != nil {
+ d.Status = err.Error()
+ op.MustSaveDriverStorage(d)
+ return nil
+ }
+ d.Addition.TfUid = tfUid
+ op.MustSaveDriverStorage(d)
+ d.cron = cron.NewCron(time.Hour * 12)
+ d.cron.Do(func() {
+ _, err := d.LoadUser()
+ if err != nil {
+ d.Status = err.Error()
+ op.MustSaveDriverStorage(d)
+ }
+ })
+ return nil
+}
+
+func (d *Vtencent) Drop(ctx context.Context) error {
+ d.cron.Stop()
+ return nil
+}
+
+func (d *Vtencent) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
+ files, err := d.GetFiles(dir.GetID())
+ if err != nil {
+ return nil, err
+ }
+ return utils.SliceConvert(files, func(src File) (model.Obj, error) {
+ return fileToObj(src), nil
+ })
+}
+
+func (d *Vtencent) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
+ form := fmt.Sprintf(`{"MaterialIds":["%s"]}`, file.GetID())
+ var dat map[string]interface{}
+ if err := json.Unmarshal([]byte(form), &dat); err != nil {
+ return nil, err
+ }
+ var resps RspDown
+ api := "https://api.vs.tencent.com/SaaS/Material/DescribeMaterialDownloadUrl"
+ rsp, err := d.request(api, http.MethodPost, func(req *resty.Request) {
+ req.SetBody(dat)
+ }, &resps)
+ if err != nil {
+ return nil, err
+ }
+ if err := json.Unmarshal(rsp, &resps); err != nil {
+ return nil, err
+ }
+ if len(resps.Data.DownloadURLInfoSet) == 0 {
+ return nil, fmt.Errorf("vtencent: empty DownloadURLInfoSet for material %s", file.GetID())
+ }
+ u := resps.Data.DownloadURLInfoSet[0].DownloadURL
+ link := &model.Link{
+ URL: u,
+ Header: http.Header{
+ "Referer": []string{d.conf.referer},
+ "User-Agent": []string{d.conf.ua},
+ },
+ Concurrency: 2,
+ PartSize: 10 * utils.MB,
+ }
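+ // disable chunked download for zero-size objects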
+ if file.GetSize() == 0 {
+ link.Concurrency = 0
+ link.PartSize = 0
+ }
+ return link, nil
+}
+
+func (d *Vtencent) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) error {
+ classId, err := strconv.Atoi(parentDir.GetID())
+ if err != nil {
+ return err
+ }
+ _, err = d.request("https://api.vs.tencent.com/PaaS/Material/CreateClass", http.MethodPost, func(req *resty.Request) {
+ req.SetBody(base.Json{
+ "Owner": base.Json{
+ "Type": "PERSON",
+ "Id": d.TfUid,
+ },
+ "ParentClassId": classId,
+ "Name": dirName,
+ "VerifySign": ""})
+ }, nil)
+ return err
+}
+
+func (d *Vtencent) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
+ srcType := "MATERIAL"
+ if srcObj.IsDir() {
+ srcType = "CLASS"
+ }
+ form := fmt.Sprintf(`{"SourceInfos":[
+ {"Owner":{"Id":"%s","Type":"PERSON"},
+ "Resource":{"Type":"%s","Id":"%s"}}
+ ],
+ "Destination":{"Owner":{"Id":"%s","Type":"PERSON"},
+ "Resource":{"Type":"CLASS","Id":"%s"}}
+ }`, d.TfUid, srcType, srcObj.GetID(), d.TfUid, dstDir.GetID())
+ var dat map[string]interface{}
+ if err := json.Unmarshal([]byte(form), &dat); err != nil {
+ return err
+ }
+ _, err := d.request("https://api.vs.tencent.com/PaaS/Material/MoveResource", http.MethodPost, func(req *resty.Request) {
+ req.SetBody(dat)
+ }, nil)
+ return err
+}
+
+func (d *Vtencent) Rename(ctx context.Context, srcObj model.Obj, newName string) error {
+ api := "https://api.vs.tencent.com/PaaS/Material/ModifyMaterial"
+ form := fmt.Sprintf(`{
+ "Owner":{"Type":"PERSON","Id":"%s"},
+ "MaterialId":"%s","Name":"%s"}`, d.TfUid, srcObj.GetID(), newName)
+ if srcObj.IsDir() {
+ classId, err := strconv.Atoi(srcObj.GetID())
+ if err != nil {
+ return err
+ }
+ api = "https://api.vs.tencent.com/PaaS/Material/ModifyClass"
+ form = fmt.Sprintf(`{"Owner":{"Type":"PERSON","Id":"%s"},
+ "ClassId":%d,"Name":"%s"}`, d.TfUid, classId, newName)
+ }
+ var dat map[string]interface{}
+ if err := json.Unmarshal([]byte(form), &dat); err != nil {
+ return err
+ }
+ _, err := d.request(api, http.MethodPost, func(req *resty.Request) {
+ req.SetBody(dat)
+ }, nil)
+ return err
+}
+
+func (d *Vtencent) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
+ // TODO copy obj, optional
+ return errs.NotImplement
+}
+
+func (d *Vtencent) Remove(ctx context.Context, obj model.Obj) error {
+ srcType := "MATERIAL"
+ if obj.IsDir() {
+ srcType = "CLASS"
+ }
+ form := fmt.Sprintf(`{
+ "SourceInfos":[
+ {"Owner":{"Type":"PERSON","Id":"%s"},
+ "Resource":{"Type":"%s","Id":"%s"}}
+ ]
+ }`, d.TfUid, srcType, obj.GetID())
+ var dat map[string]interface{}
+ if err := json.Unmarshal([]byte(form), &dat); err != nil {
+ return err
+ }
+ _, err := d.request("https://api.vs.tencent.com/PaaS/Material/DeleteResource", http.MethodPost, func(req *resty.Request) {
+ req.SetBody(dat)
+ }, nil)
+ return err
+}
+
+func (d *Vtencent) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
+ err := d.FileUpload(ctx, dstDir, stream, up)
+ return err
+}
+
+//func (d *Vtencent) Other(ctx context.Context, args model.OtherArgs) (interface{}, error) {
+// return nil, errs.NotSupport
+//}
+
+var _ driver.Driver = (*Vtencent)(nil)
diff --git a/drivers/vtencent/meta.go b/drivers/vtencent/meta.go
new file mode 100755
index 00000000..3bb6cf74
--- /dev/null
+++ b/drivers/vtencent/meta.go
@@ -0,0 +1,39 @@
+package vtencent
+
+import (
+ "github.com/alist-org/alist/v3/internal/driver"
+ "github.com/alist-org/alist/v3/internal/op"
+)
+
+type Addition struct {
+ driver.RootID
+ Cookie string `json:"cookie" required:"true"`
+ TfUid string `json:"tf_uid"`
+ OrderBy string `json:"order_by" type:"select" options:"Name,Size,UpdateTime,CreatTime"`
+ OrderDirection string `json:"order_direction" type:"select" options:"Asc,Desc"`
+}
+
+type Conf struct {
+ ua string
+ referer string
+ origin string
+}
+
+func init() {
+ op.RegisterDriver(func() driver.Driver {
+ return &Vtencent{
+ config: driver.Config{
+ Name: "VTencent",
+ OnlyProxy: true,
+ OnlyLocal: false,
+ DefaultRoot: "9",
+ NoOverwriteUpload: true,
+ },
+ conf: Conf{
+ ua: "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) quark-cloud-drive/2.5.20 Chrome/100.0.4896.160 Electron/18.3.5.4-b478491100 Safari/537.36 Channel/pckk_other_ch",
+ referer: "https://app.v.tencent.com/",
+ origin: "https://app.v.tencent.com",
+ },
+ }
+ })
+}
diff --git a/drivers/vtencent/signature.go b/drivers/vtencent/signature.go
new file mode 100755
index 00000000..14fda9bd
--- /dev/null
+++ b/drivers/vtencent/signature.go
@@ -0,0 +1,33 @@
+package vtencent
+
+import (
+ "crypto/hmac"
+ "crypto/sha1"
+ "encoding/hex"
+)
+
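+// QSignatureKey derives a sign key as hex(HMAC-SHA1(key, timeKey)), hashes the request path with SHA-1, and signs the "sha1\n<timeKey>\n<pathHash>\n" string-to-sign with that key, returning the hex digest (Tencent COS-style request signing).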
+func QSignatureKey(timeKey string, signPath string, key string) string {
+ signKey := hmac.New(sha1.New, []byte(key))
+ signKey.Write([]byte(timeKey))
+ signKeyBytes := signKey.Sum(nil)
+ signKeyHex := hex.EncodeToString(signKeyBytes)
+ sha := sha1.New()
+ sha.Write([]byte(signPath))
+ shaBytes := sha.Sum(nil)
+ shaHex := hex.EncodeToString(shaBytes)
+
+ O := "sha1\n" + timeKey + "\n" + shaHex + "\n"
+ dataSignKey := hmac.New(sha1.New, []byte(signKeyHex))
+ dataSignKey.Write([]byte(O))
+ dataSignKeyBytes := dataSignKey.Sum(nil)
+ dataSignKeyHex := hex.EncodeToString(dataSignKeyBytes)
+ return dataSignKeyHex
+}
+
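+// QTwoSignatureKey returns the hex-encoded HMAC-SHA1 of timeKey keyed with key.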
+func QTwoSignatureKey(timeKey string, key string) string {
+ signKey := hmac.New(sha1.New, []byte(key))
+ signKey.Write([]byte(timeKey))
+ signKeyBytes := signKey.Sum(nil)
+ signKeyHex := hex.EncodeToString(signKeyBytes)
+ return signKeyHex
+}
diff --git a/drivers/vtencent/types.go b/drivers/vtencent/types.go
new file mode 100755
index 00000000..b967481e
--- /dev/null
+++ b/drivers/vtencent/types.go
@@ -0,0 +1,252 @@
+package vtencent
+
+import (
+ "strconv"
+ "time"
+
+ "github.com/alist-org/alist/v3/internal/model"
+)
+
+type RespErr struct {
+ Code string `json:"Code"`
+ Message string `json:"Message"`
+}
+
+type Reqfiles struct {
+ ScrollToken string `json:"ScrollToken"`
+ Text string `json:"Text"`
+ Offset int `json:"Offset"`
+ Limit int `json:"Limit"`
+ Sort struct {
+ Field string `json:"Field"`
+ Order string `json:"Order"`
+ } `json:"Sort"`
+ CreateTimeRanges []any `json:"CreateTimeRanges"`
+ MaterialTypes []any `json:"MaterialTypes"`
+ ReviewStatuses []any `json:"ReviewStatuses"`
+ Tags []any `json:"Tags"`
+ SearchScopes []struct {
+ Owner struct {
+ Type string `json:"Type"`
+ ID string `json:"Id"`
+ } `json:"Owner"`
+ ClassID int `json:"ClassId"`
+ SearchOneDepth bool `json:"SearchOneDepth"`
+ } `json:"SearchScopes"`
+}
+
+type File struct {
+ Type string `json:"Type"`
+ ClassInfo struct {
+ ClassID int `json:"ClassId"`
+ Name string `json:"Name"`
+ UpdateTime time.Time `json:"UpdateTime"`
+ CreateTime time.Time `json:"CreateTime"`
+ FileInboxID string `json:"FileInboxId"`
+ Owner struct {
+ Type string `json:"Type"`
+ ID string `json:"Id"`
+ } `json:"Owner"`
+ ClassPath string `json:"ClassPath"`
+ ParentClassID int `json:"ParentClassId"`
+ AttachmentInfo struct {
+ SubClassCount int `json:"SubClassCount"`
+ MaterialCount int `json:"MaterialCount"`
+ Size int64 `json:"Size"`
+ } `json:"AttachmentInfo"`
+ ClassPreviewURLSet []string `json:"ClassPreviewUrlSet"`
+ } `json:"ClassInfo"`
+ MaterialInfo struct {
+ BasicInfo struct {
+ MaterialID string `json:"MaterialId"`
+ MaterialType string `json:"MaterialType"`
+ Name string `json:"Name"`
+ CreateTime time.Time `json:"CreateTime"`
+ UpdateTime time.Time `json:"UpdateTime"`
+ ClassPath string `json:"ClassPath"`
+ ClassID int `json:"ClassId"`
+ TagInfoSet []any `json:"TagInfoSet"`
+ TagSet []any `json:"TagSet"`
+ PreviewURL string `json:"PreviewUrl"`
+ MediaURL string `json:"MediaUrl"`
+ UnifiedMediaPreviewURL string `json:"UnifiedMediaPreviewUrl"`
+ Owner struct {
+ Type string `json:"Type"`
+ ID string `json:"Id"`
+ } `json:"Owner"`
+ PermissionSet any `json:"PermissionSet"`
+ PermissionInfoSet []any `json:"PermissionInfoSet"`
+ TfUID string `json:"TfUid"`
+ GroupID string `json:"GroupId"`
+ VersionMaterialIDSet []any `json:"VersionMaterialIdSet"`
+ FileType string `json:"FileType"`
+ CmeMaterialPlayList []any `json:"CmeMaterialPlayList"`
+ Status string `json:"Status"`
+ DownloadSwitch string `json:"DownloadSwitch"`
+ } `json:"BasicInfo"`
+ MediaInfo struct {
+ Width int `json:"Width"`
+ Height int `json:"Height"`
+ Size int `json:"Size"`
+ Duration float64 `json:"Duration"`
+ Fps int `json:"Fps"`
+ BitRate int `json:"BitRate"`
+ Codec string `json:"Codec"`
+ MediaType string `json:"MediaType"`
+ FavoriteStatus string `json:"FavoriteStatus"`
+ } `json:"MediaInfo"`
+ MaterialStatus struct {
+ ContentReviewStatus string `json:"ContentReviewStatus"`
+ EditorUsableStatus string `json:"EditorUsableStatus"`
+ UnifiedPreviewStatus string `json:"UnifiedPreviewStatus"`
+ EditPreviewImageSpiritStatus string `json:"EditPreviewImageSpiritStatus"`
+ TranscodeStatus string `json:"TranscodeStatus"`
+ AdaptiveStreamingStatus string `json:"AdaptiveStreamingStatus"`
+ StreamConnectable string `json:"StreamConnectable"`
+ AiAnalysisStatus string `json:"AiAnalysisStatus"`
+ AiRecognitionStatus string `json:"AiRecognitionStatus"`
+ } `json:"MaterialStatus"`
+ ImageMaterial struct {
+ Height int `json:"Height"`
+ Width int `json:"Width"`
+ Size int `json:"Size"`
+ MaterialURL string `json:"MaterialUrl"`
+ Resolution string `json:"Resolution"`
+ VodFileID string `json:"VodFileId"`
+ OriginalURL string `json:"OriginalUrl"`
+ } `json:"ImageMaterial"`
+ VideoMaterial struct {
+ MetaData struct {
+ Size int `json:"Size"`
+ Container string `json:"Container"`
+ Bitrate int `json:"Bitrate"`
+ Height int `json:"Height"`
+ Width int `json:"Width"`
+ Duration float64 `json:"Duration"`
+ Rotate int `json:"Rotate"`
+ VideoStreamInfoSet []struct {
+ Bitrate int `json:"Bitrate"`
+ Height int `json:"Height"`
+ Width int `json:"Width"`
+ Codec string `json:"Codec"`
+ Fps int `json:"Fps"`
+ } `json:"VideoStreamInfoSet"`
+ AudioStreamInfoSet []struct {
+ Bitrate int `json:"Bitrate"`
+ SamplingRate int `json:"SamplingRate"`
+ Codec string `json:"Codec"`
+ } `json:"AudioStreamInfoSet"`
+ } `json:"MetaData"`
+ ImageSpriteInfo any `json:"ImageSpriteInfo"`
+ MaterialURL string `json:"MaterialUrl"`
+ CoverURL string `json:"CoverUrl"`
+ Resolution string `json:"Resolution"`
+ VodFileID string `json:"VodFileId"`
+ OriginalURL string `json:"OriginalUrl"`
+ AudioWaveformURL string `json:"AudioWaveformUrl"`
+ SubtitleURL string `json:"SubtitleUrl"`
+ TranscodeInfoSet []any `json:"TranscodeInfoSet"`
+ ImageSpriteInfoSet []any `json:"ImageSpriteInfoSet"`
+ } `json:"VideoMaterial"`
+ } `json:"MaterialInfo"`
+}
+
+type RspFiles struct {
+ Code string `json:"Code"`
+ Message string `json:"Message"`
+ EnglishMessage string `json:"EnglishMessage"`
+ Data struct {
+ TotalCount int `json:"TotalCount"`
+ ResourceInfoSet []File `json:"ResourceInfoSet"`
+ ScrollToken string `json:"ScrollToken"`
+ } `json:"Data"`
+}
+
+type RspDown struct {
+ Code string `json:"Code"`
+ Message string `json:"Message"`
+ EnglishMessage string `json:"EnglishMessage"`
+ Data struct {
+ DownloadURLInfoSet []struct {
+ MaterialID string `json:"MaterialId"`
+ DownloadURL string `json:"DownloadUrl"`
+ } `json:"DownloadUrlInfoSet"`
+ } `json:"Data"`
+}
+
+type RspCreatrMaterial struct {
+ Code string `json:"Code"`
+ Message string `json:"Message"`
+ EnglishMessage string `json:"EnglishMessage"`
+ Data struct {
+ UploadContext string `json:"UploadContext"`
+ VodUploadSign string `json:"VodUploadSign"`
+ QuickUpload bool `json:"QuickUpload"`
+ } `json:"Data"`
+}
+
+type RspApplyUploadUGC struct {
+ Code int `json:"code"`
+ Message string `json:"message"`
+ Data struct {
+ Video struct {
+ StorageSignature string `json:"storageSignature"`
+ StoragePath string `json:"storagePath"`
+ } `json:"video"`
+ StorageAppID int `json:"storageAppId"`
+ StorageBucket string `json:"storageBucket"`
+ StorageRegion string `json:"storageRegion"`
+ StorageRegionV5 string `json:"storageRegionV5"`
+ Domain string `json:"domain"`
+ VodSessionKey string `json:"vodSessionKey"`
+ TempCertificate struct {
+ SecretID string `json:"secretId"`
+ SecretKey string `json:"secretKey"`
+ Token string `json:"token"`
+ ExpiredTime int `json:"expiredTime"`
+ } `json:"tempCertificate"`
+ AppID int `json:"appId"`
+ Timestamp int `json:"timestamp"`
+ StorageRegionV50 string `json:"StorageRegionV5"`
+ MiniProgramAccelerateHost string `json:"MiniProgramAccelerateHost"`
+ } `json:"data"`
+}
+
+type RspCommitUploadUGC struct {
+ Code int `json:"code"`
+ Message string `json:"message"`
+ Data struct {
+ Video struct {
+ URL string `json:"url"`
+ VerifyContent string `json:"verify_content"`
+ } `json:"video"`
+ FileID string `json:"fileId"`
+ } `json:"data"`
+}
+
+type RspFinishUpload struct {
+ Code string `json:"Code"`
+ Message string `json:"Message"`
+ EnglishMessage string `json:"EnglishMessage"`
+ Data struct {
+ MaterialID string `json:"MaterialId"`
+ } `json:"Data"`
+}
+
+func fileToObj(f File) *model.Object {
+ obj := &model.Object{}
+ if f.Type == "CLASS" {
+ obj.Name = f.ClassInfo.Name
+ obj.ID = strconv.Itoa(f.ClassInfo.ClassID)
+ obj.IsFolder = true
+ obj.Modified = f.ClassInfo.CreateTime
+ obj.Size = 0
+ } else if f.Type == "MATERIAL" {
+ obj.Name = f.MaterialInfo.BasicInfo.Name
+ obj.ID = f.MaterialInfo.BasicInfo.MaterialID
+ obj.IsFolder = false
+ obj.Modified = f.MaterialInfo.BasicInfo.CreateTime
+ obj.Size = int64(f.MaterialInfo.MediaInfo.Size)
+ }
+ return obj
+}
diff --git a/drivers/vtencent/util.go b/drivers/vtencent/util.go
new file mode 100755
index 00000000..ad69793e
--- /dev/null
+++ b/drivers/vtencent/util.go
@@ -0,0 +1,288 @@
+package vtencent
+
+import (
+ "context"
+ "crypto/sha1"
+ "encoding/hex"
+ "errors"
+ "fmt"
+ "io"
+ "net/http"
+ "path"
+ "strconv"
+ "strings"
+
+ "github.com/alist-org/alist/v3/drivers/base"
+ "github.com/alist-org/alist/v3/internal/driver"
+ "github.com/alist-org/alist/v3/internal/model"
+ "github.com/alist-org/alist/v3/pkg/http_range"
+ "github.com/alist-org/alist/v3/pkg/utils"
+ "github.com/aws/aws-sdk-go/aws"
+ "github.com/aws/aws-sdk-go/aws/credentials"
+ "github.com/aws/aws-sdk-go/aws/session"
+ "github.com/aws/aws-sdk-go/service/s3/s3manager"
+ "github.com/go-resty/resty/v2"
+)
+
+func (d *Vtencent) request(url, method string, callback base.ReqCallback, resp interface{}) ([]byte, error) {
+ req := base.RestyClient.R()
+ req.SetHeaders(map[string]string{
+ "cookie": d.Cookie,
+ "content-type": "application/json",
+ "origin": d.conf.origin,
+ "referer": d.conf.referer,
+ })
+ if callback != nil {
+ callback(req)
+ } else {
+ req.SetBody("{}")
+ }
+ if resp != nil {
+ req.SetResult(resp)
+ }
+ res, err := req.Execute(method, url)
+ if err != nil {
+ return nil, err
+ }
+ code := utils.Json.Get(res.Body(), "Code").ToString()
+ if code != "Success" {
+ switch code {
+ case "AuthFailure.SessionInvalid":
+ if err != nil {
+ return nil, errors.New(code)
+ }
+ default:
+ return nil, errors.New(code)
+ }
+ return d.request(url, method, callback, resp)
+ }
+ return res.Body(), nil
+}
+
+func (d *Vtencent) ugcRequest(url, method string, callback base.ReqCallback, resp interface{}) ([]byte, error) {
+ req := base.RestyClient.R()
+ req.SetHeaders(map[string]string{
+ "cookie": d.Cookie,
+ "content-type": "application/json",
+ "origin": d.conf.origin,
+ "referer": d.conf.referer,
+ })
+ if callback != nil {
+ callback(req)
+ } else {
+ req.SetBody("{}")
+ }
+ if resp != nil {
+ req.SetResult(resp)
+ }
+ res, err := req.Execute(method, url)
+ if err != nil {
+ return nil, err
+ }
+ code := utils.Json.Get(res.Body(), "Code").ToInt()
+ if code != 0 {
+ message := utils.Json.Get(res.Body(), "message").ToString()
+ if len(message) == 0 {
+ message = utils.Json.Get(res.Body(), "msg").ToString()
+ }
+ return nil, errors.New(message)
+ }
+ return res.Body(), nil
+}
+
+func (d *Vtencent) LoadUser() (string, error) {
+ api := "https://api.vs.tencent.com/SaaS/Account/DescribeAccount"
+ res, err := d.request(api, http.MethodPost, func(req *resty.Request) {}, nil)
+ if err != nil {
+ return "", err
+ }
+ return utils.Json.Get(res, "Data", "TfUid").ToString(), nil
+}
+
+func (d *Vtencent) GetFiles(dirId string) ([]File, error) {
+ api := "https://api.vs.tencent.com/PaaS/Material/SearchResource"
+ form := fmt.Sprintf(`{
+ "Text":"",
+ "Text":"",
+ "Offset":0,
+ "Limit":20000,
+ "Sort":{"Field":"%s","Order":"%s"},
+ "CreateTimeRanges":[],
+ "MaterialTypes":[],
+ "ReviewStatuses":[],
+ "Tags":[],
+ "SearchScopes":[{"Owner":{"Type":"PERSON","Id":"%s"},"ClassId":%s,"SearchOneDepth":true}]
+ }`, d.Addition.OrderBy, d.Addition.OrderDirection, d.TfUid, dirId)
+ var resps RspFiles
+ _, err := d.request(api, http.MethodPost, func(req *resty.Request) {
+ req.SetBody(form).ForceContentType("application/json")
+ }, &resps)
+ if err != nil {
+ return []File{}, err
+ }
+ return resps.Data.ResourceInfoSet, nil
+}
+
+func (d *Vtencent) CreateUploadMaterial(classId int, fileName string, UploadSummaryKey string) (RspCreatrMaterial, error) {
+ api := "https://api.vs.tencent.com/PaaS/Material/CreateUploadMaterial"
+ form := base.Json{"Owner": base.Json{"Type": "PERSON", "Id": d.TfUid},
+ "MaterialType": "VIDEO", "Name": fileName, "ClassId": classId,
+ "UploadSummaryKey": UploadSummaryKey}
+ var resps RspCreatrMaterial
+ _, err := d.request(api, http.MethodPost, func(req *resty.Request) {
+ req.SetBody(form).ForceContentType("application/json")
+ }, &resps)
+ if err != nil {
+ return RspCreatrMaterial{}, err
+ }
+ return resps, nil
+}
+
+func (d *Vtencent) ApplyUploadUGC(signature string, stream model.FileStreamer) (RspApplyUploadUGC, error) {
+ api := "https://vod2.qcloud.com/v3/index.php?Action=ApplyUploadUGC"
+ form := base.Json{
+ "signature": signature,
+ "videoName": stream.GetName(),
+ "videoType": strings.ReplaceAll(path.Ext(stream.GetName()), ".", ""),
+ "videoSize": stream.GetSize(),
+ }
+ var resps RspApplyUploadUGC
+ _, err := d.ugcRequest(api, http.MethodPost, func(req *resty.Request) {
+ req.SetBody(form).ForceContentType("application/json")
+ }, &resps)
+ if err != nil {
+ return RspApplyUploadUGC{}, err
+ }
+ return resps, nil
+}
+
+func (d *Vtencent) CommitUploadUGC(signature string, vodSessionKey string) (RspCommitUploadUGC, error) {
+ api := "https://vod2.qcloud.com/v3/index.php?Action=CommitUploadUGC"
+ form := base.Json{
+ "signature": signature,
+ "vodSessionKey": vodSessionKey,
+ }
+ var resps RspCommitUploadUGC
+ rsp, err := d.ugcRequest(api, http.MethodPost, func(req *resty.Request) {
+ req.SetBody(form).ForceContentType("application/json")
+ }, &resps)
+ if err != nil {
+ return RspCommitUploadUGC{}, err
+ }
+ if len(resps.Data.Video.URL) == 0 {
+ return RspCommitUploadUGC{}, errors.New(string(rsp))
+ }
+ return resps, nil
+}
+
+func (d *Vtencent) FinishUploadMaterial(SummaryKey string, VodVerifyKey string, UploadContext, VodFileId string) (RspFinishUpload, error) {
+ api := "https://api.vs.tencent.com/PaaS/Material/FinishUploadMaterial"
+ form := base.Json{
+ "UploadContext": UploadContext,
+ "VodVerifyKey": VodVerifyKey,
+ "VodFileId": VodFileId,
+ "UploadFullKey": SummaryKey}
+ var resps RspFinishUpload
+ rsp, err := d.request(api, http.MethodPost, func(req *resty.Request) {
+ req.SetBody(form).ForceContentType("application/json")
+ }, &resps)
+ if err != nil {
+ return RspFinishUpload{}, err
+ }
+ if len(resps.Data.MaterialID) == 0 {
+ return RspFinishUpload{}, errors.New(string(rsp))
+ }
+ return resps, nil
+}
+
+func (d *Vtencent) FinishHashUploadMaterial(SummaryKey string, UploadContext string) (RspFinishUpload, error) {
+ api := "https://api.vs.tencent.com/PaaS/Material/FinishUploadMaterial"
+ var resps RspFinishUpload
+ form := base.Json{
+ "UploadContext": UploadContext,
+ "UploadFullKey": SummaryKey}
+ rsp, err := d.request(api, http.MethodPost, func(req *resty.Request) {
+ req.SetBody(form).ForceContentType("application/json")
+ }, &resps)
+ if err != nil {
+ return RspFinishUpload{}, err
+ }
+ if len(resps.Data.MaterialID) == 0 {
+ return RspFinishUpload{}, errors.New(string(rsp))
+ }
+ return resps, nil
+}
+
+func (d *Vtencent) FileUpload(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
+ classId, err := strconv.Atoi(dstDir.GetID())
+ if err != nil {
+ return err
+ }
+ const chunkLength int64 = 1024 * 1024 * 10
+ reader, err := stream.RangeRead(http_range.Range{Start: 0, Length: chunkLength})
+ if err != nil {
+ return err
+ }
+ chunkHash, err := utils.HashReader(utils.SHA1, reader)
+ if err != nil {
+ return err
+ }
+ rspCreatrMaterial, err := d.CreateUploadMaterial(classId, stream.GetName(), chunkHash)
+ if err != nil {
+ return err
+ }
+ if rspCreatrMaterial.Data.QuickUpload {
+ SummaryKey := stream.GetHash().GetHash(utils.SHA1)
+ if len(SummaryKey) < utils.SHA1.Width {
+ if SummaryKey, err = utils.HashReader(utils.SHA1, stream); err != nil {
+ return err
+ }
+ }
+ UploadContext := rspCreatrMaterial.Data.UploadContext
+ _, err = d.FinishHashUploadMaterial(SummaryKey, UploadContext)
+ if err != nil {
+ return err
+ }
+ return nil
+ }
+ hash := sha1.New()
+ rspUGC, err := d.ApplyUploadUGC(rspCreatrMaterial.Data.VodUploadSign, stream)
+ if err != nil {
+ return err
+ }
+ params := rspUGC.Data
+ certificate := params.TempCertificate
+ cfg := &aws.Config{
+ HTTPClient: base.HttpClient,
+ // S3ForcePathStyle: aws.Bool(true),
+ Credentials: credentials.NewStaticCredentials(certificate.SecretID, certificate.SecretKey, certificate.Token),
+ Region: aws.String(params.StorageRegionV5),
+ Endpoint: aws.String(fmt.Sprintf("cos.%s.myqcloud.com", params.StorageRegionV5)),
+ }
+ ss, err := session.NewSession(cfg)
+ if err != nil {
+ return err
+ }
+ uploader := s3manager.NewUploader(ss)
+ input := &s3manager.UploadInput{
+ Bucket: aws.String(fmt.Sprintf("%s-%d", params.StorageBucket, params.StorageAppID)),
+ Key: &params.Video.StoragePath,
+ Body: io.TeeReader(stream, io.MultiWriter(hash, driver.NewProgress(stream.GetSize(), up))),
+ }
+ _, err = uploader.UploadWithContext(ctx, input)
+ if err != nil {
+ return err
+ }
+ rspCommitUGC, err := d.CommitUploadUGC(rspCreatrMaterial.Data.VodUploadSign, rspUGC.Data.VodSessionKey)
+ if err != nil {
+ return err
+ }
+ VodVerifyKey := rspCommitUGC.Data.Video.VerifyContent
+ VodFileId := rspCommitUGC.Data.FileID
+ UploadContext := rspCreatrMaterial.Data.UploadContext
+ SummaryKey := hex.EncodeToString(hash.Sum(nil))
+ _, err = d.FinishUploadMaterial(SummaryKey, VodVerifyKey, UploadContext, VodFileId)
+ if err != nil {
+ return err
+ }
+ return nil
+}
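
The upload path in util.go above hinges on a two-level SHA-1 scheme: the hash of the first 10 MiB chunk is sent as UploadSummaryKey for the quick-upload check, while the full-file hash, accumulated through io.TeeReader as the body is uploaded, becomes the UploadFullKey when the material is finalized. Below is a minimal standalone sketch of that scheme using only the standard library; the file name and helper function are illustrative and not part of the driver.

```go
package main

import (
	"crypto/sha1"
	"encoding/hex"
	"fmt"
	"io"
	"os"
)

// chunkSummaryKey hashes only the first limit bytes of r, mirroring the
// 10 MiB prefix hash the driver sends as UploadSummaryKey for the
// quick-upload check.
func chunkSummaryKey(r io.Reader, limit int64) (string, error) {
	h := sha1.New()
	if _, err := io.Copy(h, io.LimitReader(r, limit)); err != nil {
		return "", err
	}
	return hex.EncodeToString(h.Sum(nil)), nil
}

func main() {
	f, err := os.Open("example.bin") // illustrative input file
	if err != nil {
		panic(err)
	}
	defer f.Close()

	prefixKey, err := chunkSummaryKey(f, 10*1024*1024)
	if err != nil {
		panic(err)
	}

	// Rewind and stream the whole file through a TeeReader so the full
	// SHA-1 (the UploadFullKey) accumulates while the body is consumed,
	// the same way the driver tees the stream into the S3 uploader.
	if _, err := f.Seek(0, io.SeekStart); err != nil {
		panic(err)
	}
	full := sha1.New()
	if _, err := io.Copy(io.Discard, io.TeeReader(f, full)); err != nil {
		panic(err)
	}
	fmt.Println("prefix key:", prefixKey)
	fmt.Println("full key:  ", hex.EncodeToString(full.Sum(nil)))
}
```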
diff --git a/drivers/webdav/driver.go b/drivers/webdav/driver.go
old mode 100644
new mode 100755
diff --git a/drivers/webdav/meta.go b/drivers/webdav/meta.go
old mode 100644
new mode 100755
index d66499bc..2294d482
--- a/drivers/webdav/meta.go
+++ b/drivers/webdav/meta.go
@@ -11,6 +11,7 @@ type Addition struct {
Username string `json:"username" required:"true"`
Password string `json:"password" required:"true"`
driver.RootPath
+ TlsInsecureSkipVerify bool `json:"tls_insecure_skip_verify" default:"false"`
}
var config = driver.Config{
diff --git a/drivers/webdav/odrvcookie/cookie.go b/drivers/webdav/odrvcookie/cookie.go
old mode 100644
new mode 100755
diff --git a/drivers/webdav/odrvcookie/fetch.go b/drivers/webdav/odrvcookie/fetch.go
old mode 100644
new mode 100755
diff --git a/drivers/webdav/types.go b/drivers/webdav/types.go
old mode 100644
new mode 100755
diff --git a/drivers/webdav/util.go b/drivers/webdav/util.go
old mode 100644
new mode 100755
index 92557c4f..23dc909f
--- a/drivers/webdav/util.go
+++ b/drivers/webdav/util.go
@@ -1,7 +1,9 @@
package webdav
import (
+ "crypto/tls"
"net/http"
+ "net/http/cookiejar"
"github.com/alist-org/alist/v3/drivers/webdav/odrvcookie"
"github.com/alist-org/alist/v3/internal/model"
@@ -16,6 +18,10 @@ func (d *WebDav) isSharepoint() bool {
func (d *WebDav) setClient() error {
c := gowebdav.NewClient(d.Address, d.Username, d.Password)
+ c.SetTransport(&http.Transport{
+ Proxy: http.ProxyFromEnvironment,
+ TLSClientConfig: &tls.Config{InsecureSkipVerify: d.TlsInsecureSkipVerify},
+ })
if d.isSharepoint() {
cookie, err := odrvcookie.GetCookie(d.Username, d.Password, d.Address)
if err == nil {
@@ -26,6 +32,13 @@ func (d *WebDav) setClient() error {
} else {
return err
}
+ } else {
+ cookieJar, err := cookiejar.New(nil)
+ if err == nil {
+ c.SetJar(cookieJar)
+ } else {
+ return err
+ }
}
d.client = c
return nil
diff --git a/drivers/weiyun/driver.go b/drivers/weiyun/driver.go
old mode 100644
new mode 100755
index 628536f0..e6d5897c
--- a/drivers/weiyun/driver.go
+++ b/drivers/weiyun/driver.go
@@ -6,7 +6,6 @@ import (
"io"
"math"
"net/http"
- "os"
"strconv"
"time"
@@ -309,15 +308,14 @@ func (d *WeiYun) Remove(ctx context.Context, obj model.Obj) error {
}
func (d *WeiYun) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) (model.Obj, error) {
+ // NOTE:
+ // Instant upload needs the final SHA-1 state, but SHA-1 cannot be reversed, so the whole file has to be read (or maybe there is a way??)
+ // The server supports resuming upload progress, so no extra handling is needed here.
if folder, ok := dstDir.(*Folder); ok {
- file, err := utils.CreateTempFile(stream, stream.GetSize())
+ file, err := stream.CacheFullInTempFile()
if err != nil {
return nil, err
}
- defer func() {
- _ = file.Close()
- _ = os.Remove(file.Name())
- }()
// step 1.
preData, err := d.client.PreUpload(ctx, weiyunsdkgo.UpdloadFileParam{
@@ -335,7 +333,7 @@ func (d *WeiYun) Put(ctx context.Context, dstDir model.Obj, stream model.FileStr
return nil, err
}
- // fast upload
+ // not fast upload
if !preData.FileExist {
// step.2 增加上传通道
if len(preData.ChannelList) < d.uploadThread {
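
The note above turns on the fact that a SHA-1 digest cannot be extended from the digest alone, so any instant-upload check that needs the final hash state forces a complete read of the cached file. A minimal sketch of that full pass using only the standard library; "cached.tmp" merely stands in for whatever CacheFullInTempFile produced.

```go
package main

import (
	"crypto/sha1"
	"encoding/hex"
	"fmt"
	"io"
	"os"
)

// fullFileSHA1 does the full pass the note above describes: a SHA-1 digest
// cannot be resumed from the digest alone, so getting the final hash state
// means reading the whole cached file.
func fullFileSHA1(path string) (string, error) {
	f, err := os.Open(path)
	if err != nil {
		return "", err
	}
	defer f.Close()

	h := sha1.New()
	if _, err := io.Copy(h, f); err != nil {
		return "", err
	}
	return hex.EncodeToString(h.Sum(nil)), nil
}

func main() {
	// "cached.tmp" stands in for the file produced by CacheFullInTempFile.
	sum, err := fullFileSHA1("cached.tmp")
	if err != nil {
		panic(err)
	}
	fmt.Println("sha1:", sum)
}
```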
diff --git a/drivers/weiyun/meta.go b/drivers/weiyun/meta.go
old mode 100644
new mode 100755
diff --git a/drivers/weiyun/types.go b/drivers/weiyun/types.go
old mode 100644
new mode 100755
index 35558327..664693c8
--- a/drivers/weiyun/types.go
+++ b/drivers/weiyun/types.go
@@ -1,6 +1,7 @@
package weiyun
import (
+ "github.com/alist-org/alist/v3/pkg/utils"
"time"
weiyunsdkgo "github.com/foxxorcat/weiyun-sdk-go"
@@ -21,12 +22,27 @@ func (f *File) GetPath() string { return "" }
func (f *File) GetPKey() string {
return f.PFolder.DirKey
}
+func (f *File) CreateTime() time.Time {
+ return time.Time(f.FileCtime)
+}
+
+func (f *File) GetHash() utils.HashInfo {
+ return utils.NewHashInfo(utils.SHA1, f.FileSha)
+}
type Folder struct {
PFolder *Folder
weiyunsdkgo.Folder
}
+func (f *Folder) CreateTime() time.Time {
+ return time.Time(f.DirCtime)
+}
+
+func (f *Folder) GetHash() utils.HashInfo {
+ return utils.HashInfo{}
+}
+
func (f *Folder) GetID() string { return f.DirKey }
func (f *Folder) GetSize() int64 { return 0 }
func (f *Folder) GetName() string { return f.DirName }
diff --git a/drivers/wopan/driver.go b/drivers/wopan/driver.go
old mode 100644
new mode 100755
index a3f222e8..e5e26c94
--- a/drivers/wopan/driver.go
+++ b/drivers/wopan/driver.go
@@ -159,7 +159,7 @@ func (d *Wopan) Put(ctx context.Context, dstDir model.Obj, stream model.FileStre
ContentType: stream.GetMimetype(),
}, dstDir.GetID(), d.FamilyID, wopan.Upload2COption{
OnProgress: func(current, total int64) {
- up(int(100 * current / total))
+ up(100 * float64(current) / float64(total))
},
})
return err
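
The one-line change above moves the progress callback from integer to floating-point percentages, since integer division truncates small fractions to zero on large files. A tiny illustration with made-up sizes:

```go
package main

import "fmt"

// progress reports the percentage as float64 so small fractions are not
// truncated the way int(100*current/total) is.
func progress(current, total int64) float64 {
	return 100 * float64(current) / float64(total)
}

func main() {
	// 1 MiB uploaded out of 300 MiB: integer math reports 0,
	// the float64 form reports a small but non-zero percentage.
	var current, total int64 = 1 << 20, 300 << 20
	fmt.Println(int(100 * current / total))          // 0
	fmt.Printf("%.2f%%\n", progress(current, total)) // 0.33%
}
```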
diff --git a/drivers/wopan/meta.go b/drivers/wopan/meta.go
old mode 100644
new mode 100755
diff --git a/drivers/wopan/types.go b/drivers/wopan/types.go
old mode 100644
new mode 100755
diff --git a/drivers/wopan/util.go b/drivers/wopan/util.go
old mode 100644
new mode 100755
diff --git a/drivers/yandex_disk/driver.go b/drivers/yandex_disk/driver.go
old mode 100644
new mode 100755
diff --git a/drivers/yandex_disk/meta.go b/drivers/yandex_disk/meta.go
old mode 100644
new mode 100755
diff --git a/drivers/yandex_disk/types.go b/drivers/yandex_disk/types.go
old mode 100644
new mode 100755
diff --git a/drivers/yandex_disk/util.go b/drivers/yandex_disk/util.go
old mode 100644
new mode 100755
diff --git a/entrypoint.sh b/entrypoint.sh
old mode 100644
new mode 100755
diff --git a/go.mod b/go.mod
old mode 100644
new mode 100755
index 609798c2..e6ced69b
--- a/go.mod
+++ b/go.mod
@@ -1,52 +1,60 @@
module github.com/alist-org/alist/v3
-go 1.20
+go 1.21
require (
github.com/SheltonZhu/115driver v1.0.21
github.com/Xhofe/go-cache v0.0.0-20220723083548-714439c8af9a
github.com/Xhofe/rateg v0.0.0-20230728072201-251a4e1adad4
- github.com/Xhofe/wopan-sdk-go v0.1.1
+ github.com/Xhofe/wopan-sdk-go v0.1.2
+ github.com/aliyun/aliyun-oss-go-sdk v2.2.9+incompatible
github.com/avast/retry-go v3.0.0+incompatible
- github.com/aws/aws-sdk-go v1.44.316
- github.com/blevesearch/bleve/v2 v2.3.9
+ github.com/aws/aws-sdk-go v1.46.7
+ github.com/blevesearch/bleve/v2 v2.3.10
github.com/caarlos0/env/v9 v9.0.0
github.com/charmbracelet/bubbles v0.16.1
github.com/charmbracelet/bubbletea v0.24.2
- github.com/charmbracelet/lipgloss v0.7.1
+ github.com/charmbracelet/lipgloss v0.9.1
github.com/coreos/go-oidc v2.2.1+incompatible
- github.com/deckarep/golang-set/v2 v2.3.0
+ github.com/deckarep/golang-set/v2 v2.3.1
github.com/disintegration/imaging v1.6.2
+ github.com/djherbis/times v1.5.0
github.com/dustinxie/ecc v0.0.0-20210511000915-959544187564
- github.com/foxxorcat/mopan-sdk-go v0.1.3
- github.com/foxxorcat/weiyun-sdk-go v0.1.2
+ github.com/foxxorcat/mopan-sdk-go v0.1.4
+ github.com/foxxorcat/weiyun-sdk-go v0.1.3
github.com/gin-contrib/cors v1.4.0
github.com/gin-gonic/gin v1.9.1
- github.com/go-resty/resty/v2 v2.7.0
+ github.com/go-resty/resty/v2 v2.9.1
+ github.com/go-webauthn/webauthn v0.8.6
github.com/golang-jwt/jwt/v4 v4.5.0
- github.com/google/uuid v1.3.0
+ github.com/google/uuid v1.3.1
github.com/gorilla/websocket v1.5.0
github.com/hirochachacha/go-smb2 v1.1.0
- github.com/ipfs/go-ipfs-api v0.6.1
+ github.com/ipfs/go-ipfs-api v0.7.0
github.com/jlaffaye/ftp v0.2.0
github.com/json-iterator/go v1.1.12
github.com/maruel/natural v1.1.0
github.com/natefinch/lumberjack v2.0.0+incompatible
+ github.com/orzogc/fake115uploader v0.3.3-0.20230715111618-58f9eb76f831
github.com/pkg/errors v0.9.1
- github.com/pkg/sftp v1.13.6-0.20230213180117-971c283182b6
+ github.com/pkg/sftp v1.13.6
github.com/pquerna/otp v1.4.0
github.com/rclone/rclone v1.63.1
github.com/sirupsen/logrus v1.9.3
github.com/spf13/cobra v1.7.0
+ github.com/stretchr/testify v1.8.4
github.com/t3rm1n4l/go-mega v0.0.0-20230228171823-a01a2cda13ca
- github.com/u2takey/ffmpeg-go v0.4.1
+ github.com/u2takey/ffmpeg-go v0.5.0
github.com/upyun/go-sdk/v3 v3.0.4
- github.com/winfsp/cgofuse v1.5.1-0.20221118130120-84c0898ad2e0
- golang.org/x/crypto v0.11.0
- golang.org/x/exp v0.0.0-20230801115018-d63ba01acd4b
- golang.org/x/image v0.10.0
- golang.org/x/net v0.13.0
- golang.org/x/oauth2 v0.10.0
+ github.com/winfsp/cgofuse v1.5.1-0.20230130140708-f87f5db493b5
+ github.com/xhofe/tache v0.1.1
+ golang.org/x/crypto v0.14.0
+ golang.org/x/exp v0.0.0-20231006140011-7918f672742d
+ golang.org/x/image v0.11.0
+ golang.org/x/net v0.17.0
+ golang.org/x/oauth2 v0.12.0
+ golang.org/x/time v0.3.0
+ google.golang.org/appengine v1.6.7
gorm.io/driver/mysql v1.4.7
gorm.io/driver/postgres v1.4.8
gorm.io/driver/sqlite v1.4.4
@@ -60,27 +68,27 @@ require (
github.com/RoaringBitmap/roaring v1.2.3 // indirect
github.com/abbot/go-http-auth v0.4.0 // indirect
github.com/aead/ecdh v0.2.0 // indirect
- github.com/aliyun/aliyun-oss-go-sdk v2.2.5+incompatible // indirect
- github.com/andreburgaud/crypt2go v1.1.0 // indirect
+ github.com/andreburgaud/crypt2go v1.2.0 // indirect
github.com/aymanbagabas/go-osc52/v2 v2.0.1 // indirect
github.com/benbjohnson/clock v1.3.0 // indirect
github.com/beorn7/perks v1.0.1 // indirect
github.com/bits-and-blooms/bitset v1.2.0 // indirect
- github.com/blevesearch/bleve_index_api v1.0.5 // indirect
- github.com/blevesearch/geo v0.1.17 // indirect
+ github.com/blang/semver/v4 v4.0.0 // indirect
+ github.com/blevesearch/bleve_index_api v1.0.6 // indirect
+ github.com/blevesearch/geo v0.1.18 // indirect
github.com/blevesearch/go-porterstemmer v1.0.3 // indirect
github.com/blevesearch/gtreap v0.1.1 // indirect
github.com/blevesearch/mmap-go v1.0.4 // indirect
- github.com/blevesearch/scorch_segment_api/v2 v2.1.5 // indirect
+ github.com/blevesearch/scorch_segment_api/v2 v2.1.6 // indirect
github.com/blevesearch/segment v0.9.1 // indirect
github.com/blevesearch/snowballstem v0.9.0 // indirect
github.com/blevesearch/upsidedown_store_api v1.0.2 // indirect
github.com/blevesearch/vellum v1.0.10 // indirect
- github.com/blevesearch/zapx/v11 v11.3.9 // indirect
- github.com/blevesearch/zapx/v12 v12.3.9 // indirect
- github.com/blevesearch/zapx/v13 v13.3.9 // indirect
- github.com/blevesearch/zapx/v14 v14.3.9 // indirect
- github.com/blevesearch/zapx/v15 v15.3.12 // indirect
+ github.com/blevesearch/zapx/v11 v11.3.10 // indirect
+ github.com/blevesearch/zapx/v12 v12.3.10 // indirect
+ github.com/blevesearch/zapx/v13 v13.3.10 // indirect
+ github.com/blevesearch/zapx/v14 v14.3.10 // indirect
+ github.com/blevesearch/zapx/v15 v15.3.13 // indirect
github.com/bluele/gcache v0.0.2 // indirect
github.com/boombuler/barcode v1.0.1-0.20190219062509-6c824513bacc // indirect
github.com/bytedance/sonic v1.9.1 // indirect
@@ -89,7 +97,9 @@ require (
github.com/containerd/console v1.0.4-0.20230313162750-1ae8d489ac81 // indirect
github.com/coreos/go-semver v0.3.1 // indirect
github.com/crackcomm/go-gitignore v0.0.0-20170627025303-887ab5e44cc3 // indirect
+ github.com/davecgh/go-spew v1.1.1 // indirect
github.com/decred/dcrd/dcrec/secp256k1/v4 v4.1.0 // indirect
+ github.com/fxamacker/cbor/v2 v2.4.0 // indirect
github.com/gabriel-vasile/mimetype v1.4.2 // indirect
github.com/gaoyb7/115drive-webdav v0.1.8 // indirect
github.com/geoffgarside/ber v1.1.0 // indirect
@@ -100,18 +110,22 @@ require (
github.com/go-playground/universal-translator v0.18.1 // indirect
github.com/go-playground/validator/v10 v10.14.0 // indirect
github.com/go-sql-driver/mysql v1.7.0 // indirect
+ github.com/go-webauthn/x v0.1.4 // indirect
github.com/goccy/go-json v0.10.2 // indirect
+ github.com/golang-jwt/jwt/v5 v5.0.0 // indirect
github.com/golang/geo v0.0.0-20210211234256-740aa86cb551 // indirect
github.com/golang/protobuf v1.5.3 // indirect
github.com/golang/snappy v0.0.4 // indirect
+ github.com/google/go-tpm v0.9.0 // indirect
github.com/hashicorp/errwrap v1.1.0 // indirect
github.com/hashicorp/go-multierror v1.1.1 // indirect
github.com/inconshreveable/mousetrap v1.1.0 // indirect
- github.com/ipfs/boxo v0.8.0 // indirect
+ github.com/ipfs/boxo v0.12.0 // indirect
github.com/ipfs/go-cid v0.4.1 // indirect
github.com/jackc/pgpassfile v1.0.0 // indirect
github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a // indirect
github.com/jackc/pgx/v5 v5.3.0 // indirect
+ github.com/jaevor/go-nanoid v1.3.0 // indirect
github.com/jinzhu/inflection v1.0.0 // indirect
github.com/jinzhu/now v1.1.5 // indirect
github.com/jmespath/go-jmespath v0.4.0 // indirect
@@ -127,11 +141,12 @@ require (
github.com/mattn/go-colorable v0.1.13 // indirect
github.com/mattn/go-isatty v0.0.19 // indirect
github.com/mattn/go-localereader v0.0.1 // indirect
- github.com/mattn/go-runewidth v0.0.14 // indirect
+ github.com/mattn/go-runewidth v0.0.15 // indirect
github.com/mattn/go-sqlite3 v1.14.15 // indirect
github.com/matttproud/golang_protobuf_extensions v1.0.4 // indirect
github.com/minio/sha256-simd v1.0.0 // indirect
github.com/mitchellh/go-homedir v1.1.0 // indirect
+ github.com/mitchellh/mapstructure v1.5.0 // indirect
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
github.com/modern-go/reflect2 v1.0.2 // indirect
github.com/mr-tron/base58 v1.2.0 // indirect
@@ -139,19 +154,19 @@ require (
github.com/muesli/ansi v0.0.0-20211018074035-2e021307bc4b // indirect
github.com/muesli/cancelreader v0.2.2 // indirect
github.com/muesli/reflow v0.3.0 // indirect
- github.com/muesli/termenv v0.15.1 // indirect
+ github.com/muesli/termenv v0.15.2 // indirect
github.com/multiformats/go-base32 v0.1.0 // indirect
github.com/multiformats/go-base36 v0.2.0 // indirect
github.com/multiformats/go-multiaddr v0.9.0 // indirect
github.com/multiformats/go-multibase v0.2.0 // indirect
- github.com/multiformats/go-multicodec v0.8.1 // indirect
- github.com/multiformats/go-multihash v0.2.1 // indirect
+ github.com/multiformats/go-multicodec v0.9.0 // indirect
+ github.com/multiformats/go-multihash v0.2.3 // indirect
github.com/multiformats/go-multistream v0.4.1 // indirect
github.com/multiformats/go-varint v0.0.7 // indirect
github.com/ncw/swift/v2 v2.0.2 // indirect
- github.com/orzogc/fake115uploader v0.3.3-0.20221009101310-08b764073b77 // indirect
github.com/pelletier/go-toml/v2 v2.0.8 // indirect
- github.com/pierrec/lz4/v4 v4.1.17 // indirect
+ github.com/pierrec/lz4/v4 v4.1.18 // indirect
+ github.com/pmezard/go-difflib v1.0.0 // indirect
github.com/power-devops/perfstat v0.0.0-20221212215047-62379fc7944b // indirect
github.com/pquerna/cachecontrol v0.1.0 // indirect
github.com/prometheus/client_golang v1.16.0 // indirect
@@ -170,16 +185,16 @@ require (
github.com/twitchyliquid64/golang-asm v0.15.1 // indirect
github.com/u2takey/go-utils v0.3.1 // indirect
github.com/ugorji/go/codec v1.2.11 // indirect
+ github.com/x448/float16 v0.8.4 // indirect
+ github.com/xhofe/gsync v0.0.0-20230917091818-2111ceb38a25 // indirect
github.com/yusufpapurcu/wmi v1.2.3 // indirect
go.etcd.io/bbolt v1.3.7 // indirect
golang.org/x/arch v0.3.0 // indirect
golang.org/x/sync v0.3.0 // indirect
- golang.org/x/sys v0.10.0 // indirect
- golang.org/x/term v0.10.0 // indirect
- golang.org/x/text v0.11.0 // indirect
- golang.org/x/time v0.3.0 // indirect
+ golang.org/x/sys v0.13.0 // indirect
+ golang.org/x/term v0.13.0 // indirect
+ golang.org/x/text v0.13.0 // indirect
google.golang.org/api v0.134.0 // indirect
- google.golang.org/appengine v1.6.7 // indirect
google.golang.org/genproto/googleapis/rpc v0.0.0-20230803162519-f966b187b2e5 // indirect
google.golang.org/grpc v1.57.0 // indirect
google.golang.org/protobuf v1.31.0 // indirect
diff --git a/go.sum b/go.sum
old mode 100644
new mode 100755
index 3a839815..36c2b49a
--- a/go.sum
+++ b/go.sum
@@ -2,36 +2,36 @@ cloud.google.com/go v0.110.2 h1:sdFPBr6xG9/wkBbfhmUz/JmZC7X6LavQgcrVINrKiVA=
cloud.google.com/go/compute v1.23.0 h1:tP41Zoavr8ptEqaW6j+LQOnyBBhO7OkOMAGrgLopTwY=
cloud.google.com/go/compute v1.23.0/go.mod h1:4tCnrn48xsqlwSAiLf1HXMQk8CONslYbdiEZc9FEIbM=
cloud.google.com/go/compute/metadata v0.2.3 h1:mg4jlk7mCAj6xXp9UJ4fjI9VUI5rubuGBW5aJ7UnBMY=
+cloud.google.com/go/compute/metadata v0.2.3/go.mod h1:VAV5nSsACxMJvgaAuX6Pk2AawlZn8kiOGuCv6gTkwuA=
github.com/BurntSushi/toml v0.3.1 h1:WXkYYl6Yr3qBf1K79EBnL4mak0OimBfB0XUf9Vl28OQ=
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
github.com/Max-Sum/base32768 v0.0.0-20230304063302-18e6ce5945fd h1:nzE1YQBdx1bq9IlZinHa+HVffy+NmVRoKr+wHN8fpLE=
github.com/Max-Sum/base32768 v0.0.0-20230304063302-18e6ce5945fd/go.mod h1:C8yoIfvESpM3GD07OCHU7fqI7lhwyZ2Td1rbNbTAhnc=
github.com/RoaringBitmap/roaring v1.2.3 h1:yqreLINqIrX22ErkKI0vY47/ivtJr6n+kMhVOVmhWBY=
github.com/RoaringBitmap/roaring v1.2.3/go.mod h1:plvDsJQpxOC5bw8LRteu/MLWHsHez/3y6cubLI4/1yE=
-github.com/SheltonZhu/115driver v1.0.14 h1:uW3dl8J9KDMw+3gPxQdhTysoGhw0/uI1484GT9xhfU4=
-github.com/SheltonZhu/115driver v1.0.14/go.mod h1:00ixivHH5HqDj4S7kAWbkuUrjtsJTxc7cGv5RMw3RVs=
github.com/SheltonZhu/115driver v1.0.21 h1:Pz6r14VwIiuSyHj+OmJe57FHhbmWB/6IfnXAFL2iXbU=
github.com/SheltonZhu/115driver v1.0.21/go.mod h1:e3fPOBANbH/FsTya8FquJwOR3ErhCQgEab3q6CVY2k4=
github.com/Unknwon/goconfig v1.0.0 h1:9IAu/BYbSLQi8puFjUQApZTxIHqSwrj5d8vpP8vTq4A=
+github.com/Unknwon/goconfig v1.0.0/go.mod h1:wngxua9XCNjvHjDiTiV26DaKDT+0c63QR6H5hjVUUxw=
github.com/Xhofe/go-cache v0.0.0-20220723083548-714439c8af9a h1:RenIAa2q4H8UcS/cqmwdT1WCWIAH5aumP8m8RpbqVsE=
github.com/Xhofe/go-cache v0.0.0-20220723083548-714439c8af9a/go.mod h1:sSBbaOg90XwWKtpT56kVujF0bIeVITnPlssLclogS04=
github.com/Xhofe/rateg v0.0.0-20230728072201-251a4e1adad4 h1:WnvifFgYyogPz2ZFvaVLk4gI/Co0paF92FmxSR6U1zY=
github.com/Xhofe/rateg v0.0.0-20230728072201-251a4e1adad4/go.mod h1:8pWlL2rpusvx7Xa6yYaIWOJ8bR3gPdFBUT7OystyGOY=
-github.com/Xhofe/wopan-sdk-go v0.1.1 h1:dSrTxNYclqNuo9libjtC+R6C4RCen/inh/dUXd12vpM=
-github.com/Xhofe/wopan-sdk-go v0.1.1/go.mod h1:xWcUS7PoFLDD9gy2BK2VQfilEsZngLMz2Vkx3oF2zJY=
+github.com/Xhofe/wopan-sdk-go v0.1.2 h1:6Gh4YTT7b7YHN0OoJ33j7Jm9ru/ckuvcDxPnRmH07jc=
+github.com/Xhofe/wopan-sdk-go v0.1.2/go.mod h1:ktLYb4t7rnPFq1AshLaPXq5kZER+DkEagT6/i/in0uo=
github.com/abbot/go-http-auth v0.4.0 h1:QjmvZ5gSC7jm3Zg54DqWE/T5m1t2AfDu6QlXJT0EVT0=
github.com/abbot/go-http-auth v0.4.0/go.mod h1:Cz6ARTIzApMJDzh5bRMSUou6UMSp0IEXg9km/ci7TJM=
github.com/aead/ecdh v0.2.0 h1:pYop54xVaq/CEREFEcukHRZfTdjiWvYIsZDXXrBapQQ=
github.com/aead/ecdh v0.2.0/go.mod h1:a9HHtXuSo8J1Js1MwLQx2mBhkXMT6YwUmVVEY4tTB8U=
-github.com/aliyun/aliyun-oss-go-sdk v2.2.5+incompatible h1:QoRMR0TCctLDqBCMyOu1eXdZyMw3F7uGA9qPn2J4+R8=
-github.com/aliyun/aliyun-oss-go-sdk v2.2.5+incompatible/go.mod h1:T/Aws4fEfogEE9v+HPhhw+CntffsBHJ8nXQCwKr0/g8=
-github.com/andreburgaud/crypt2go v1.1.0 h1:eitZxTPY1krUsxinsng3Qvt/Ud7q/aQmmYRh8p4hyPw=
-github.com/andreburgaud/crypt2go v1.1.0/go.mod h1:4qhZPzarj1dCIRmCkpdgCklwp+hBq9yEt0zPe9Ayuhc=
+github.com/aliyun/aliyun-oss-go-sdk v2.2.9+incompatible h1:Sg/2xHwDrioHpxTN6WMiwbXTpUEinBpHsN7mG21Rc2k=
+github.com/aliyun/aliyun-oss-go-sdk v2.2.9+incompatible/go.mod h1:T/Aws4fEfogEE9v+HPhhw+CntffsBHJ8nXQCwKr0/g8=
+github.com/andreburgaud/crypt2go v1.2.0 h1:oly/ENAodeqTYpUafgd4r3v+VKLQnmOKUyfpj+TxHbE=
+github.com/andreburgaud/crypt2go v1.2.0/go.mod h1:kKRqlrX/3Q9Ki7HdUsoh0cX1Urq14/Hcta4l4VrIXrI=
github.com/avast/retry-go v3.0.0+incompatible h1:4SOWQ7Qs+oroOTQOYnAHqelpCO0biHSxpiH9JdtuBj0=
github.com/avast/retry-go v3.0.0+incompatible/go.mod h1:XtSnn+n/sHqQIpZ10K1qAevBhOOCWBLXXy3hyiqqBrY=
github.com/aws/aws-sdk-go v1.38.20/go.mod h1:hcU610XS61/+aQV88ixoOzUoG7v3b31pl2zKMmprdro=
-github.com/aws/aws-sdk-go v1.44.316 h1:UC3alCEyzj2XU13ZFGIOHW3yjCNLGTIGVauyetl9fwE=
-github.com/aws/aws-sdk-go v1.44.316/go.mod h1:aVsgQcEevwlmQ7qHE9I3h+dtQgpqhFB+i8Phjh7fkwI=
+github.com/aws/aws-sdk-go v1.46.7 h1:IjvAWeiJZlbETOemOwvheN5L17CvKvKW0T1xOC6d3Sc=
+github.com/aws/aws-sdk-go v1.46.7/go.mod h1:aVsgQcEevwlmQ7qHE9I3h+dtQgpqhFB+i8Phjh7fkwI=
github.com/aymanbagabas/go-osc52/v2 v2.0.1 h1:HwpRHbFMcZLEVr42D4p7XBqjyuxQH5SMiErDT4WkJ2k=
github.com/aymanbagabas/go-osc52/v2 v2.0.1/go.mod h1:uYgXzlJ7ZpABp8OJ+exZzJJhRNQ2ASbcXHWsFqH8hp8=
github.com/benbjohnson/clock v1.3.0 h1:ip6w0uFQkncKQ979AypyG0ER7mqUSBdKLOgAle/AT8A=
@@ -40,20 +40,22 @@ github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM=
github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw=
github.com/bits-and-blooms/bitset v1.2.0 h1:Kn4yilvwNtMACtf1eYDlG8H77R07mZSPbMjLyS07ChA=
github.com/bits-and-blooms/bitset v1.2.0/go.mod h1:gIdJ4wp64HaoK2YrL1Q5/N7Y16edYb8uY+O0FJTyyDA=
-github.com/blevesearch/bleve/v2 v2.3.9 h1:pUMvK0mxAexqasZcVj8lazmWnEW5XiV0tASIqANiNTQ=
-github.com/blevesearch/bleve/v2 v2.3.9/go.mod h1:1PibElcjlQMQHF9uS9mRv58ODQgj4pCWHA1Wfd+qagU=
-github.com/blevesearch/bleve_index_api v1.0.5 h1:Lc986kpC4Z0/n1g3gg8ul7H+lxgOQPcXb9SxvQGu+tw=
-github.com/blevesearch/bleve_index_api v1.0.5/go.mod h1:YXMDwaXFFXwncRS8UobWs7nvo0DmusriM1nztTlj1ms=
-github.com/blevesearch/geo v0.1.17 h1:AguzI6/5mHXapzB0gE9IKWo+wWPHZmXZoscHcjFgAFA=
-github.com/blevesearch/geo v0.1.17/go.mod h1:uRMGWG0HJYfWfFJpK3zTdnnr1K+ksZTuWKhXeSokfnM=
+github.com/blang/semver/v4 v4.0.0 h1:1PFHFE6yCCTv8C1TeyNNarDzntLi7wMI5i/pzqYIsAM=
+github.com/blang/semver/v4 v4.0.0/go.mod h1:IbckMUScFkM3pff0VJDNKRiT6TG/YpiHIM2yvyW5YoQ=
+github.com/blevesearch/bleve/v2 v2.3.10 h1:z8V0wwGoL4rp7nG/O3qVVLYxUqCbEwskMt4iRJsPLgg=
+github.com/blevesearch/bleve/v2 v2.3.10/go.mod h1:RJzeoeHC+vNHsoLR54+crS1HmOWpnH87fL70HAUCzIA=
+github.com/blevesearch/bleve_index_api v1.0.6 h1:gyUUxdsrvmW3jVhhYdCVL6h9dCjNT/geNU7PxGn37p8=
+github.com/blevesearch/bleve_index_api v1.0.6/go.mod h1:YXMDwaXFFXwncRS8UobWs7nvo0DmusriM1nztTlj1ms=
+github.com/blevesearch/geo v0.1.18 h1:Np8jycHTZ5scFe7VEPLrDoHnnb9C4j636ue/CGrhtDw=
+github.com/blevesearch/geo v0.1.18/go.mod h1:uRMGWG0HJYfWfFJpK3zTdnnr1K+ksZTuWKhXeSokfnM=
github.com/blevesearch/go-porterstemmer v1.0.3 h1:GtmsqID0aZdCSNiY8SkuPJ12pD4jI+DdXTAn4YRcHCo=
github.com/blevesearch/go-porterstemmer v1.0.3/go.mod h1:angGc5Ht+k2xhJdZi511LtmxuEf0OVpvUUNrwmM1P7M=
github.com/blevesearch/gtreap v0.1.1 h1:2JWigFrzDMR+42WGIN/V2p0cUvn4UP3C4Q5nmaZGW8Y=
github.com/blevesearch/gtreap v0.1.1/go.mod h1:QaQyDRAT51sotthUWAH4Sj08awFSSWzgYICSZ3w0tYk=
github.com/blevesearch/mmap-go v1.0.4 h1:OVhDhT5B/M1HNPpYPBKIEJaD0F3Si+CrEKULGCDPWmc=
github.com/blevesearch/mmap-go v1.0.4/go.mod h1:EWmEAOmdAS9z/pi/+Toxu99DnsbhG1TIxUoRmJw/pSs=
-github.com/blevesearch/scorch_segment_api/v2 v2.1.5 h1:1g713kpCQZ8u4a3stRGBfrwVOuGRnmxOVU5MQkUPrHU=
-github.com/blevesearch/scorch_segment_api/v2 v2.1.5/go.mod h1:f2nOkKS1HcjgIWZgDAErgBdxmr2eyt0Kn7IY+FU1Xe4=
+github.com/blevesearch/scorch_segment_api/v2 v2.1.6 h1:CdekX/Ob6YCYmeHzD72cKpwzBjvkOGegHOqhAkXp6yA=
+github.com/blevesearch/scorch_segment_api/v2 v2.1.6/go.mod h1:nQQYlp51XvoSVxcciBjtvuHPIVjlWrN1hX4qwK2cqdc=
github.com/blevesearch/segment v0.9.1 h1:+dThDy+Lvgj5JMxhmOVlgFfkUtZV2kw49xax4+jTfSU=
github.com/blevesearch/segment v0.9.1/go.mod h1:zN21iLm7+GnBHWTao9I+Au/7MBiL8pPFtJBJTsk6kQw=
github.com/blevesearch/snowballstem v0.9.0 h1:lMQ189YspGP6sXvZQ4WZ+MLawfV8wOmPoD/iWeNXm8s=
@@ -62,16 +64,16 @@ github.com/blevesearch/upsidedown_store_api v1.0.2 h1:U53Q6YoWEARVLd1OYNc9kvhBMG
github.com/blevesearch/upsidedown_store_api v1.0.2/go.mod h1:M01mh3Gpfy56Ps/UXHjEO/knbqyQ1Oamg8If49gRwrQ=
github.com/blevesearch/vellum v1.0.10 h1:HGPJDT2bTva12hrHepVT3rOyIKFFF4t7Gf6yMxyMIPI=
github.com/blevesearch/vellum v1.0.10/go.mod h1:ul1oT0FhSMDIExNjIxHqJoGpVrBpKCdgDQNxfqgJt7k=
-github.com/blevesearch/zapx/v11 v11.3.9 h1:y3ijS4h4MJdmQ07MHASxat4owAixreK2xdo76w9ncrw=
-github.com/blevesearch/zapx/v11 v11.3.9/go.mod h1:jcAYnQwlr+LqD2vLjDWjWiZDXDXGFqPbpPDRTd3XmS4=
-github.com/blevesearch/zapx/v12 v12.3.9 h1:MXGLlZ03oxXH3DMJTZaBaRj2xb6t4wQVZeZK/wu1M6w=
-github.com/blevesearch/zapx/v12 v12.3.9/go.mod h1:QXCMwmOkdLnMDgTN1P4CcuX5F851iUOtOwXbw0HMBYs=
-github.com/blevesearch/zapx/v13 v13.3.9 h1:+VAz9V0VmllHXlZV4DCvfYj0nqaZHgF3MeEHwOyRBwQ=
-github.com/blevesearch/zapx/v13 v13.3.9/go.mod h1:s+WjNp4WSDtrBVBpa37DUOd7S/Gr/jTZ7ST/MbCVj/0=
-github.com/blevesearch/zapx/v14 v14.3.9 h1:wuqxATgsTCNHM9xsOFOeFp8H2heZ/gMX/tsl9lRK8U4=
-github.com/blevesearch/zapx/v14 v14.3.9/go.mod h1:MWZ4v8AzFBRurhDzkLvokFW8ljcq9Evm27mkWe8OGbM=
-github.com/blevesearch/zapx/v15 v15.3.12 h1:w/kU9aHyfMDEdwHGZzCiakC3HZ9z5gYlXaALDC4Dct8=
-github.com/blevesearch/zapx/v15 v15.3.12/go.mod h1:tx53gDJS/7Oa3Je820cmVurqCuJ4dqdAy1kiDMV/IUo=
+github.com/blevesearch/zapx/v11 v11.3.10 h1:hvjgj9tZ9DeIqBCxKhi70TtSZYMdcFn7gDb71Xo/fvk=
+github.com/blevesearch/zapx/v11 v11.3.10/go.mod h1:0+gW+FaE48fNxoVtMY5ugtNHHof/PxCqh7CnhYdnMzQ=
+github.com/blevesearch/zapx/v12 v12.3.10 h1:yHfj3vXLSYmmsBleJFROXuO08mS3L1qDCdDK81jDl8s=
+github.com/blevesearch/zapx/v12 v12.3.10/go.mod h1:0yeZg6JhaGxITlsS5co73aqPtM04+ycnI6D1v0mhbCs=
+github.com/blevesearch/zapx/v13 v13.3.10 h1:0KY9tuxg06rXxOZHg3DwPJBjniSlqEgVpxIqMGahDE8=
+github.com/blevesearch/zapx/v13 v13.3.10/go.mod h1:w2wjSDQ/WBVeEIvP0fvMJZAzDwqwIEzVPnCPrz93yAk=
+github.com/blevesearch/zapx/v14 v14.3.10 h1:SG6xlsL+W6YjhX5N3aEiL/2tcWh3DO75Bnz77pSwwKU=
+github.com/blevesearch/zapx/v14 v14.3.10/go.mod h1:qqyuR0u230jN1yMmE4FIAuCxmahRQEOehF78m6oTgns=
+github.com/blevesearch/zapx/v15 v15.3.13 h1:6EkfaZiPlAxqXz0neniq35my6S48QI94W/wyhnpDHHQ=
+github.com/blevesearch/zapx/v15 v15.3.13/go.mod h1:Turk/TNRKj9es7ZpKK95PS7f6D44Y7fAFy8F4LXQtGg=
github.com/bluele/gcache v0.0.2 h1:WcbfdXICg7G/DGBh1PFfcirkWOQV+v077yF1pSy3DGw=
github.com/bluele/gcache v0.0.2/go.mod h1:m15KV+ECjptwSPxKhOhQoAFQVtUFjTVkc3H8o0t/fp0=
github.com/boombuler/barcode v1.0.1-0.20190219062509-6c824513bacc h1:biVzkmvwrH8WK8raXaxBx6fRVTlJILwEwQGL1I/ByEI=
@@ -81,16 +83,16 @@ github.com/bytedance/sonic v1.9.1 h1:6iJ6NqdoxCDr6mbY8h18oSO+cShGSMRGCEo7F2h0x8s
github.com/bytedance/sonic v1.9.1/go.mod h1:i736AoUSYt75HyZLoJW9ERYxcy6eaN6h4BZXU064P/U=
github.com/caarlos0/env/v9 v9.0.0 h1:SI6JNsOA+y5gj9njpgybykATIylrRMklbs5ch6wO6pc=
github.com/caarlos0/env/v9 v9.0.0/go.mod h1:ye5mlCVMYh6tZ+vCgrs/B95sj88cg5Tlnc0XIzgZ020=
-github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc=
github.com/cespare/xxhash/v2 v2.2.0 h1:DC2CZ1Ep5Y4k3ZQ899DldepgrayRUGE6BBZ/cd9Cj44=
github.com/cespare/xxhash/v2 v2.2.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
github.com/charmbracelet/bubbles v0.16.1 h1:6uzpAAaT9ZqKssntbvZMlksWHruQLNxg49H5WdeuYSY=
github.com/charmbracelet/bubbles v0.16.1/go.mod h1:2QCp9LFlEsBQMvIYERr7Ww2H2bA7xen1idUDIzm/+Xc=
github.com/charmbracelet/bubbletea v0.24.2 h1:uaQIKx9Ai6Gdh5zpTbGiWpytMU+CfsPp06RaW2cx/SY=
github.com/charmbracelet/bubbletea v0.24.2/go.mod h1:XdrNrV4J8GiyshTtx3DNuYkR1FDaJmO3l2nejekbsgg=
-github.com/charmbracelet/lipgloss v0.7.1 h1:17WMwi7N1b1rVWOjMT+rCh7sQkvDU75B2hbZpc5Kc1E=
-github.com/charmbracelet/lipgloss v0.7.1/go.mod h1:yG0k3giv8Qj8edTCbbg6AlQ5e8KNWpFujkNawKNhE2c=
+github.com/charmbracelet/lipgloss v0.9.1 h1:PNyd3jvaJbg4jRHKWXnCj1akQm4rh8dbEzN1p/u1KWg=
+github.com/charmbracelet/lipgloss v0.9.1/go.mod h1:1mPmG4cxScwUQALAAnacHaigiiHB9Pmr+v1VEawJl6I=
github.com/cheekybits/is v0.0.0-20150225183255-68e9c0620927 h1:SKI1/fuSdodxmNNyVBR8d7X/HuLnRpvvFO0AgyQk764=
+github.com/cheekybits/is v0.0.0-20150225183255-68e9c0620927/go.mod h1:h/aW8ynjgkuj+NQRlZcDbAbM1ORAbXjXX77sX7T289U=
github.com/chenzhuoyu/base64x v0.0.0-20211019084208-fb5309c8db06/go.mod h1:DH46F32mSOjUmXrMHnKwZdA8wcEefY7UVqBKYGjpdQY=
github.com/chenzhuoyu/base64x v0.0.0-20221115062448-fe3a3abad311 h1:qSGYFH7+jGhDF8vLC+iwCD4WpbV1EBDSzWkJODFLams=
github.com/chenzhuoyu/base64x v0.0.0-20221115062448-fe3a3abad311/go.mod h1:b583jCggY9gE99b6G5LEC39OIiVsWj+R97kbl5odCEk=
@@ -107,20 +109,25 @@ github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ3
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
-github.com/deckarep/golang-set/v2 v2.3.0 h1:qs18EKUfHm2X9fA50Mr/M5hccg2tNnVqsiBImnyDs0g=
-github.com/deckarep/golang-set/v2 v2.3.0/go.mod h1:VAky9rY/yGXJOLEDv3OMci+7wtDpOF4IN+y82NBOac4=
+github.com/deckarep/golang-set/v2 v2.3.1 h1:vjmkvJt/IV27WXPyYQpAh4bRyWJc5Y435D17XQ9QU5A=
+github.com/deckarep/golang-set/v2 v2.3.1/go.mod h1:VAky9rY/yGXJOLEDv3OMci+7wtDpOF4IN+y82NBOac4=
github.com/decred/dcrd/crypto/blake256 v1.0.0 h1:/8DMNYp9SGi5f0w7uCm6d6M4OU2rGFK09Y2A4Xv7EE0=
+github.com/decred/dcrd/crypto/blake256 v1.0.0/go.mod h1:sQl2p6Y26YV+ZOcSTP6thNdn47hh8kt6rqSlvmrXFAc=
github.com/decred/dcrd/dcrec/secp256k1/v4 v4.1.0 h1:HbphB4TFFXpv7MNrT52FGrrgVXF1owhMVTHFZIlnvd4=
github.com/decred/dcrd/dcrec/secp256k1/v4 v4.1.0/go.mod h1:DZGJHZMqrU4JJqFAWUS2UO1+lbSKsdiOoYi9Zzey7Fc=
github.com/disintegration/imaging v1.6.2 h1:w1LecBlG2Lnp8B3jk5zSuNqd7b4DXhcjwek1ei82L+c=
github.com/disintegration/imaging v1.6.2/go.mod h1:44/5580QXChDfwIclfc/PCwrr44amcmDAg8hxG0Ewe4=
+github.com/djherbis/times v1.5.0 h1:79myA211VwPhFTqUk8xehWrsEO+zcIZj0zT8mXPVARU=
+github.com/djherbis/times v1.5.0/go.mod h1:5q7FDLvbNg1L/KaBmPcWlVR9NmoKo3+ucqUA3ijQhA0=
github.com/dustinxie/ecc v0.0.0-20210511000915-959544187564 h1:I6KUy4CI6hHjqnyJLNCEi7YHVMkwwtfSr2k9splgdSM=
github.com/dustinxie/ecc v0.0.0-20210511000915-959544187564/go.mod h1:yekO+3ZShy19S+bsmnERmznGy9Rfg6dWWWpiGJjNAz8=
-github.com/foxxorcat/mopan-sdk-go v0.1.3 h1:6ww0ulyLDh6neXZBqUM2PDbxQ6lfdkQbr0FCh9BTY0Y=
-github.com/foxxorcat/mopan-sdk-go v0.1.3/go.mod h1:iWHA2JFhzmKR28ySp1ON0g6DjLaYtvb5jhTqPVTDW9A=
-github.com/foxxorcat/weiyun-sdk-go v0.1.2 h1:waRWIBmjL9GCcndJ8HvOYrrVB4hhoPYzRrn3I/Cnzqw=
-github.com/foxxorcat/weiyun-sdk-go v0.1.2/go.mod h1:AKsLFuWhWlClpGrg1zxTdMejugZEZtmhIuElAk3W83s=
+github.com/foxxorcat/mopan-sdk-go v0.1.4 h1:6utvPiBv8KDRDVKB7A4FERdrVxcHKZd2fBFCNuKcXzU=
+github.com/foxxorcat/mopan-sdk-go v0.1.4/go.mod h1:iWHA2JFhzmKR28ySp1ON0g6DjLaYtvb5jhTqPVTDW9A=
+github.com/foxxorcat/weiyun-sdk-go v0.1.3 h1:I5c5nfGErhq9DBumyjCVCggRA74jhgriMqRRFu5jeeY=
+github.com/foxxorcat/weiyun-sdk-go v0.1.3/go.mod h1:TPxzN0d2PahweUEHlOBWlwZSA+rELSUlGYMWgXRn9ps=
github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo=
+github.com/fxamacker/cbor/v2 v2.4.0 h1:ri0ArlOR+5XunOP8CRUowT0pSJOwhW098ZCUyskZD88=
+github.com/fxamacker/cbor/v2 v2.4.0/go.mod h1:TA1xS00nchWmaBnEIxPSE5oHLuJBAVvqrtAnWBwBCVo=
github.com/gabriel-vasile/mimetype v1.4.2 h1:w5qFW6JKBz9Y393Y4q372O9A7cUSequkh1Q7OhCmWKU=
github.com/gabriel-vasile/mimetype v1.4.2/go.mod h1:zApsH/mKG4w07erKIaJPFiX0Tsq9BFQgN3qGY5GnNgA=
github.com/gaoyb7/115drive-webdav v0.1.8 h1:EJt4PSmcbvBY4KUh2zSo5p6fN9LZFNkIzuKejipubVw=
@@ -136,12 +143,12 @@ github.com/gin-gonic/gin v1.9.1 h1:4idEAncQnU5cB7BeOkPtxjfCSye0AAm1R0RVIqJ+Jmg=
github.com/gin-gonic/gin v1.9.1/go.mod h1:hPrL7YrpYKXt5YId3A/Tnip5kqbEAP+KLuI3SUcPTeU=
github.com/go-chi/chi/v5 v5.0.10 h1:rLz5avzKpjqxrYwXNfmjkrYYXOyLJd37pz53UFHC6vk=
github.com/go-chi/chi/v5 v5.0.10/go.mod h1:DslCQbL2OYiznFReuXYUmQ2hGd1aDpCnlMNITLSKoi8=
-github.com/go-logfmt/logfmt v0.5.1/go.mod h1:WYhtIu8zTZfxdn5+rREduYbwxfcBr/Vr6KEVveWlfTs=
github.com/go-logr/logr v0.1.0/go.mod h1:ixOQHD9gLJUVQQ2ZOR7zLEifBX6tGkNJF4QyIY7sIas=
github.com/go-ole/go-ole v1.2.6 h1:/Fpf6oFPoeFik9ty7siob0G6Ke8QvQEuVcuChpwXzpY=
github.com/go-ole/go-ole v1.2.6/go.mod h1:pprOEPIfldk/42T2oK7lQ4v4JSDwmV0As9GaiUsvbm0=
github.com/go-playground/assert/v2 v2.0.1/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4=
github.com/go-playground/assert/v2 v2.2.0 h1:JvknZsQTYeFEAhQwI4qEt9cyV5ONwRHC+lYKSsYSR8s=
+github.com/go-playground/assert/v2 v2.2.0/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4=
github.com/go-playground/locales v0.14.0/go.mod h1:sawfccIbzZTqEDETgFXqTho0QybSa7l++s0DH+LDiLs=
github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/oXslEjJA=
github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY=
@@ -152,20 +159,27 @@ github.com/go-playground/validator/v10 v10.10.0/go.mod h1:74x4gJWsvQexRdW8Pn3dXS
github.com/go-playground/validator/v10 v10.11.0/go.mod h1:i+3WkQ1FvaUjjxh1kSvIA4dMGDBiPU55YFDl0WbKdWU=
github.com/go-playground/validator/v10 v10.14.0 h1:vgvQWe3XCz3gIeFDm/HnTIbj6UGmg/+t63MyGU2n5js=
github.com/go-playground/validator/v10 v10.14.0/go.mod h1:9iXMNT7sEkjXb0I+enO7QXmzG6QCsPWY4zveKFVRSyU=
-github.com/go-resty/resty/v2 v2.7.0 h1:me+K9p3uhSmXtrBZ4k9jcEAfJmuC8IivWHwaLZwPrFY=
github.com/go-resty/resty/v2 v2.7.0/go.mod h1:9PWDzw47qPphMRFfhsyk0NnSgvluHcljSMVIq3w7q0I=
+github.com/go-resty/resty/v2 v2.9.1 h1:PIgGx4VrHvag0juCJ4dDv3MiFRlDmP0vicBucwf+gLM=
+github.com/go-resty/resty/v2 v2.9.1/go.mod h1:4/GYJVjh9nhkhGR6AUNW3XhpDYNUr+Uvy9gV/VGZIy4=
github.com/go-sql-driver/mysql v1.7.0 h1:ueSltNNllEqE3qcWBTD0iQd3IpL/6U+mJxLkazJ7YPc=
github.com/go-sql-driver/mysql v1.7.0/go.mod h1:OXbVy3sEdcQ2Doequ6Z5BW6fXNQTmx+9S1MCJN5yJMI=
+github.com/go-webauthn/webauthn v0.8.6 h1:bKMtL1qzd2WTFkf1mFTVbreYrwn7dsYmEPjTq6QN90E=
+github.com/go-webauthn/webauthn v0.8.6/go.mod h1:emwVLMCI5yx9evTTvr0r+aOZCdWJqMfbRhF0MufyUog=
+github.com/go-webauthn/x v0.1.4 h1:sGmIFhcY70l6k7JIDfnjVBiAAFEssga5lXIUXe0GtAs=
+github.com/go-webauthn/x v0.1.4/go.mod h1:75Ug0oK6KYpANh5hDOanfDI+dvPWHk788naJVG/37H8=
github.com/goccy/go-json v0.9.7/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I=
github.com/goccy/go-json v0.10.2 h1:CrxCmQqYDkv1z7lO7Wbh2HN93uovUHgrECaO5ZrCXAU=
github.com/goccy/go-json v0.10.2/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I=
github.com/gogo/protobuf v1.3.1/go.mod h1:SlYgWuQ5SjCEi6WLHjHCa1yvBfUnHcTbrrZtXPKa29o=
github.com/golang-jwt/jwt/v4 v4.5.0 h1:7cYmW1XlMY7h7ii7UhUyChSgS5wUJEnm9uZVTGqOWzg=
github.com/golang-jwt/jwt/v4 v4.5.0/go.mod h1:m21LjoU+eqJr34lmDMbreY2eSTRJ1cv77w39/MY0Ch0=
+github.com/golang-jwt/jwt/v5 v5.0.0 h1:1n1XNM9hk7O9mnQoNBGolZvzebBQ7p93ULHRc28XJUE=
+github.com/golang-jwt/jwt/v5 v5.0.0/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk=
github.com/golang/geo v0.0.0-20210211234256-740aa86cb551 h1:gtexQ/VGyN+VVFRXSFiguSNcXmS6rkKT+X7FdIrTtfo=
github.com/golang/geo v0.0.0-20210211234256-740aa86cb551/go.mod h1:QZ0nwyI2jOfgRAoBvP+ab5aRr7c9x7lhGEJrKvBwjWI=
github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da h1:oI5xCqsCo564l8iNU+DwB5epxmsaqB+rhGL0m5jtYqE=
-github.com/golang/mock v1.6.0/go.mod h1:p6yTPP+5HYm5mzsMV8JkE6ZKdX+/wYM6Hr+LicevLPs=
+github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk=
@@ -177,13 +191,18 @@ github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/
github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38=
github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
+github.com/google/go-tpm v0.9.0 h1:sQF6YqWMi+SCXpsmS3fd21oPy/vSddwZry4JnmltHVk=
+github.com/google/go-tpm v0.9.0/go.mod h1:FkNVkc6C+IsvDI9Jw1OveJmxGZUUaKxtrpOS47QWKfU=
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
github.com/google/s2a-go v0.1.4 h1:1kZ/sQM3srePvKs3tXAvQzo66XfcReoqFpIpIccE7Oc=
+github.com/google/s2a-go v0.1.4/go.mod h1:Ej+mSEMGRnqRzjc7VtF+jdBwYG5fuJfiZ8ELkjEwM0A=
github.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
-github.com/google/uuid v1.3.0 h1:t6JiXgmwXMjEs8VusXIJk2BXHsn+wx8BZdTaoZ5fu7I=
-github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
+github.com/google/uuid v1.3.1 h1:KjJaJ9iWZ3jOFZIf1Lqf4laDRCasjl0BCmnEGxkdLb4=
+github.com/google/uuid v1.3.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/googleapis/enterprise-certificate-proxy v0.2.5 h1:UR4rDjcgpgEnqpIEvkiqTYKBCKLNmlge2eVjoZfySzM=
+github.com/googleapis/enterprise-certificate-proxy v0.2.5/go.mod h1:RxW0N9901Cko1VOCW3SXCpWP+mlIEkk2tP7jnHy9a3w=
github.com/googleapis/gax-go/v2 v2.12.0 h1:A+gCJKdRfqXkr+BIRGtZLibNXf0m1f9E4HG56etFpas=
+github.com/googleapis/gax-go/v2 v2.12.0/go.mod h1:y+aIqrI5eb1YGMVJfuV3185Ts/D7qKpsEkdD5+I6QGU=
github.com/gorilla/websocket v1.5.0 h1:PPwGk2jz7EePpoHN/+ClbZu8SPxiqlu12wZP/3sWmnc=
github.com/gorilla/websocket v1.5.0/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4=
@@ -196,12 +215,12 @@ github.com/hirochachacha/go-smb2 v1.1.0 h1:b6hs9qKIql9eVXAiN0M2wSFY5xnhbHAQoCwRK
github.com/hirochachacha/go-smb2 v1.1.0/go.mod h1:8F1A4d5EZzrGu5R7PU163UcMRDJQl4FtcxjBfsY8TZE=
github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8=
github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw=
-github.com/ipfs/boxo v0.8.0 h1:UdjAJmHzQHo/j3g3b1bAcAXCj/GM6iTwvSlBDvPBNBs=
-github.com/ipfs/boxo v0.8.0/go.mod h1:RIsi4CnTyQ7AUsNn5gXljJYZlQrHBMnJp94p73liFiA=
+github.com/ipfs/boxo v0.12.0 h1:AXHg/1ONZdRQHQLgG5JHsSC3XoE4DjCAMgK+asZvUcQ=
+github.com/ipfs/boxo v0.12.0/go.mod h1:xAnfiU6PtxWCnRqu7dcXQ10bB5/kvI1kXRotuGqGBhg=
github.com/ipfs/go-cid v0.4.1 h1:A/T3qGvxi4kpKWWcPC/PgbvDA2bjVLO7n4UeVwnbs/s=
github.com/ipfs/go-cid v0.4.1/go.mod h1:uQHwDeX4c6CtyrFwdqyhpNcxVewur1M7l7fNU7LKwZk=
-github.com/ipfs/go-ipfs-api v0.6.1 h1:nK5oeFOdMh1ogT+GCOcyBFOOcFGNuudSb1rg9YDyAKE=
-github.com/ipfs/go-ipfs-api v0.6.1/go.mod h1:8pl+ZMF2LX42szbqGbpOBEiI1/rYaImvTvJtG0g+rL4=
+github.com/ipfs/go-ipfs-api v0.7.0 h1:CMBNCUl0b45coC+lQCXEVpMhwoqjiaCwUIrM+coYW2Q=
+github.com/ipfs/go-ipfs-api v0.7.0/go.mod h1:AIxsTNB0+ZhkqIfTZpdZ0VR/cpX5zrXjATa3prSay3g=
github.com/jackc/pgpassfile v1.0.0 h1:/6Hmqy13Ss2zCq62VdNG8tM1wchn8zjSGOBJ6icpsIM=
github.com/jackc/pgpassfile v1.0.0/go.mod h1:CEx0iS5ambNFdcRtxPj5JhEz+xB6uRky5eyVu/W2HEg=
github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a h1:bbPeKD0xmW/Y25WS6cokEszi5g+S0QxI/d45PkRi7Nk=
@@ -209,6 +228,8 @@ github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a/go.mod h1:5TJZ
github.com/jackc/pgx/v5 v5.3.0 h1:/NQi8KHMpKWHInxXesC8yD4DhkXPrVhmnwYkjp9AmBA=
github.com/jackc/pgx/v5 v5.3.0/go.mod h1:t3JDKnCBlYIc0ewLF0Q7B8MXmoIaBOZj/ic7iHozM/8=
github.com/jackc/puddle/v2 v2.2.0/go.mod h1:vriiEXHvEE654aYKXXjOvZM39qJ0q+azkZFrfEOc3H4=
+github.com/jaevor/go-nanoid v1.3.0 h1:nD+iepesZS6pr3uOVf20vR9GdGgJW1HPaR46gtrxzkg=
+github.com/jaevor/go-nanoid v1.3.0/go.mod h1:SI+jFaPuddYkqkVQoNGHs81navCtH388TcrH0RqFKgY=
github.com/jinzhu/inflection v1.0.0 h1:K317FqzuhWc8YvSVlFMCCUb36O/S9MCKRDI7QkRKD/E=
github.com/jinzhu/inflection v1.0.0/go.mod h1:h+uFLlag+Qp1Va5pdKtLDYj+kHp5pxUVkryuEj+Srlc=
github.com/jinzhu/now v1.1.4/go.mod h1:d3SSVoowX0Lcu0IBviAWJpolVfI5UJVZZ7cO71lE/z8=
@@ -220,11 +241,9 @@ github.com/jmespath/go-jmespath v0.4.0 h1:BEgLn5cpjn8UN1mAw4NjwDrS35OdebyEtFe+9Y
github.com/jmespath/go-jmespath v0.4.0/go.mod h1:T8mJZnbsbmF+m6zOOFylbeCJqk5+pHWvzYPziyZiYoo=
github.com/jmespath/go-jmespath/internal/testify v1.5.1 h1:shLQSRRSCCPj3f2gpwzGwWFoC7ycTf1rcQZHOlsJ6N8=
github.com/jmespath/go-jmespath/internal/testify v1.5.1/go.mod h1:L3OGu8Wl2/fWfCI6z80xFu9LTZmf1ZRjMHUOPmWr69U=
-github.com/jpillora/backoff v1.0.0/go.mod h1:J/6gKK9jxlEcS3zixgDgUAsiuZ7yrSoa/FX5e0EB2j4=
github.com/json-iterator/go v1.1.10/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM=
github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo=
-github.com/julienschmidt/httprouter v1.3.0/go.mod h1:JR6WtHb+2LUe8TCKY3cZOxFyyO8IZAc4RVcycCCAKdM=
github.com/jzelinskie/whirlpool v0.0.0-20201016144138-0675e54bb004 h1:G+9t9cEtnC9jFiTxyptEKuNIAbiN5ZCQzX2a74lj3xg=
github.com/jzelinskie/whirlpool v0.0.0-20201016144138-0675e54bb004/go.mod h1:KmHnJWQrgEvbuy0vcvj00gtMqbvNn1L+3YUZLK/B92c=
github.com/kisielk/errcheck v1.2.0/go.mod h1:/BMXB+zMLi60iA8Vv6Ksmxu/1UDYcXs4uQLJ+jE2L00=
@@ -239,6 +258,7 @@ github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORN
github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI=
github.com/kr/pretty v0.3.0/go.mod h1:640gp4NfQd8pI5XOwp5fnNeVWj67G7CFk/SaSQn7NBk=
github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE=
+github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk=
github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
@@ -268,17 +288,18 @@ github.com/mattn/go-isatty v0.0.19/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D
github.com/mattn/go-localereader v0.0.1 h1:ygSAOl7ZXTx4RdPYinUpg6W99U8jWvWi9Ye2JC/oIi4=
github.com/mattn/go-localereader v0.0.1/go.mod h1:8fBrzywKY7BI3czFoHkuzRoWE9C+EiG4R1k4Cjx5p88=
github.com/mattn/go-runewidth v0.0.12/go.mod h1:RAqKPSqVFrSLVXbA8x7dzmKdmGzieGRCM46jaSJTDAk=
-github.com/mattn/go-runewidth v0.0.14 h1:+xnbZSEeDbOIg5/mE6JF0w6n9duR1l3/WmbinWVwUuU=
-github.com/mattn/go-runewidth v0.0.14/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w=
+github.com/mattn/go-runewidth v0.0.15 h1:UNAjwbU9l54TA3KzvqLGxwWjHmMgBUVhBiTjelZgg3U=
+github.com/mattn/go-runewidth v0.0.15/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w=
github.com/mattn/go-sqlite3 v1.14.15 h1:vfoHhTN1af61xCRSWzFIWzx2YskyMTwHLrExkBOjvxI=
github.com/mattn/go-sqlite3 v1.14.15/go.mod h1:2eHXhiwb8IkHr+BDWZGa96P6+rkvnG63S2DGjv9HUNg=
github.com/matttproud/golang_protobuf_extensions v1.0.4 h1:mmDVorXM7PCGKw94cs5zkfA9PSy5pEvNWRP0ET0TIVo=
github.com/matttproud/golang_protobuf_extensions v1.0.4/go.mod h1:BSXmuO+STAnVfrANrmjBb36TMTDstsz7MSK+HVaYKv4=
-github.com/minio/blake2b-simd v0.0.0-20160723061019-3f5f724cb5b1/go.mod h1:pD8RvIylQ358TN4wwqatJ8rNavkEINozVn9DtGI3dfQ=
github.com/minio/sha256-simd v1.0.0 h1:v1ta+49hkWZyvaKwrQB8elexRqm6Y0aMLjCNsrYxo6g=
github.com/minio/sha256-simd v1.0.0/go.mod h1:OuYzVNI5vcoYIAmbIvHPl3N3jUzVedXbKy5RFepssQM=
github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y=
github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0=
+github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY=
+github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo=
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg=
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
@@ -296,8 +317,8 @@ github.com/muesli/cancelreader v0.2.2 h1:3I4Kt4BQjOR54NavqnDogx/MIoWBFa0StPA8ELU
github.com/muesli/cancelreader v0.2.2/go.mod h1:3XuTXfFS2VjM+HTLZY9Ak0l6eUKfijIfMUZ4EgX0QYo=
github.com/muesli/reflow v0.3.0 h1:IFsN6K9NfGtjeggFP+68I4chLZV2yIKsXJFNZ+eWh6s=
github.com/muesli/reflow v0.3.0/go.mod h1:pbwTDkVPibjO2kyvBQRBxTWEEGDGq0FlB1BIKtnHY/8=
-github.com/muesli/termenv v0.15.1 h1:UzuTb/+hhlBugQz28rpzey4ZuKcZ03MeKsoG7IJZIxs=
-github.com/muesli/termenv v0.15.1/go.mod h1:HeAQPTzpfs016yGtA4g00CsdYnVLJvxsS4ANqrZs2sQ=
+github.com/muesli/termenv v0.15.2 h1:GohcuySI0QmI3wN8Ok9PtKGkgkFIk7y6Vpb5PvrY+Wo=
+github.com/muesli/termenv v0.15.2/go.mod h1:Epx+iuz8sNs7mNKhxzH4fWXGNpZwUaJKRS1noLXviQ8=
github.com/multiformats/go-base32 v0.1.0 h1:pVx9xoSPqEIQG8o+UbAe7DNi51oej1NtK+aGkbLYxPE=
github.com/multiformats/go-base32 v0.1.0/go.mod h1:Kj3tFY6zNr+ABYMqeUNeGvkIC/UYgtWibDcT0rExnbI=
github.com/multiformats/go-base36 v0.2.0 h1:lFsAbNOGeKtuKozrtBsAkSVhv1p9D0/qedU9rQyccr0=
@@ -306,34 +327,34 @@ github.com/multiformats/go-multiaddr v0.9.0 h1:3h4V1LHIk5w4hJHekMKWALPXErDfz/sgg
github.com/multiformats/go-multiaddr v0.9.0/go.mod h1:mI67Lb1EeTOYb8GQfL/7wpIZwc46ElrvzhYnoJOmTT0=
github.com/multiformats/go-multibase v0.2.0 h1:isdYCVLvksgWlMW9OZRYJEa9pZETFivncJHmHnnd87g=
github.com/multiformats/go-multibase v0.2.0/go.mod h1:bFBZX4lKCA/2lyOFSAoKH5SS6oPyjtnzK/XTFDPkNuk=
-github.com/multiformats/go-multicodec v0.8.1 h1:ycepHwavHafh3grIbR1jIXnKCsFm0fqsfEOsJ8NtKE8=
-github.com/multiformats/go-multicodec v0.8.1/go.mod h1:L3QTQvMIaVBkXOXXtVmYE+LI16i14xuaojr/H7Ai54k=
-github.com/multiformats/go-multihash v0.2.1 h1:aem8ZT0VA2nCHHk7bPJ1BjUbHNciqZC/d16Vve9l108=
-github.com/multiformats/go-multihash v0.2.1/go.mod h1:WxoMcYG85AZVQUyRyo9s4wULvW5qrI9vb2Lt6evduFc=
+github.com/multiformats/go-multicodec v0.9.0 h1:pb/dlPnzee/Sxv/j4PmkDRxCOi3hXTz3IbPKOXWJkmg=
+github.com/multiformats/go-multicodec v0.9.0/go.mod h1:L3QTQvMIaVBkXOXXtVmYE+LI16i14xuaojr/H7Ai54k=
+github.com/multiformats/go-multihash v0.2.3 h1:7Lyc8XfX/IY2jWb/gI7JP+o7JEq9hOa7BFvVU9RSh+U=
+github.com/multiformats/go-multihash v0.2.3/go.mod h1:dXgKXCXjBzdscBLk9JkjINiEsCKRVch90MdaGiKsvSM=
github.com/multiformats/go-multistream v0.4.1 h1:rFy0Iiyn3YT0asivDUIR05leAdwZq3de4741sbiSdfo=
github.com/multiformats/go-multistream v0.4.1/go.mod h1:Mz5eykRVAjJWckE2U78c6xqdtyNUEhKSM0Lwar2p77Q=
github.com/multiformats/go-varint v0.0.7 h1:sWSGR+f/eu5ABZA2ZpYKBILXTTs9JWpdEM/nEGOHFS8=
github.com/multiformats/go-varint v0.0.7/go.mod h1:r8PUYw/fD/SjBCiKOoDlGF6QawOELpZAu9eioSos/OU=
-github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U=
github.com/natefinch/lumberjack v2.0.0+incompatible h1:4QJd3OLAMgj7ph+yZTuX13Ld4UpgHp07nNdFX7mqFfM=
github.com/natefinch/lumberjack v2.0.0+incompatible/go.mod h1:Wi9p2TTF5DG5oU+6YfsmYQpsTIOm0B1VNzQg9Mw6nPk=
github.com/ncw/swift/v2 v2.0.2 h1:jx282pcAKFhmoZBSdMcCRFn9VWkoBIRsCpe+yZq7vEk=
github.com/ncw/swift/v2 v2.0.2/go.mod h1:z0A9RVdYPjNjXVo2pDOPxZ4eu3oarO1P91fTItcb+Kg=
-github.com/orzogc/fake115uploader v0.3.3-0.20221009101310-08b764073b77 h1:dg/EaaJLPIg4xn2kaZil7Ax3wfoxcFXaBwyOTlcz5AI=
-github.com/orzogc/fake115uploader v0.3.3-0.20221009101310-08b764073b77/go.mod h1:FD9a09Vw07CSMTdT0Y7ttStOa1WZsnPBslliMw2DkeM=
+github.com/orzogc/fake115uploader v0.3.3-0.20230715111618-58f9eb76f831 h1:K3T3eu4h5aYIOzUtLjN08L4Qt4WGaJONMgcaD0ayBJQ=
+github.com/orzogc/fake115uploader v0.3.3-0.20230715111618-58f9eb76f831/go.mod h1:lSHD4lC4zlMl+zcoysdJcd5KFzsWwOD8BJbyg1Ws9Ng=
github.com/panjf2000/ants/v2 v2.4.2/go.mod h1:f6F0NZVFsGCp5A7QW/Zj/m92atWwOkY0OIhFxRNFr4A=
github.com/pelletier/go-toml/v2 v2.0.1/go.mod h1:r9LEWfGN8R5k0VXJ+0BkIe7MYkRdwZOjgMj2KwnJFUo=
github.com/pelletier/go-toml/v2 v2.0.8 h1:0ctb6s9mE31h0/lhu+J6OPmVeDxJn+kYnJc2jZR9tGQ=
github.com/pelletier/go-toml/v2 v2.0.8/go.mod h1:vuYfssBdrU2XDZ9bYydBu6t+6a6PYNcZljzZR9VXg+4=
-github.com/pierrec/lz4/v4 v4.1.17 h1:kV4Ip+/hUBC+8T6+2EgburRtkE9ef4nbY3f4dFhGjMc=
-github.com/pierrec/lz4/v4 v4.1.17/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4=
+github.com/pierrec/lz4/v4 v4.1.18 h1:xaKrnTkyoqfh1YItXl56+6KJNVYWlEEPuAQW9xsplYQ=
+github.com/pierrec/lz4/v4 v4.1.18/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4=
github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA=
github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
-github.com/pkg/sftp v1.13.6-0.20230213180117-971c283182b6 h1:5TvW1dv00Y13njmQ1AWkxSWtPkwE7ZEF6yDuv9q+Als=
-github.com/pkg/sftp v1.13.6-0.20230213180117-971c283182b6/go.mod h1:tz1ryNURKu77RL+GuCzmoJYxQczL3wLNNpPWagdg4Qk=
+github.com/pkg/sftp v1.13.6 h1:JFZT4XbOU7l77xGSpOdW+pwIMqP044IyjXX6FGyEKFo=
+github.com/pkg/sftp v1.13.6/go.mod h1:tz1ryNURKu77RL+GuCzmoJYxQczL3wLNNpPWagdg4Qk=
github.com/pkg/xattr v0.4.9 h1:5883YPCtkSd8LFbs13nXplj9g9tlrwoJRjgpgMu1/fE=
+github.com/pkg/xattr v0.4.9/go.mod h1:di8WF84zAKk8jzR1UBTEWh9AUlIZZ7M/JNt8e9B6ktU=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c/go.mod h1:OmDBASR4679mdNQnz2pUhc2G8CO2JrUAVFDRBDP/hJE=
@@ -362,6 +383,7 @@ github.com/rivo/uniseg v0.4.4/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUc
github.com/rogpeppe/go-internal v1.6.1/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc=
github.com/rogpeppe/go-internal v1.8.0/go.mod h1:WmiCO8CzOY8rg0OYDC4/i/2WRWAB6poM+XZ2dLUbcbE=
github.com/rogpeppe/go-internal v1.10.0 h1:TMyTOH3F/DB16zRVcYyreMH6GnZZrwQVAoYjRBZyWFQ=
+github.com/rogpeppe/go-internal v1.10.0/go.mod h1:UQnix2H7Ngw/k4C5ijL5+65zddjncjaFoBhdsK/akog=
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
github.com/shirou/gopsutil/v3 v3.23.7 h1:C+fHO8hfIppoJ1WdsVm1RoI0RwXoNdfTK7yWXV0wVj4=
github.com/shirou/gopsutil/v3 v3.23.7/go.mod h1:c4gnmoRC0hQuaLqvxnx1//VXQ0Ms/X9UnJF8pddY5z4=
@@ -375,6 +397,7 @@ github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVs
github.com/skip2/go-qrcode v0.0.0-20200617195104-da1b6568686e h1:MRM5ITcdelLK2j1vwZ3Je0FKVCfqOLp5zO6trqMLYs0=
github.com/skip2/go-qrcode v0.0.0-20200617195104-da1b6568686e/go.mod h1:XV66xRDqSt+GTGFMVlhk3ULuV0y9ZmzeVGR4mloJI3M=
github.com/skratchdot/open-golang v0.0.0-20200116055534-eef842397966 h1:JIAuq3EEf9cgbU6AtGPK4CTG3Zf6CKMNqf0MHTggAUA=
+github.com/skratchdot/open-golang v0.0.0-20200116055534-eef842397966/go.mod h1:sUM3LWHvSMaG192sy56D9F7CNvL7jUJVXoqM1QKLnog=
github.com/spaolacci/murmur3 v1.1.0 h1:7c1g84S4BPRrfL5Xrdp6fOJ206sU9y293DDHaoy0bLI=
github.com/spaolacci/murmur3 v1.1.0/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA=
github.com/spf13/afero v1.2.2/go.mod h1:9ZxEEn6pIJ8Rxe320qSDBk6AsU0r9pR7Q4OcevTdifk=
@@ -408,8 +431,8 @@ github.com/tklauser/numcpus v0.6.1 h1:ng9scYS7az0Bk4OZLvrNXNSAO2Pxr1XXRAPyjhIx+F
github.com/tklauser/numcpus v0.6.1/go.mod h1:1XfjsgE2zo8GVw7POkMbHENHzVg3GzmoZ9fESEdAacY=
github.com/twitchyliquid64/golang-asm v0.15.1 h1:SU5vSMR7hnwNxj24w34ZyCi/FmDZTkS4MhqMhdFk5YI=
github.com/twitchyliquid64/golang-asm v0.15.1/go.mod h1:a1lVb/DtPvCB8fslRZhAngC2+aY1QWCk3Cedj/Gdt08=
-github.com/u2takey/ffmpeg-go v0.4.1 h1:l5ClIwL3N2LaH1zF3xivb3kP2HW95eyG5xhHE1JdZ9Y=
-github.com/u2takey/ffmpeg-go v0.4.1/go.mod h1:ruZWkvC1FEiUNjmROowOAps3ZcWxEiOpFoHCvk97kGc=
+github.com/u2takey/ffmpeg-go v0.5.0 h1:r7d86XuL7uLWJ5mzSeQ03uvjfIhiJYvsRAJFCW4uklU=
+github.com/u2takey/ffmpeg-go v0.5.0/go.mod h1:ruZWkvC1FEiUNjmROowOAps3ZcWxEiOpFoHCvk97kGc=
github.com/u2takey/go-utils v0.3.1 h1:TaQTgmEZZeDHQFYfd+AdUT1cT4QJgJn/XVPELhHw4ys=
github.com/u2takey/go-utils v0.3.1/go.mod h1:6e+v5vEZ/6gu12w/DC2ixZdZtCrNokVxD0JUklcqdCs=
github.com/ugorji/go v1.2.7/go.mod h1:nF9osbDWLy6bDVv/Rtoh6QgnvNDpmCalQV5urGCCS6M=
@@ -418,15 +441,25 @@ github.com/ugorji/go/codec v1.2.11 h1:BMaWp1Bb6fHwEtbplGBGJ498wD+LKlNSl25MjdZY4d
github.com/ugorji/go/codec v1.2.11/go.mod h1:UNopzCgEMSXjBc6AOMqYvWC1ktqTAfzJZUZgYf6w6lg=
github.com/upyun/go-sdk/v3 v3.0.4 h1:2DCJa/Yi7/3ZybT9UCPATSzvU3wpPPxhXinNlb1Hi8Q=
github.com/upyun/go-sdk/v3 v3.0.4/go.mod h1:P/SnuuwhrIgAVRd/ZpzDWqCsBAf/oHg7UggbAxyZa0E=
-github.com/valyala/fastjson v1.6.3 h1:tAKFnnwmeMGPbwJ7IwxcTPCNr3uIzoIj3/Fh90ra4xc=
-github.com/winfsp/cgofuse v1.5.1-0.20221118130120-84c0898ad2e0 h1:j3un8DqYvvAOqKI5OPz+/RRVhDFipbPKI4t2Uk5RBJw=
-github.com/winfsp/cgofuse v1.5.1-0.20221118130120-84c0898ad2e0/go.mod h1:uxjoF2jEYT3+x+vC2KJddEGdk/LU8pRowXmyVMHSV5I=
+github.com/valyala/fastjson v1.6.4 h1:uAUNq9Z6ymTgGhcm0UynUAB6tlbakBrz6CQFax3BXVQ=
+github.com/valyala/fastjson v1.6.4/go.mod h1:CLCAqky6SMuOcxStkYQvblddUtoRxhYMGLrsQns1aXY=
+github.com/winfsp/cgofuse v1.5.1-0.20230130140708-f87f5db493b5 h1:jxZvjx8Ve5sOXorZG0KzTxbp0Cr1n3FEegfmyd9br1k=
+github.com/winfsp/cgofuse v1.5.1-0.20230130140708-f87f5db493b5/go.mod h1:uxjoF2jEYT3+x+vC2KJddEGdk/LU8pRowXmyVMHSV5I=
+github.com/x448/float16 v0.8.4 h1:qLwI1I70+NjRFUR3zs1JPUCgaCXSh3SW62uAKT1mSBM=
+github.com/x448/float16 v0.8.4/go.mod h1:14CWIYCyZA/cWjXOioeEpHeN/83MdbZDRQHoFcYsOfg=
+github.com/xhofe/gsync v0.0.0-20230917091818-2111ceb38a25 h1:eDfebW/yfq9DtG9RO3KP7BT2dot2CvJGIvrB0NEoDXI=
+github.com/xhofe/gsync v0.0.0-20230917091818-2111ceb38a25/go.mod h1:fH4oNm5F9NfI5dLi0oIMtsLNKQOirUDbEMCIBb/7SU0=
+github.com/xhofe/tache v0.1.0 h1:W0uoyLWCmUEQudXwB93owdlGSlN8gwZmiiDlKFCerKA=
+github.com/xhofe/tache v0.1.0/go.mod h1:iKumPFvywf30FRpAHHCt64G0JHLMzT0K+wyGedHsmTQ=
+github.com/xhofe/tache v0.1.1 h1:O5QY4cVjIGELx3UGh6LbVAc18MWGXgRNQjMt72x6w/8=
+github.com/xhofe/tache v0.1.1/go.mod h1:iKumPFvywf30FRpAHHCt64G0JHLMzT0K+wyGedHsmTQ=
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
github.com/yusufpapurcu/wmi v1.2.3 h1:E1ctvB7uKFMOJw3fdOW32DwGE9I7t++CRUEMKvFoFiw=
github.com/yusufpapurcu/wmi v1.2.3/go.mod h1:SBZ9tNy3G9/m5Oi98Zks0QjeHVDvuK0qfxQmPyzfmi0=
go.etcd.io/bbolt v1.3.7 h1:j+zJOnnEjF/kyHlDDgGnVL/AIqIJPq8UoB2GSNfkUfQ=
go.etcd.io/bbolt v1.3.7/go.mod h1:N9Mkw9X8x5fupy0IKsmuqVtoGDyxsaDlbk4Rd05IAQw=
go.opencensus.io v0.24.0 h1:y73uSU6J157QMP2kn2r30vwW1A2W2WFwSCGnAVxeaD0=
+go.opencensus.io v0.24.0/go.mod h1:vNK8G9p7aAivkbmorf4v+7Hgx+Zs0yY+0fOtgBfjQKo=
gocv.io/x/gocv v0.25.0/go.mod h1:Rar2PS6DV+T4FL+PM535EImD/h13hGVaHhnCu1xarBs=
golang.org/x/arch v0.0.0-20210923205945-b76863e36670/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8=
golang.org/x/arch v0.3.0 h1:02VY4/ZcO/gBOH6PUaoiptASxtXU10jazRCP865E97k=
@@ -441,13 +474,14 @@ golang.org/x/crypto v0.0.0-20211215153901-e495a2d5b3d3/go.mod h1:IxCIyHEi3zRg3s0
golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
golang.org/x/crypto v0.1.0/go.mod h1:RecgLatLF4+eUMCP1PoPZQb+cVrJcOPbHkTkbkB9sbw=
golang.org/x/crypto v0.6.0/go.mod h1:OFC/31mSvZgRz0V1QTNCzfAI1aIRzbiufJtkMIlEp58=
-golang.org/x/crypto v0.11.0 h1:6Ewdq3tDic1mg5xRO4milcWCfMVQhI4NkqWWvqejpuA=
-golang.org/x/crypto v0.11.0/go.mod h1:xgJhtzW8F9jGdVFWZESrid1U1bjeNy4zgy5cRr/CIio=
-golang.org/x/exp v0.0.0-20230801115018-d63ba01acd4b h1:r+vk0EmXNmekl0S0BascoeeoHk/L7wmaW2QF90K+kYI=
-golang.org/x/exp v0.0.0-20230801115018-d63ba01acd4b/go.mod h1:FXUEEKJgO7OQYeo8N01OfiKP8RXMtf6e8aTskBGqWdc=
+golang.org/x/crypto v0.13.0/go.mod h1:y6Z2r+Rw4iayiXXAIxJIDAJ1zMW4yaTpebo8fPOliYc=
+golang.org/x/crypto v0.14.0 h1:wBqGXzWJW6m1XrIKlAH0Hs1JJ7+9KBwnIO8v66Q9cHc=
+golang.org/x/crypto v0.14.0/go.mod h1:MVFd36DqK4CsrnJYDkBA3VC4m2GkXAM0PvzMCn4JQf4=
+golang.org/x/exp v0.0.0-20231006140011-7918f672742d h1:jtJma62tbqLibJ5sFQz8bKtEM8rJBtfilJ2qTU199MI=
+golang.org/x/exp v0.0.0-20231006140011-7918f672742d/go.mod h1:ldy0pHrwJyGW56pPQzzkH36rKxoZW1tw7ZJpeKx+hdo=
golang.org/x/image v0.0.0-20191009234506-e7c1f5e7dbb8/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
-golang.org/x/image v0.10.0 h1:gXjUUtwtx5yOE0VKWq1CH4IJAClq4UGgUA3i+rpON9M=
-golang.org/x/image v0.10.0/go.mod h1:jtrku+n79PfroUbvDdeUWMAI+heR786BofxrbiSF+J0=
+golang.org/x/image v0.11.0 h1:ds2RoQvBvYTiJkwpSFDwCcDFNX7DqjL2WsUgTNk0Ooo=
+golang.org/x/image v0.11.0/go.mod h1:bglhjqbqVuEb9e9+eNR45Jfu7D+T4Qan+NhQk8Ck2P8=
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
@@ -460,10 +494,12 @@ golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qx
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
golang.org/x/net v0.1.0/go.mod h1:Cx3nUiGt4eDBEyega/BKRp+/AlGL8hYe7U9odMt2Cco=
golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
-golang.org/x/net v0.13.0 h1:Nvo8UFsZ8X3BhAC9699Z1j7XQ3rsZnUUm7jfBEk1ueY=
-golang.org/x/net v0.13.0/go.mod h1:zEVYFnQC7m/vmpQFELhcD1EWkZlX69l4oqgmer6hfKA=
-golang.org/x/oauth2 v0.10.0 h1:zHCpF2Khkwy4mMB4bv0U37YtJdTGW8jI0glAApi0Kh8=
-golang.org/x/oauth2 v0.10.0/go.mod h1:kTpgurOux7LqtuxjuyZa4Gj2gdezIt/jQtGnNFfypQI=
+golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg=
+golang.org/x/net v0.15.0/go.mod h1:idbUs1IY1+zTqbi8yxTbhexhEEk5ur9LInksu6HrEpk=
+golang.org/x/net v0.17.0 h1:pVaXccu2ozPjCXewfr1S7xza/zcXTity9cCdXQYSjIM=
+golang.org/x/net v0.17.0/go.mod h1:NxSsAGuq816PNPmqtQdLE42eU2Fs7NoRIZrHJAlaCOE=
+golang.org/x/oauth2 v0.12.0 h1:smVPGxink+n1ZI5pkQa8y6fZT0RW0MgCO5bFpepy4B4=
+golang.org/x/oauth2 v0.12.0/go.mod h1:A74bZ3aGXgCY0qaIC9Ahg6Lglin4AMAco8cIv9baba4=
golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
@@ -492,14 +528,18 @@ golang.org/x/sys v0.2.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
-golang.org/x/sys v0.10.0 h1:SqMFp9UcQJZa+pmYuAKjd9xq1f0j5rLcDIk0mj4qAsA=
golang.org/x/sys v0.10.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.13.0 h1:Af8nKPmuFypiUBjVoU9V20FiaFXOcuZI21p0ycVYYGE=
+golang.org/x/sys v0.13.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
golang.org/x/term v0.1.0/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
-golang.org/x/term v0.10.0 h1:3R7pNqamzBraeqj/Tj8qt1aQ2HpmlC+Cx/qL/7hn4/c=
-golang.org/x/term v0.10.0/go.mod h1:lpqdcUyK/oCiQxvxVrppt5ggO2KCZ5QblwqPnfZ6d5o=
+golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo=
+golang.org/x/term v0.12.0/go.mod h1:owVbMEjm3cBLCHdkQu9b1opXd4ETQWc3BhuQGKgXgvU=
+golang.org/x/term v0.13.0 h1:bb+I9cTfFazGW51MZqBVmZy7+JEJMouUHTUSKVQLBek=
+golang.org/x/term v0.13.0/go.mod h1:LTmsnFJwVN6bCy1rVCoS+qHT1HhALEFxKncY3WNNh4U=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
@@ -507,9 +547,12 @@ golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
golang.org/x/text v0.4.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
-golang.org/x/text v0.11.0 h1:LAntKIrcmeSKERyiOh0XMV39LXS8IE9UL2yP7+f5ij4=
-golang.org/x/text v0.11.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
+golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
+golang.org/x/text v0.12.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
+golang.org/x/text v0.13.0 h1:ablQoSUd0tRdKxZewP80B+BaqeKJuVhuRxj/dkrun3k=
+golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
+golang.org/x/time v0.0.0-20211116232009-f0f3c7e86c11/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20220722155302-e5dcc9cfc0b9/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.3.0 h1:rg5rLMjNzMS1RkNLzCG38eapWhnYLFYXDXj2gOlr8j4=
golang.org/x/time v0.3.0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
diff --git a/internal/aria2/add.go b/internal/aria2/add.go
deleted file mode 100644
index 4eb83f3d..00000000
--- a/internal/aria2/add.go
+++ /dev/null
@@ -1,61 +0,0 @@
-package aria2
-
-import (
- "context"
- "fmt"
- "path/filepath"
-
- "github.com/alist-org/alist/v3/internal/conf"
- "github.com/alist-org/alist/v3/internal/errs"
- "github.com/alist-org/alist/v3/internal/op"
- "github.com/alist-org/alist/v3/pkg/task"
- "github.com/google/uuid"
- "github.com/pkg/errors"
-)
-
-func AddURI(ctx context.Context, uri string, dstDirPath string) error {
- // check storage
- storage, dstDirActualPath, err := op.GetStorageAndActualPath(dstDirPath)
- if err != nil {
- return errors.WithMessage(err, "failed get storage")
- }
- // check is it could upload
- if storage.Config().NoUpload {
- return errors.WithStack(errs.UploadNotSupported)
- }
- // check path is valid
- obj, err := op.Get(ctx, storage, dstDirActualPath)
- if err != nil {
- if !errs.IsObjectNotFound(err) {
- return errors.WithMessage(err, "failed get object")
- }
- } else {
- if !obj.IsDir() {
- // can't add to a file
- return errors.WithStack(errs.NotFolder)
- }
- }
- // call aria2 rpc
- tempDir := filepath.Join(conf.Conf.TempDir, "aria2", uuid.NewString())
- options := map[string]interface{}{
- "dir": tempDir,
- }
- gid, err := client.AddURI([]string{uri}, options)
- if err != nil {
- return errors.Wrapf(err, "failed to add uri %s", uri)
- }
- DownTaskManager.Submit(task.WithCancelCtx(&task.Task[string]{
- ID: gid,
- Name: fmt.Sprintf("download %s to [%s](%s)", uri, storage.GetStorage().MountPath, dstDirActualPath),
- Func: func(tsk *task.Task[string]) error {
- m := &Monitor{
- tsk: tsk,
- tempDir: tempDir,
- retried: 0,
- dstDirPath: dstDirPath,
- }
- return m.Loop()
- },
- }))
- return nil
-}
diff --git a/internal/aria2/aria2.go b/internal/aria2/aria2.go
deleted file mode 100644
index 7250afab..00000000
--- a/internal/aria2/aria2.go
+++ /dev/null
@@ -1,42 +0,0 @@
-package aria2
-
-import (
- "context"
- "time"
-
- "github.com/alist-org/alist/v3/internal/conf"
- "github.com/alist-org/alist/v3/internal/setting"
- "github.com/alist-org/alist/v3/pkg/aria2/rpc"
- "github.com/alist-org/alist/v3/pkg/task"
- "github.com/pkg/errors"
- log "github.com/sirupsen/logrus"
-)
-
-var DownTaskManager = task.NewTaskManager[string](3)
-var notify = NewNotify()
-var client rpc.Client
-
-func InitClient(timeout int) (string, error) {
- client = nil
- uri := setting.GetStr(conf.Aria2Uri)
- secret := setting.GetStr(conf.Aria2Secret)
- return InitAria2Client(uri, secret, timeout)
-}
-
-func InitAria2Client(uri string, secret string, timeout int) (string, error) {
- c, err := rpc.New(context.Background(), uri, secret, time.Duration(timeout)*time.Second, notify)
- if err != nil {
- return "", errors.Wrap(err, "failed to init aria2 client")
- }
- version, err := c.GetVersion()
- if err != nil {
- return "", errors.Wrapf(err, "failed get aria2 version")
- }
- client = c
- log.Infof("using aria2 version: %s", version.Version)
- return version.Version, nil
-}
-
-func IsAria2Ready() bool {
- return client != nil
-}
diff --git a/internal/aria2/aria2_test.go b/internal/aria2/aria2_test.go
deleted file mode 100644
index 1e1b296b..00000000
--- a/internal/aria2/aria2_test.go
+++ /dev/null
@@ -1,87 +0,0 @@
-package aria2
-
-import (
- "context"
- "path/filepath"
- "testing"
- "time"
-
- _ "github.com/alist-org/alist/v3/drivers"
- conf2 "github.com/alist-org/alist/v3/internal/conf"
- "github.com/alist-org/alist/v3/internal/db"
- "github.com/alist-org/alist/v3/internal/model"
- "github.com/alist-org/alist/v3/internal/op"
- "github.com/alist-org/alist/v3/pkg/task"
- "gorm.io/driver/sqlite"
- "gorm.io/gorm"
-)
-
-func init() {
- conf2.Conf = conf2.DefaultConfig()
- absPath, err := filepath.Abs("../../data/temp")
- if err != nil {
- panic(err)
- }
- conf2.Conf.TempDir = absPath
- dB, err := gorm.Open(sqlite.Open("file::memory:?cache=shared"), &gorm.Config{})
- if err != nil {
- panic("failed to connect database")
- }
- db.Init(dB)
-}
-
-func TestConnect(t *testing.T) {
- _, err := InitAria2Client("http://localhost:16800/jsonrpc", "secret", 3)
- if err != nil {
- t.Errorf("failed to init aria2: %+v", err)
- }
-}
-
-func TestDown(t *testing.T) {
- TestConnect(t)
- _, err := op.CreateStorage(context.Background(), model.Storage{
- ID: 0,
- MountPath: "/",
- Order: 0,
- Driver: "Local",
- Status: "",
- Addition: `{"root_folder":"../../data"}`,
- Remark: "",
- })
- if err != nil {
- t.Fatalf("failed to create storage: %+v", err)
- }
- err = AddURI(context.Background(), "https://nodejs.org/dist/index.json", "/test")
- if err != nil {
- t.Errorf("failed to add uri: %+v", err)
- }
- tasks := DownTaskManager.GetAll()
- if len(tasks) != 1 {
- t.Errorf("failed to get tasks: %+v", tasks)
- }
- for {
- tsk := tasks[0]
- t.Logf("task: %+v", tsk)
- if tsk.GetState() == task.SUCCEEDED {
- break
- }
- if tsk.GetState() == task.ERRORED {
- t.Fatalf("failed to download: %+v", tsk)
- }
- time.Sleep(time.Second)
- }
- for {
- if len(TransferTaskManager.GetAll()) == 0 {
- continue
- }
- tsk := TransferTaskManager.GetAll()[0]
- t.Logf("task: %+v", tsk)
- if tsk.GetState() == task.SUCCEEDED {
- break
- }
- if tsk.GetState() == task.ERRORED {
- t.Fatalf("failed to download: %+v", tsk)
- }
- time.Sleep(time.Second)
- }
-}
diff --git a/internal/aria2/monitor.go b/internal/aria2/monitor.go
deleted file mode 100644
index 583cd72d..00000000
--- a/internal/aria2/monitor.go
+++ /dev/null
@@ -1,185 +0,0 @@
-package aria2
-
-import (
- "fmt"
- "os"
- "path"
- "path/filepath"
- "strconv"
- "sync"
- "sync/atomic"
- "time"
-
- "github.com/alist-org/alist/v3/internal/model"
- "github.com/alist-org/alist/v3/internal/op"
- "github.com/alist-org/alist/v3/pkg/task"
- "github.com/alist-org/alist/v3/pkg/utils"
- "github.com/pkg/errors"
- log "github.com/sirupsen/logrus"
-)
-
-type Monitor struct {
- tsk *task.Task[string]
- tempDir string
- retried int
- c chan int
- dstDirPath string
- finish chan struct{}
-}
-
-func (m *Monitor) Loop() error {
- defer func() {
- notify.Signals.Delete(m.tsk.ID)
- // clear temp dir, should do while complete
- //_ = os.RemoveAll(m.tempDir)
- }()
- m.c = make(chan int)
- m.finish = make(chan struct{})
- notify.Signals.Store(m.tsk.ID, m.c)
- var (
- err error
- ok bool
- )
-outer:
- for {
- select {
- case <-m.tsk.Ctx.Done():
- _, err := client.Remove(m.tsk.ID)
- return err
- case <-m.c:
- ok, err = m.Update()
- if ok {
- break outer
- }
- case <-time.After(time.Second * 2):
- ok, err = m.Update()
- if ok {
- break outer
- }
- }
- }
- if err != nil {
- return err
- }
- m.tsk.SetStatus("aria2 download completed, transferring")
- <-m.finish
- m.tsk.SetStatus("completed")
- return nil
-}
-
-func (m *Monitor) Update() (bool, error) {
- info, err := client.TellStatus(m.tsk.ID)
- if err != nil {
- m.retried++
- log.Errorf("failed to get status of %s, retried %d times", m.tsk.ID, m.retried)
- return false, nil
- }
- if m.retried > 5 {
- return true, errors.Errorf("failed to get status of %s, retried %d times", m.tsk.ID, m.retried)
- }
- m.retried = 0
- if len(info.FollowedBy) != 0 {
- log.Debugf("followen by: %+v", info.FollowedBy)
- gid := info.FollowedBy[0]
- notify.Signals.Delete(m.tsk.ID)
- oldId := m.tsk.ID
- m.tsk.ID = gid
- DownTaskManager.RawTasks().Delete(oldId)
- DownTaskManager.RawTasks().Store(m.tsk.ID, m.tsk)
- notify.Signals.Store(gid, m.c)
- return false, nil
- }
- // update download status
- total, err := strconv.ParseUint(info.TotalLength, 10, 64)
- if err != nil {
- total = 0
- }
- downloaded, err := strconv.ParseUint(info.CompletedLength, 10, 64)
- if err != nil {
- downloaded = 0
- }
- progress := float64(downloaded) / float64(total) * 100
- m.tsk.SetProgress(int(progress))
- switch info.Status {
- case "complete":
- err := m.Complete()
- return true, errors.WithMessage(err, "failed to transfer file")
- case "error":
- return true, errors.Errorf("failed to download %s, error: %s", m.tsk.ID, info.ErrorMessage)
- case "active":
- m.tsk.SetStatus("aria2: " + info.Status)
- if info.Seeder == "true" {
- err := m.Complete()
- return true, errors.WithMessage(err, "failed to transfer file")
- }
- return false, nil
- case "waiting", "paused":
- m.tsk.SetStatus("aria2: " + info.Status)
- return false, nil
- case "removed":
- return true, errors.Errorf("failed to download %s, removed", m.tsk.ID)
- default:
- return true, errors.Errorf("failed to download %s, unknown status %s", m.tsk.ID, info.Status)
- }
-}
-
-var TransferTaskManager = task.NewTaskManager(3, func(k *uint64) {
- atomic.AddUint64(k, 1)
-})
-
-func (m *Monitor) Complete() error {
- // check dstDir again
- storage, dstDirActualPath, err := op.GetStorageAndActualPath(m.dstDirPath)
- if err != nil {
- return errors.WithMessage(err, "failed get storage")
- }
- // get files
- files, err := client.GetFiles(m.tsk.ID)
- log.Debugf("files len: %d", len(files))
- if err != nil {
- return errors.Wrapf(err, "failed to get files of %s", m.tsk.ID)
- }
- // upload files
- var wg sync.WaitGroup
- wg.Add(len(files))
- go func() {
- wg.Wait()
- err := os.RemoveAll(m.tempDir)
- m.finish <- struct{}{}
- if err != nil {
- log.Errorf("failed to remove aria2 temp dir: %+v", err.Error())
- }
- }()
- for i, _ := range files {
- file := files[i]
- TransferTaskManager.Submit(task.WithCancelCtx(&task.Task[uint64]{
- Name: fmt.Sprintf("transfer %s to [%s](%s)", file.Path, storage.GetStorage().MountPath, dstDirActualPath),
- Func: func(tsk *task.Task[uint64]) error {
- defer wg.Done()
- size, _ := strconv.ParseInt(file.Length, 10, 64)
- mimetype := utils.GetMimeType(file.Path)
- f, err := os.Open(file.Path)
- if err != nil {
- return errors.Wrapf(err, "failed to open file %s", file.Path)
- }
- stream := &model.FileStream{
- Obj: &model.Object{
- Name: path.Base(file.Path),
- Size: size,
- Modified: time.Now(),
- IsFolder: false,
- },
- ReadCloser: f,
- Mimetype: mimetype,
- }
- relDir, err := filepath.Rel(m.tempDir, filepath.Dir(file.Path))
- if err != nil {
- log.Errorf("find relation directory error: %v", err)
- }
- newDistDir := filepath.Join(dstDirActualPath, relDir)
- return op.Put(tsk.Ctx, storage, newDistDir, stream, tsk.SetProgress)
- },
- }))
- }
- return nil
-}
diff --git a/internal/authn/authn.go b/internal/authn/authn.go
new file mode 100755
index 00000000..df1d1fc6
--- /dev/null
+++ b/internal/authn/authn.go
@@ -0,0 +1,25 @@
+package authn
+
+import (
+ "net/http"
+ "net/url"
+
+ "github.com/alist-org/alist/v3/internal/conf"
+ "github.com/alist-org/alist/v3/internal/setting"
+ "github.com/alist-org/alist/v3/server/common"
+ "github.com/go-webauthn/webauthn/webauthn"
+)
+
+func NewAuthnInstance(r *http.Request) (*webauthn.WebAuthn, error) {
+ siteUrl, err := url.Parse(common.GetApiUrl(r))
+ if err != nil {
+ return nil, err
+ }
+ return webauthn.New(&webauthn.Config{
+ RPDisplayName: setting.GetStr(conf.SiteTitle),
+ RPID: siteUrl.Hostname(),
+ //RPOrigin: siteUrl.String(),
+ RPOrigins: []string{siteUrl.String()},
+ // RPOrigin: "http://localhost:5173"
+ })
+}
diff --git a/internal/bootstrap/aria2.go b/internal/bootstrap/aria2.go
deleted file mode 100644
index 60017dda..00000000
--- a/internal/bootstrap/aria2.go
+++ /dev/null
@@ -1,16 +0,0 @@
-package bootstrap
-
-import (
- "github.com/alist-org/alist/v3/internal/aria2"
- "github.com/alist-org/alist/v3/pkg/utils"
-)
-
-func InitAria2() {
- go func() {
- _, err := aria2.InitClient(2)
- if err != nil {
- //utils.Log.Errorf("failed to init aria2 client: %+v", err)
- utils.Log.Infof("Aria2 not ready.")
- }
- }()
-}
diff --git a/internal/bootstrap/config.go b/internal/bootstrap/config.go
old mode 100644
new mode 100755
diff --git a/internal/bootstrap/data/data.go b/internal/bootstrap/data/data.go
old mode 100644
new mode 100755
diff --git a/internal/bootstrap/data/dev.go b/internal/bootstrap/data/dev.go
old mode 100644
new mode 100755
diff --git a/internal/bootstrap/data/setting.go b/internal/bootstrap/data/setting.go
old mode 100644
new mode 100755
index 05325d17..5dbfd3c2
--- a/internal/bootstrap/data/setting.go
+++ b/internal/bootstrap/data/setting.go
@@ -4,6 +4,7 @@ import (
"github.com/alist-org/alist/v3/cmd/flags"
"github.com/alist-org/alist/v3/internal/conf"
"github.com/alist-org/alist/v3/internal/model"
+ "github.com/alist-org/alist/v3/internal/offline_download/tool"
"github.com/alist-org/alist/v3/internal/op"
"github.com/alist-org/alist/v3/pkg/utils"
"github.com/alist-org/alist/v3/pkg/utils/random"
@@ -140,10 +141,8 @@ func InitialSettings() []model.SettingItem {
{Key: conf.OcrApi, Value: "https://api.nn.ci/ocr/file/json", Type: conf.TypeString, Group: model.GLOBAL},
{Key: conf.FilenameCharMapping, Value: `{"/": "|"}`, Type: conf.TypeText, Group: model.GLOBAL},
{Key: conf.ForwardDirectLinkParams, Value: "false", Type: conf.TypeBool, Group: model.GLOBAL},
-
- // aria2 settings
- {Key: conf.Aria2Uri, Value: "http://localhost:6800/jsonrpc", Type: conf.TypeString, Group: model.ARIA2, Flag: model.PRIVATE},
- {Key: conf.Aria2Secret, Value: "", Type: conf.TypeString, Group: model.ARIA2, Flag: model.PRIVATE},
+ {Key: conf.IgnoreDirectLinkParams, Value: "sign,alist_ts", Type: conf.TypeString, Group: model.GLOBAL},
+ {Key: conf.WebauthnLoginEnabled, Value: "false", Type: conf.TypeBool, Group: model.GLOBAL, Flag: model.PUBLIC},
// single settings
{Key: conf.Token, Value: token, Type: conf.TypeString, Group: model.SINGLE, Flag: model.PRIVATE},
@@ -158,6 +157,7 @@ func InitialSettings() []model.SettingItem {
{Key: conf.SSOLoginPlatform, Type: conf.TypeSelect, Options: "Casdoor,Github,Microsoft,Google,Dingtalk,OIDC", Group: model.SSO, Flag: model.PUBLIC},
{Key: conf.SSOClientId, Value: "", Type: conf.TypeString, Group: model.SSO, Flag: model.PRIVATE},
{Key: conf.SSOClientSecret, Value: "", Type: conf.TypeString, Group: model.SSO, Flag: model.PRIVATE},
+ {Key: conf.SSOOIDCUsernameKey, Value: "name", Type: conf.TypeString, Group: model.SSO, Flag: model.PRIVATE},
{Key: conf.SSOOrganizationName, Value: "", Type: conf.TypeString, Group: model.SSO, Flag: model.PRIVATE},
{Key: conf.SSOApplicationName, Value: "", Type: conf.TypeString, Group: model.SSO, Flag: model.PRIVATE},
{Key: conf.SSOEndpointName, Value: "", Type: conf.TypeString, Group: model.SSO, Flag: model.PRIVATE},
@@ -165,14 +165,12 @@ func InitialSettings() []model.SettingItem {
{Key: conf.SSOAutoRegister, Value: "false", Type: conf.TypeBool, Group: model.SSO, Flag: model.PRIVATE},
{Key: conf.SSODefaultDir, Value: "/", Type: conf.TypeString, Group: model.SSO, Flag: model.PRIVATE},
{Key: conf.SSODefaultPermission, Value: "0", Type: conf.TypeNumber, Group: model.SSO, Flag: model.PRIVATE},
-
- // qbittorrent settings
- {Key: conf.QbittorrentUrl, Value: "http://admin:adminadmin@localhost:8080/", Type: conf.TypeString, Group: model.SINGLE, Flag: model.PRIVATE},
- {Key: conf.QbittorrentSeedtime, Value: "0", Type: conf.TypeNumber, Group: model.SINGLE, Flag: model.PRIVATE},
+ {Key: conf.SSOCompatibilityMode, Value: "false", Type: conf.TypeBool, Group: model.SSO, Flag: model.PUBLIC},
// customized settings
{Key: "open_token_url", Value: "https://api.xhofe.top/alist/ali_open/token", Type: conf.TypeString, Group: model.SINGLE, Flag: model.PRIVATE},
{Key: "ali_account_id", Value: "0", Type: conf.TypeNumber, Group: model.SINGLE, Flag: model.PRIVATE},
}
+ initialSettingItems = append(initialSettingItems, tool.Tools.Items()...)
if flags.Dev {
initialSettingItems = append(initialSettingItems, []model.SettingItem{
{Key: "test_deprecated", Value: "test_value", Type: conf.TypeString, Flag: model.DEPRECATED},
diff --git a/internal/bootstrap/data/user.go b/internal/bootstrap/data/user.go
old mode 100644
new mode 100755
diff --git a/internal/bootstrap/db.go b/internal/bootstrap/db.go
old mode 100644
new mode 100755
diff --git a/internal/bootstrap/index.go b/internal/bootstrap/index.go
old mode 100644
new mode 100755
diff --git a/internal/bootstrap/log.go b/internal/bootstrap/log.go
old mode 100644
new mode 100755
diff --git a/internal/bootstrap/offline_download.go b/internal/bootstrap/offline_download.go
new file mode 100755
index 00000000..26e04071
--- /dev/null
+++ b/internal/bootstrap/offline_download.go
@@ -0,0 +1,17 @@
+package bootstrap
+
+import (
+ "github.com/alist-org/alist/v3/internal/offline_download/tool"
+ "github.com/alist-org/alist/v3/pkg/utils"
+)
+
+func InitOfflineDownloadTools() {
+ for k, v := range tool.Tools {
+ res, err := v.Init()
+ if err != nil {
+ utils.Log.Warnf("init tool %s failed: %s", k, err)
+ } else {
+ utils.Log.Infof("init tool %s success: %s", k, res)
+ }
+ }
+}
diff --git a/internal/bootstrap/qbittorrent.go b/internal/bootstrap/qbittorrent.go
deleted file mode 100644
index 315977eb..00000000
--- a/internal/bootstrap/qbittorrent.go
+++ /dev/null
@@ -1,15 +0,0 @@
-package bootstrap
-
-import (
- "github.com/alist-org/alist/v3/internal/qbittorrent"
- "github.com/alist-org/alist/v3/pkg/utils"
-)
-
-func InitQbittorrent() {
- go func() {
- err := qbittorrent.InitClient()
- if err != nil {
- utils.Log.Infof("qbittorrent not ready.")
- }
- }()
-}
diff --git a/internal/bootstrap/storage.go b/internal/bootstrap/storage.go
old mode 100644
new mode 100755
diff --git a/internal/bootstrap/task.go b/internal/bootstrap/task.go
new file mode 100755
index 00000000..5d52e9d2
--- /dev/null
+++ b/internal/bootstrap/task.go
@@ -0,0 +1,15 @@
+package bootstrap
+
+import (
+ "github.com/alist-org/alist/v3/internal/conf"
+ "github.com/alist-org/alist/v3/internal/fs"
+ "github.com/alist-org/alist/v3/internal/offline_download/tool"
+ "github.com/xhofe/tache"
+)
+
+func InitTaskManager() {
+ fs.UploadTaskManager = tache.NewManager[*fs.UploadTask](tache.WithWorks(conf.Conf.Tasks.Upload.Workers), tache.WithMaxRetry(conf.Conf.Tasks.Upload.MaxRetry))
+ fs.CopyTaskManager = tache.NewManager[*fs.CopyTask](tache.WithWorks(conf.Conf.Tasks.Copy.Workers), tache.WithMaxRetry(conf.Conf.Tasks.Copy.MaxRetry))
+ tool.DownloadTaskManager = tache.NewManager[*tool.DownloadTask](tache.WithWorks(conf.Conf.Tasks.Download.Workers), tache.WithMaxRetry(conf.Conf.Tasks.Download.MaxRetry))
+ tool.TransferTaskManager = tache.NewManager[*tool.TransferTask](tache.WithWorks(conf.Conf.Tasks.Transfer.Workers), tache.WithMaxRetry(conf.Conf.Tasks.Transfer.MaxRetry))
+}
diff --git a/internal/conf/config.go b/internal/conf/config.go
old mode 100644
new mode 100755
index b18bc536..966f5444
--- a/internal/conf/config.go
+++ b/internal/conf/config.go
@@ -8,15 +8,15 @@ import (
)
type Database struct {
- Type string `json:"type" env:"DB_TYPE"`
- Host string `json:"host" env:"DB_HOST"`
- Port int `json:"port" env:"DB_PORT"`
- User string `json:"user" env:"DB_USER"`
- Password string `json:"password" env:"DB_PASS"`
- Name string `json:"name" env:"DB_NAME"`
- DBFile string `json:"db_file" env:"DB_FILE"`
- TablePrefix string `json:"table_prefix" env:"DB_TABLE_PREFIX"`
- SSLMode string `json:"ssl_mode" env:"DB_SSL_MODE"`
+ Type string `json:"type" env:"TYPE"`
+ Host string `json:"host" env:"HOST"`
+ Port int `json:"port" env:"PORT"`
+ User string `json:"user" env:"USER"`
+ Password string `json:"password" env:"PASS"`
+ Name string `json:"name" env:"NAME"`
+ DBFile string `json:"db_file" env:"FILE"`
+ TablePrefix string `json:"table_prefix" env:"TABLE_PREFIX"`
+ SSLMode string `json:"ssl_mode" env:"SSL_MODE"`
}
type Scheme struct {
@@ -39,21 +39,42 @@ type LogConfig struct {
Compress bool `json:"compress" env:"COMPRESS"`
}
+type TaskConfig struct {
+ Workers int `json:"workers" env:"WORKERS"`
+ MaxRetry int `json:"max_retry" env:"MAX_RETRY"`
+}
+
+type TasksConfig struct {
+ Download TaskConfig `json:"download" envPrefix:"DOWNLOAD_"`
+ Transfer TaskConfig `json:"transfer" envPrefix:"TRANSFER_"`
+ Upload TaskConfig `json:"upload" envPrefix:"UPLOAD_"`
+ Copy TaskConfig `json:"copy" envPrefix:"COPY_"`
+}
+
+type Cors struct {
+ AllowOrigins []string `json:"allow_origins" env:"ALLOW_ORIGINS"`
+ AllowMethods []string `json:"allow_methods" env:"ALLOW_METHODS"`
+ AllowHeaders []string `json:"allow_headers" env:"ALLOW_HEADERS"`
+}
+
type Config struct {
- Force bool `json:"force" env:"FORCE"`
- SiteURL string `json:"site_url" env:"SITE_URL"`
- Cdn string `json:"cdn" env:"CDN"`
- JwtSecret string `json:"jwt_secret" env:"JWT_SECRET"`
- TokenExpiresIn int `json:"token_expires_in" env:"TOKEN_EXPIRES_IN"`
- Database Database `json:"database"`
- Scheme Scheme `json:"scheme"`
- OpenTokenAuthUrl string `json:"opentoken_auth_url"`
- TempDir string `json:"temp_dir" env:"TEMP_DIR"`
- BleveDir string `json:"bleve_dir" env:"BLEVE_DIR"`
- Log LogConfig `json:"log"`
- DelayedStart int `json:"delayed_start" env:"DELAYED_START"`
- MaxConnections int `json:"max_connections" env:"MAX_CONNECTIONS"`
- TlsInsecureSkipVerify bool `json:"tls_insecure_skip_verify" env:"TLS_INSECURE_SKIP_VERIFY"`
+ Force bool `json:"force" env:"FORCE"`
+ SiteURL string `json:"site_url" env:"SITE_URL"`
+ Cdn string `json:"cdn" env:"CDN"`
+ JwtSecret string `json:"jwt_secret" env:"JWT_SECRET"`
+ TokenExpiresIn int `json:"token_expires_in" env:"TOKEN_EXPIRES_IN"`
+ Database Database `json:"database" envPrefix:"DB_"`
+ Scheme Scheme `json:"scheme"`
+ OpenTokenAuthUrl string `json:"opentoken_auth_url"`
+ TempDir string `json:"temp_dir" env:"TEMP_DIR"`
+ BleveDir string `json:"bleve_dir" env:"BLEVE_DIR"`
+ DistDir string `json:"dist_dir"`
+ Log LogConfig `json:"log"`
+ DelayedStart int `json:"delayed_start" env:"DELAYED_START"`
+ MaxConnections int `json:"max_connections" env:"MAX_CONNECTIONS"`
+ TlsInsecureSkipVerify bool `json:"tls_insecure_skip_verify" env:"TLS_INSECURE_SKIP_VERIFY"`
+ Tasks TasksConfig `json:"tasks" envPrefix:"TASKS_"`
+ Cors Cors `json:"cors" envPrefix:"CORS_"`
}
func DefaultConfig() *Config {
@@ -91,5 +112,27 @@ func DefaultConfig() *Config {
},
MaxConnections: 0,
TlsInsecureSkipVerify: true,
+ Tasks: TasksConfig{
+ Download: TaskConfig{
+ Workers: 5,
+ MaxRetry: 1,
+ },
+ Transfer: TaskConfig{
+ Workers: 5,
+ MaxRetry: 2,
+ },
+ Upload: TaskConfig{
+ Workers: 5,
+ },
+ Copy: TaskConfig{
+ Workers: 5,
+ MaxRetry: 2,
+ },
+ },
+ Cors: Cors{
+ AllowOrigins: []string{"*"},
+ AllowMethods: []string{"*"},
+ AllowHeaders: []string{"*"},
+ },
}
}
diff --git a/internal/conf/const.go b/internal/conf/const.go
old mode 100644
new mode 100755
index 01243a6d..4d560449
--- a/internal/conf/const.go
+++ b/internal/conf/const.go
@@ -42,6 +42,8 @@ const (
OcrApi = "ocr_api"
FilenameCharMapping = "filename_char_mapping"
ForwardDirectLinkParams = "forward_direct_link_params"
+ IgnoreDirectLinkParams = "ignore_direct_link_params"
+ WebauthnLoginEnabled = "webauthn_login_enabled"
// index
SearchIndex = "search_index"
@@ -62,6 +64,7 @@ const (
SSOClientSecret = "sso_client_secret"
SSOLoginEnabled = "sso_login_enabled"
SSOLoginPlatform = "sso_login_platform"
+ SSOOIDCUsernameKey = "sso_oidc_username_key"
SSOOrganizationName = "sso_organization_name"
SSOApplicationName = "sso_application_name"
SSOEndpointName = "sso_endpoint_name"
@@ -69,6 +72,7 @@ const (
SSOAutoRegister = "sso_auto_register"
SSODefaultDir = "sso_default_dir"
SSODefaultPermission = "sso_default_permission"
+ SSOCompatibilityMode = "sso_compatibility_mode"
// qbittorrent
QbittorrentUrl = "qbittorrent_url"
@@ -84,3 +88,8 @@ const (
TEXT
IMAGE
)
+
+// Context keys passed through context.Context.
+const (
+ NoTaskKey = "no_task"
+)
diff --git a/internal/conf/var.go b/internal/conf/var.go
old mode 100644
new mode 100755
diff --git a/internal/db/db.go b/internal/db/db.go
old mode 100644
new mode 100755
index 19a86b8e..19f5403f
--- a/internal/db/db.go
+++ b/internal/db/db.go
@@ -31,3 +31,17 @@ func AutoMigrate(dst ...interface{}) error {
func GetDb() *gorm.DB {
return db
}
+
+func Close() {
+ log.Info("closing db")
+ sqlDB, err := db.DB()
+ if err != nil {
+ log.Errorf("failed to get db: %s", err.Error())
+ return
+ }
+ err = sqlDB.Close()
+ if err != nil {
+ log.Errorf("failed to close db: %s", err.Error())
+ return
+ }
+}
diff --git a/internal/db/meta.go b/internal/db/meta.go
old mode 100644
new mode 100755
diff --git a/internal/db/searchnode.go b/internal/db/searchnode.go
old mode 100644
new mode 100755
diff --git a/internal/db/settingitem.go b/internal/db/settingitem.go
old mode 100644
new mode 100755
diff --git a/internal/db/storage.go b/internal/db/storage.go
old mode 100644
new mode 100755
diff --git a/internal/db/token.go b/internal/db/token.go
old mode 100644
new mode 100755
diff --git a/internal/db/user.go b/internal/db/user.go
old mode 100644
new mode 100755
index 497f0905..82292666
--- a/internal/db/user.go
+++ b/internal/db/user.go
@@ -1,7 +1,11 @@
package db
import (
+ "encoding/base64"
+
"github.com/alist-org/alist/v3/internal/model"
+ "github.com/alist-org/alist/v3/pkg/utils"
+ "github.com/go-webauthn/webauthn/webauthn"
"github.com/pkg/errors"
)
@@ -59,3 +63,40 @@ func GetUsers(pageIndex, pageSize int) (users []model.User, count int64, err err
func DeleteUserById(id uint) error {
return errors.WithStack(db.Delete(&model.User{}, id).Error)
}
+
+func UpdateAuthn(userID uint, authn string) error {
+ return db.Model(&model.User{ID: userID}).Update("authn", authn).Error
+}
+
+func RegisterAuthn(u *model.User, credential *webauthn.Credential) error {
+ if u == nil {
+ return errors.New("user is nil")
+ }
+ exists := u.WebAuthnCredentials()
+ if credential != nil {
+ exists = append(exists, *credential)
+ }
+ res, err := utils.Json.Marshal(exists)
+ if err != nil {
+ return err
+ }
+ return UpdateAuthn(u.ID, string(res))
+}
+
+func RemoveAuthn(u *model.User, id string) error {
+ exists := u.WebAuthnCredentials()
+ for i := 0; i < len(exists); i++ {
+ idEncoded := base64.StdEncoding.EncodeToString(exists[i].ID)
+ if idEncoded == id {
+ exists[len(exists)-1], exists[i] = exists[i], exists[len(exists)-1]
+ exists = exists[:len(exists)-1]
+ break
+ }
+ }
+
+ res, err := utils.Json.Marshal(exists)
+ if err != nil {
+ return err
+ }
+ return UpdateAuthn(u.ID, string(res))
+}
diff --git a/internal/db/util.go b/internal/db/util.go
old mode 100644
new mode 100755
diff --git a/internal/driver/config.go b/internal/driver/config.go
old mode 100644
new mode 100755
index 35ff6e4f..c9e3f949
--- a/internal/driver/config.go
+++ b/internal/driver/config.go
@@ -11,7 +11,7 @@ type Config struct {
DefaultRoot string `json:"default_root"`
CheckStatus bool `json:"-"`
Alert string `json:"alert"` //info,success,warning,danger
- NoOverwriteUpload bool `json:"-"`
+	NoOverwriteUpload bool   `json:"-"` // true if the driver cannot overwrite an existing file on upload
}
func (c Config) MustProxy() bool {
diff --git a/internal/driver/driver.go b/internal/driver/driver.go
old mode 100644
new mode 100755
index e0a7c93d..781e8532
--- a/internal/driver/driver.go
+++ b/internal/driver/driver.go
@@ -109,7 +109,7 @@ type PutResult interface {
Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up UpdateProgress) (model.Obj, error)
}
-type UpdateProgress func(percentage int)
+type UpdateProgress func(percentage float64)
type Progress struct {
Total int64
@@ -120,7 +120,7 @@ type Progress struct {
func (p *Progress) Write(b []byte) (n int, err error) {
n = len(b)
p.Done += int64(n)
- p.up(int(float64(p.Done) / float64(p.Total) * 100))
+ p.up(float64(p.Done) / float64(p.Total) * 100)
return
}
diff --git a/internal/driver/item.go b/internal/driver/item.go
old mode 100644
new mode 100755
diff --git a/internal/errs/driver.go b/internal/errs/driver.go
old mode 100644
new mode 100755
diff --git a/internal/errs/errors.go b/internal/errs/errors.go
old mode 100644
new mode 100755
index 0cab4135..cd681e60
--- a/internal/errs/errors.go
+++ b/internal/errs/errors.go
@@ -17,6 +17,7 @@ var (
MetaNotFound = errors.New("meta not found")
StorageNotFound = errors.New("storage not found")
StreamIncomplete = errors.New("upload/download stream incomplete, possible network issue")
+ StreamPeekFail = errors.New("StreamPeekFail")
)
// NewErr wrap constant error with an extra message
@@ -28,3 +29,7 @@ func NewErr(err error, format string, a ...any) error {
func IsNotFoundError(err error) bool {
return errors.Is(pkgerr.Cause(err), ObjectNotFound) || errors.Is(pkgerr.Cause(err), StorageNotFound)
}
+
+func IsNotSupportError(err error) bool {
+ return errors.Is(pkgerr.Cause(err), NotSupport)
+}
diff --git a/internal/errs/errors_test.go b/internal/errs/errors_test.go
old mode 100644
new mode 100755
diff --git a/internal/errs/object.go b/internal/errs/object.go
old mode 100644
new mode 100755
diff --git a/internal/errs/operate.go b/internal/errs/operate.go
old mode 100644
new mode 100755
diff --git a/internal/errs/search.go b/internal/errs/search.go
old mode 100644
new mode 100755
diff --git a/internal/errs/user.go b/internal/errs/user.go
old mode 100644
new mode 100755
diff --git a/internal/fs/copy.go b/internal/fs/copy.go
old mode 100644
new mode 100755
index b8f92599..25f068f0
--- a/internal/fs/copy.go
+++ b/internal/fs/copy.go
@@ -3,100 +3,141 @@ package fs
import (
"context"
"fmt"
- "net/http"
- stdpath "path"
- "sync/atomic"
-
+ "github.com/alist-org/alist/v3/internal/conf"
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/internal/op"
- "github.com/alist-org/alist/v3/pkg/task"
+ "github.com/alist-org/alist/v3/internal/stream"
"github.com/alist-org/alist/v3/pkg/utils"
"github.com/pkg/errors"
- log "github.com/sirupsen/logrus"
+ "github.com/xhofe/tache"
+ "net/http"
+ stdpath "path"
)
-var CopyTaskManager = task.NewTaskManager(3, func(tid *uint64) {
- atomic.AddUint64(tid, 1)
-})
+type CopyTask struct {
+ tache.Base
+ Status string `json:"status"`
+ srcStorage, dstStorage driver.Driver
+ srcObjPath, dstDirPath string
+}
+
+func (t *CopyTask) GetName() string {
+ return fmt.Sprintf("copy [%s](%s) to [%s](%s)",
+ t.srcStorage.GetStorage().MountPath, t.srcObjPath, t.dstStorage.GetStorage().MountPath, t.dstDirPath)
+}
+
+func (t *CopyTask) GetStatus() string {
+ return t.Status
+}
+
+func (t *CopyTask) Run() error {
+ return copyBetween2Storages(t, t.srcStorage, t.dstStorage, t.srcObjPath, t.dstDirPath)
+}
+
+var CopyTaskManager *tache.Manager[*CopyTask]
// Copy if in the same storage, call move method
// if not, add copy task
-func _copy(ctx context.Context, srcObjPath, dstDirPath string, lazyCache ...bool) (bool, error) {
+func _copy(ctx context.Context, srcObjPath, dstDirPath string, lazyCache ...bool) (tache.TaskWithInfo, error) {
srcStorage, srcObjActualPath, err := op.GetStorageAndActualPath(srcObjPath)
if err != nil {
- return false, errors.WithMessage(err, "failed get src storage")
+ return nil, errors.WithMessage(err, "failed get src storage")
}
dstStorage, dstDirActualPath, err := op.GetStorageAndActualPath(dstDirPath)
if err != nil {
- return false, errors.WithMessage(err, "failed get dst storage")
+ return nil, errors.WithMessage(err, "failed get dst storage")
}
// copy if in the same storage, just call driver.Copy
if srcStorage.GetStorage() == dstStorage.GetStorage() {
- return false, op.Copy(ctx, srcStorage, srcObjActualPath, dstDirActualPath, lazyCache...)
+ return nil, op.Copy(ctx, srcStorage, srcObjActualPath, dstDirActualPath, lazyCache...)
+ }
+ if ctx.Value(conf.NoTaskKey) != nil {
+ srcObj, err := op.Get(ctx, srcStorage, srcObjActualPath)
+ if err != nil {
+ return nil, errors.WithMessagef(err, "failed get src [%s] file", srcObjPath)
+ }
+ if !srcObj.IsDir() {
+ // copy file directly
+ link, _, err := op.Link(ctx, srcStorage, srcObjActualPath, model.LinkArgs{
+ Header: http.Header{},
+ })
+ if err != nil {
+ return nil, errors.WithMessagef(err, "failed get [%s] link", srcObjPath)
+ }
+ fs := stream.FileStream{
+ Obj: srcObj,
+ Ctx: ctx,
+ }
+ // any link provided is seekable
+ ss, err := stream.NewSeekableStream(fs, link)
+ if err != nil {
+ return nil, errors.WithMessagef(err, "failed get [%s] stream", srcObjPath)
+ }
+ return nil, op.Put(ctx, dstStorage, dstDirActualPath, ss, nil, false)
+ }
}
// not in the same storage
- CopyTaskManager.Submit(task.WithCancelCtx(&task.Task[uint64]{
- Name: fmt.Sprintf("copy [%s](%s) to [%s](%s)", srcStorage.GetStorage().MountPath, srcObjActualPath, dstStorage.GetStorage().MountPath, dstDirActualPath),
- Func: func(task *task.Task[uint64]) error {
- return copyBetween2Storages(task, srcStorage, dstStorage, srcObjActualPath, dstDirActualPath)
- },
- }))
- return true, nil
+ t := &CopyTask{
+ srcStorage: srcStorage,
+ dstStorage: dstStorage,
+ srcObjPath: srcObjActualPath,
+ dstDirPath: dstDirActualPath,
+ }
+ CopyTaskManager.Add(t)
+ return t, nil
}
-func copyBetween2Storages(t *task.Task[uint64], srcStorage, dstStorage driver.Driver, srcObjPath, dstDirPath string) error {
- t.SetStatus("getting src object")
- srcObj, err := op.Get(t.Ctx, srcStorage, srcObjPath)
+func copyBetween2Storages(t *CopyTask, srcStorage, dstStorage driver.Driver, srcObjPath, dstDirPath string) error {
+ t.Status = "getting src object"
+ srcObj, err := op.Get(t.Ctx(), srcStorage, srcObjPath)
if err != nil {
return errors.WithMessagef(err, "failed get src [%s] file", srcObjPath)
}
if srcObj.IsDir() {
- t.SetStatus("src object is dir, listing objs")
- objs, err := op.List(t.Ctx, srcStorage, srcObjPath, model.ListArgs{})
+ t.Status = "src object is dir, listing objs"
+ objs, err := op.List(t.Ctx(), srcStorage, srcObjPath, model.ListArgs{})
if err != nil {
return errors.WithMessagef(err, "failed list src [%s] objs", srcObjPath)
}
for _, obj := range objs {
- if utils.IsCanceled(t.Ctx) {
+ if utils.IsCanceled(t.Ctx()) {
return nil
}
srcObjPath := stdpath.Join(srcObjPath, obj.GetName())
dstObjPath := stdpath.Join(dstDirPath, srcObj.GetName())
- CopyTaskManager.Submit(task.WithCancelCtx(&task.Task[uint64]{
- Name: fmt.Sprintf("copy [%s](%s) to [%s](%s)", srcStorage.GetStorage().MountPath, srcObjPath, dstStorage.GetStorage().MountPath, dstObjPath),
- Func: func(t *task.Task[uint64]) error {
- return copyBetween2Storages(t, srcStorage, dstStorage, srcObjPath, dstObjPath)
- },
- }))
+ CopyTaskManager.Add(&CopyTask{
+ srcStorage: srcStorage,
+ dstStorage: dstStorage,
+ srcObjPath: srcObjPath,
+ dstDirPath: dstObjPath,
+ })
}
- } else {
- CopyTaskManager.Submit(task.WithCancelCtx(&task.Task[uint64]{
- Name: fmt.Sprintf("copy [%s](%s) to [%s](%s)", srcStorage.GetStorage().MountPath, srcObjPath, dstStorage.GetStorage().MountPath, dstDirPath),
- Func: func(t *task.Task[uint64]) error {
- err := copyFileBetween2Storages(t, srcStorage, dstStorage, srcObjPath, dstDirPath)
- log.Debugf("copy file between storages: %+v", err)
- return err
- },
- }))
+ t.Status = "src object is dir, added all copy tasks of objs"
+ return nil
}
- return nil
+ return copyFileBetween2Storages(t, srcStorage, dstStorage, srcObjPath, dstDirPath)
}
-func copyFileBetween2Storages(tsk *task.Task[uint64], srcStorage, dstStorage driver.Driver, srcFilePath, dstDirPath string) error {
- srcFile, err := op.Get(tsk.Ctx, srcStorage, srcFilePath)
+func copyFileBetween2Storages(tsk *CopyTask, srcStorage, dstStorage driver.Driver, srcFilePath, dstDirPath string) error {
+ srcFile, err := op.Get(tsk.Ctx(), srcStorage, srcFilePath)
if err != nil {
return errors.WithMessagef(err, "failed get src [%s] file", srcFilePath)
}
- link, _, err := op.Link(tsk.Ctx, srcStorage, srcFilePath, model.LinkArgs{
+ link, _, err := op.Link(tsk.Ctx(), srcStorage, srcFilePath, model.LinkArgs{
Header: http.Header{},
})
if err != nil {
return errors.WithMessagef(err, "failed get [%s] link", srcFilePath)
}
- stream, err := getFileStreamFromLink(tsk.Ctx, srcFile, link)
+ fs := stream.FileStream{
+ Obj: srcFile,
+ Ctx: tsk.Ctx(),
+ }
+ // any link provided is seekable
+ ss, err := stream.NewSeekableStream(fs, link)
if err != nil {
return errors.WithMessagef(err, "failed get [%s] stream", srcFilePath)
}
- return op.Put(tsk.Ctx, dstStorage, dstDirPath, stream, tsk.SetProgress, true)
+ return op.Put(tsk.Ctx(), dstStorage, dstDirPath, ss, tsk.SetProgress, true)
}
diff --git a/internal/fs/fs.go b/internal/fs/fs.go
old mode 100644
new mode 100755
index ce922bc7..23e8a87a
--- a/internal/fs/fs.go
+++ b/internal/fs/fs.go
@@ -2,11 +2,11 @@ package fs
import (
"context"
-
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/internal/op"
log "github.com/sirupsen/logrus"
+ "github.com/xhofe/tache"
)
// the param named path of functions in this package is a mount path
@@ -69,7 +69,7 @@ func Move(ctx context.Context, srcPath, dstDirPath string, lazyCache ...bool) er
return err
}
-func Copy(ctx context.Context, srcObjPath, dstDirPath string, lazyCache ...bool) (bool, error) {
+func Copy(ctx context.Context, srcObjPath, dstDirPath string, lazyCache ...bool) (tache.TaskWithInfo, error) {
res, err := _copy(ctx, srcObjPath, dstDirPath, lazyCache...)
if err != nil {
log.Errorf("failed copy %s to %s: %+v", srcObjPath, dstDirPath, err)
@@ -93,7 +93,7 @@ func Remove(ctx context.Context, path string) error {
return err
}
-func PutDirectly(ctx context.Context, dstDirPath string, file *model.FileStream, lazyCache ...bool) error {
+func PutDirectly(ctx context.Context, dstDirPath string, file model.FileStreamer, lazyCache ...bool) error {
err := putDirectly(ctx, dstDirPath, file, lazyCache...)
if err != nil {
log.Errorf("failed put %s: %+v", dstDirPath, err)
@@ -101,12 +101,12 @@ func PutDirectly(ctx context.Context, dstDirPath string, file *model.FileStream,
return err
}
-func PutAsTask(dstDirPath string, file *model.FileStream) error {
- err := putAsTask(dstDirPath, file)
+func PutAsTask(dstDirPath string, file model.FileStreamer) (tache.TaskWithInfo, error) {
+ t, err := putAsTask(dstDirPath, file)
if err != nil {
log.Errorf("failed put %s: %+v", dstDirPath, err)
}
- return err
+ return t, err
}
type GetStoragesArgs struct {
diff --git a/internal/fs/get.go b/internal/fs/get.go
old mode 100644
new mode 100755
diff --git a/internal/fs/link.go b/internal/fs/link.go
old mode 100644
new mode 100755
diff --git a/internal/fs/list.go b/internal/fs/list.go
old mode 100644
new mode 100755
diff --git a/internal/fs/other.go b/internal/fs/other.go
old mode 100644
new mode 100755
index 7f534a3b..85b7b1d1
--- a/internal/fs/other.go
+++ b/internal/fs/other.go
@@ -2,7 +2,6 @@ package fs
import (
"context"
- "github.com/alist-org/alist/v3/pkg/utils"
"github.com/alist-org/alist/v3/internal/errs"
"github.com/alist-org/alist/v3/internal/model"
@@ -55,6 +54,5 @@ func other(ctx context.Context, args model.FsOtherArgs) (interface{}, error) {
return nil, errors.WithMessage(err, "failed get storage")
}
args.Path = actualPath
- utils.Log.Debugf("%v %v", storage, actualPath)
return op.Other(ctx, storage, args)
}
diff --git a/internal/fs/put.go b/internal/fs/put.go
old mode 100644
new mode 100755
index 41f6b8db..807b15e0
--- a/internal/fs/put.go
+++ b/internal/fs/put.go
@@ -3,47 +3,63 @@ package fs
import (
"context"
"fmt"
- "sync/atomic"
-
+ "github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/errs"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/internal/op"
- "github.com/alist-org/alist/v3/pkg/task"
- "github.com/alist-org/alist/v3/pkg/utils"
"github.com/pkg/errors"
+ "github.com/xhofe/tache"
)
-var UploadTaskManager = task.NewTaskManager(3, func(tid *uint64) {
- atomic.AddUint64(tid, 1)
-})
+type UploadTask struct {
+ tache.Base
+ storage driver.Driver
+ dstDirActualPath string
+ file model.FileStreamer
+}
+
+func (t *UploadTask) GetName() string {
+ return fmt.Sprintf("upload %s to [%s](%s)", t.file.GetName(), t.storage.GetStorage().MountPath, t.dstDirActualPath)
+}
+
+func (t *UploadTask) GetStatus() string {
+ return "uploading"
+}
+
+func (t *UploadTask) Run() error {
+ return op.Put(t.Ctx(), t.storage, t.dstDirActualPath, t.file, t.SetProgress, true)
+}
+
+var UploadTaskManager *tache.Manager[*UploadTask]
// putAsTask add as a put task and return immediately
-func putAsTask(dstDirPath string, file *model.FileStream) error {
+func putAsTask(dstDirPath string, file model.FileStreamer) (tache.TaskWithInfo, error) {
storage, dstDirActualPath, err := op.GetStorageAndActualPath(dstDirPath)
if err != nil {
- return errors.WithMessage(err, "failed get storage")
+ return nil, errors.WithMessage(err, "failed get storage")
}
if storage.Config().NoUpload {
- return errors.WithStack(errs.UploadNotSupported)
+ return nil, errors.WithStack(errs.UploadNotSupported)
}
if file.NeedStore() {
- tempFile, err := utils.CreateTempFile(file, file.GetSize())
+ _, err := file.CacheFullInTempFile()
if err != nil {
- return errors.Wrapf(err, "failed to create temp file")
+ return nil, errors.Wrapf(err, "failed to create temp file")
}
- file.SetReadCloser(tempFile)
+ //file.SetReader(tempFile)
+ //file.SetTmpFile(tempFile)
+ }
+ t := &UploadTask{
+ storage: storage,
+ dstDirActualPath: dstDirActualPath,
+ file: file,
}
- UploadTaskManager.Submit(task.WithCancelCtx(&task.Task[uint64]{
- Name: fmt.Sprintf("upload %s to [%s](%s)", file.GetName(), storage.GetStorage().MountPath, dstDirActualPath),
- Func: func(task *task.Task[uint64]) error {
- return op.Put(task.Ctx, storage, dstDirActualPath, file, task.SetProgress, true)
- },
- }))
- return nil
+ UploadTaskManager.Add(t)
+ return t, nil
}
// putDirect put the file and return after finish
-func putDirectly(ctx context.Context, dstDirPath string, file *model.FileStream, lazyCache ...bool) error {
+func putDirectly(ctx context.Context, dstDirPath string, file model.FileStreamer, lazyCache ...bool) error {
storage, dstDirActualPath, err := op.GetStorageAndActualPath(dstDirPath)
if err != nil {
return errors.WithMessage(err, "failed get storage")
diff --git a/internal/fs/util.go b/internal/fs/util.go
deleted file mode 100644
index 5eca5fce..00000000
--- a/internal/fs/util.go
+++ /dev/null
@@ -1,73 +0,0 @@
-package fs
-
-import (
- "context"
- "io"
- "net/http"
- "strings"
-
- "github.com/alist-org/alist/v3/internal/net"
- "github.com/alist-org/alist/v3/pkg/http_range"
-
- "github.com/alist-org/alist/v3/internal/model"
- "github.com/alist-org/alist/v3/pkg/utils"
- "github.com/alist-org/alist/v3/server/common"
- "github.com/pkg/errors"
-)
-
-func getFileStreamFromLink(ctx context.Context, file model.Obj, link *model.Link) (*model.FileStream, error) {
- var rc io.ReadCloser
- var err error
- mimetype := utils.GetMimeType(file.GetName())
- if link.RangeReadCloser.RangeReader != nil {
- rc, err = link.RangeReadCloser.RangeReader(http_range.Range{Length: -1})
- if err != nil {
- return nil, err
- }
- } else if link.ReadSeekCloser != nil {
- rc = link.ReadSeekCloser
- } else if link.Concurrency != 0 || link.PartSize != 0 {
- down := net.NewDownloader(func(d *net.Downloader) {
- d.Concurrency = link.Concurrency
- d.PartSize = link.PartSize
- })
- req := &net.HttpRequestParams{
- URL: link.URL,
- Range: http_range.Range{Length: -1},
- Size: file.GetSize(),
- HeaderRef: link.Header,
- }
- rc, err = down.Download(ctx, req)
- if err != nil {
- return nil, err
- }
- } else {
- //TODO: add accelerator
- req, err := http.NewRequest(http.MethodGet, link.URL, nil)
- if err != nil {
- return nil, errors.Wrapf(err, "failed to create request for %s", link.URL)
- }
- for h, val := range link.Header {
- req.Header[h] = val
- }
- res, err := common.HttpClient().Do(req)
- if err != nil {
- return nil, errors.Wrapf(err, "failed to get response for %s", link.URL)
- }
- mt := res.Header.Get("Content-Type")
- if mt != "" && strings.ToLower(mt) != "application/octet-stream" {
- mimetype = mt
- }
- rc = res.Body
- }
- // if can't get mimetype, use default application/octet-stream
- if mimetype == "" {
- mimetype = "application/octet-stream"
- }
- stream := &model.FileStream{
- Obj: file,
- ReadCloser: rc,
- Mimetype: mimetype,
- }
- return stream, nil
-}
diff --git a/internal/fs/walk.go b/internal/fs/walk.go
old mode 100644
new mode 100755
diff --git a/internal/fuse/fs.go b/internal/fuse/fs.go
old mode 100644
new mode 100755
diff --git a/internal/fuse/mount.go b/internal/fuse/mount.go
old mode 100644
new mode 100755
diff --git a/internal/message/http.go b/internal/message/http.go
old mode 100644
new mode 100755
diff --git a/internal/message/message.go b/internal/message/message.go
old mode 100644
new mode 100755
diff --git a/internal/message/ws.go b/internal/message/ws.go
old mode 100644
new mode 100755
diff --git a/internal/model/args.go b/internal/model/args.go
old mode 100644
new mode 100755
index bb12ae07..ac3c1875
--- a/internal/model/args.go
+++ b/internal/model/args.go
@@ -1,6 +1,7 @@
package model
import (
+ "context"
"io"
"net/http"
"time"
@@ -22,13 +23,14 @@ type LinkArgs struct {
}
type Link struct {
- URL string `json:"url"`
- Header http.Header `json:"header"` // needed header (for url) or response header(for data or writer)
- RangeReadCloser RangeReadCloser `json:"-"` // recommended way
- ReadSeekCloser io.ReadSeekCloser `json:"-"` // best for local,smb... file system, which exposes ReadSeekCloser
+ URL string `json:"url"` // most common way
+ Header http.Header `json:"header"` // needed header (for url)
+ RangeReadCloser RangeReadCloserIF `json:"-"` // recommended way if can't use URL
+ MFile File `json:"-"` // best for local,smb... file system, which exposes MFile
Expiration *time.Duration // local cache expire Duration
IPCacheKey bool `json:"-"` // add ip to cache key
+
//for accelerating request, use multi-thread downloading
Concurrency int `json:"concurrency"`
PartSize int `json:"part_size"`
@@ -45,10 +47,23 @@ type FsOtherArgs struct {
Method string `json:"method" form:"method"`
Data interface{} `json:"data" form:"data"`
}
+type RangeReadCloserIF interface {
+ RangeRead(ctx context.Context, httpRange http_range.Range) (io.ReadCloser, error)
+ utils.ClosersIF
+}
+
+var _ RangeReadCloserIF = (*RangeReadCloser)(nil)
+
type RangeReadCloser struct {
RangeReader RangeReaderFunc
- Closers *utils.Closers
+ utils.Closers
+}
+
+func (r RangeReadCloser) RangeRead(ctx context.Context, httpRange http_range.Range) (io.ReadCloser, error) {
+ rc, err := r.RangeReader(ctx, httpRange)
+ r.Closers.Add(rc)
+ return rc, err
}
-type WriterFunc func(w io.Writer) error
-type RangeReaderFunc func(httpRange http_range.Range) (io.ReadCloser, error)
+// type WriterFunc func(w io.Writer) error
+type RangeReaderFunc func(ctx context.Context, httpRange http_range.Range) (io.ReadCloser, error)
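
With ReadSeekCloser replaced by MFile and RangeReaderFunc now taking a context, a driver that cannot expose a plain URL is expected to hand back a RangeReadCloserIF. Here is a hedged sketch of doing so over HTTP, reusing the net.RequestHttp and http_range.ApplyRangeToHttpHeader helpers that appear later in this diff; buildLink itself is illustrative.

```go
package example

import (
	"context"
	"io"
	"net/http"

	"github.com/alist-org/alist/v3/internal/model"
	"github.com/alist-org/alist/v3/internal/net"
	"github.com/alist-org/alist/v3/pkg/http_range"
)

func buildLink(rawURL string, header http.Header) *model.Link {
	rrc := &model.RangeReadCloser{
		RangeReader: func(ctx context.Context, r http_range.Range) (io.ReadCloser, error) {
			h := http_range.ApplyRangeToHttpHeader(r, header)
			resp, err := net.RequestHttp(ctx, http.MethodGet, h, rawURL)
			if err != nil {
				return nil, err
			}
			return resp.Body, nil // RangeRead (above) also adds the body to the embedded Closers
		},
	}
	return &model.Link{
		URL:             rawURL,
		Header:          header,
		RangeReadCloser: rrc, // used when the client cannot fetch the URL directly
	}
}
```
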
diff --git a/internal/model/file.go b/internal/model/file.go
new file mode 100755
index 00000000..ba65ef93
--- /dev/null
+++ b/internal/model/file.go
@@ -0,0 +1,25 @@
+package model
+
+import "io"
+
+// File is basic file level accessing interface
+type File interface {
+ io.Reader
+ io.ReaderAt
+ io.Seeker
+ io.Closer
+}
+
+type NopMFileIF interface {
+ io.Reader
+ io.ReaderAt
+ io.Seeker
+}
+type NopMFile struct {
+ NopMFileIF
+}
+
+func (NopMFile) Close() error { return nil }
+func NewNopMFile(r NopMFileIF) File {
+ return NopMFile{r}
+}
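
model.File is the random-access handle that replaces Link.ReadSeekCloser (MFile above), and NewNopMFile adapts anything with Read/ReadAt/Seek into one with a no-op Close. For instance, *bytes.Reader already satisfies NopMFileIF; the snippet below is illustrative only.

```go
package example

import (
	"bytes"
	"io"

	"github.com/alist-org/alist/v3/internal/model"
)

// inMemoryFile wraps an in-memory byte slice as a model.File; Close is a no-op.
func inMemoryFile(data []byte) model.File {
	return model.NewNopMFile(bytes.NewReader(data))
}

func use() error {
	f := inMemoryFile([]byte("hello"))
	defer f.Close()
	_, err := f.Seek(0, io.SeekStart)
	return err
}
```
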
diff --git a/internal/model/meta.go b/internal/model/meta.go
old mode 100644
new mode 100755
index afbdb63f..0446137a
--- a/internal/model/meta.go
+++ b/internal/model/meta.go
@@ -1,14 +1,16 @@
package model
type Meta struct {
- ID uint `json:"id" gorm:"primaryKey"`
- Path string `json:"path" gorm:"unique" binding:"required"`
- Password string `json:"password"`
- PSub bool `json:"p_sub"`
- Write bool `json:"write"`
- WSub bool `json:"w_sub"`
- Hide string `json:"hide"`
- HSub bool `json:"h_sub"`
- Readme string `json:"readme"`
- RSub bool `json:"r_sub"`
+ ID uint `json:"id" gorm:"primaryKey"`
+ Path string `json:"path" gorm:"unique" binding:"required"`
+ Password string `json:"password"`
+ PSub bool `json:"p_sub"`
+ Write bool `json:"write"`
+ WSub bool `json:"w_sub"`
+ Hide string `json:"hide"`
+ HSub bool `json:"h_sub"`
+ Readme string `json:"readme"`
+ RSub bool `json:"r_sub"`
+ Header string `json:"header"`
+ HeaderSub bool `json:"header_sub"`
}
diff --git a/internal/model/obj.go b/internal/model/obj.go
old mode 100644
new mode 100755
index 09e0a13e..77c0700a
--- a/internal/model/obj.go
+++ b/internal/model/obj.go
@@ -7,6 +7,9 @@ import (
"strings"
"time"
+ "github.com/alist-org/alist/v3/pkg/http_range"
+ "github.com/alist-org/alist/v3/pkg/utils"
+
mapset "github.com/deckarep/golang-set/v2"
"github.com/maruel/natural"
@@ -20,8 +23,9 @@ type Obj interface {
GetSize() int64
GetName() string
ModTime() time.Time
+ CreateTime() time.Time
IsDir() bool
- //GetHash() (string, string)
+ GetHash() utils.HashInfo
// The internal information of the driver.
// If you want to use it, please understand what it means
@@ -29,14 +33,20 @@ type Obj interface {
GetPath() string
}
+// FileStreamer: see FileStream for more comments
type FileStreamer interface {
- io.ReadCloser
+ io.Reader
+ io.Closer
Obj
GetMimetype() string
- SetReadCloser(io.ReadCloser)
+ //SetReader(io.Reader)
NeedStore() bool
- GetReadCloser() io.ReadCloser
- GetOld() Obj
+ GetExist() Obj
+ SetExist(Obj)
+ //for a non-seekable Stream, RangeRead supports peeking some data, and CacheFullInTempFile still works
+ RangeRead(http_range.Range) (io.Reader, error)
+ //for a non-seekable Stream, if Read is called, this function won't work
+ CacheFullInTempFile() (File, error)
}
type URL interface {
@@ -50,9 +60,6 @@ type Thumb interface {
type SetPath interface {
SetPath(path string)
}
-type SetHash interface {
- SetHash(hash string, hashType string)
-}
func SortFiles(objs []Obj, orderBy, orderDirection string) {
if orderBy == "" {
@@ -140,6 +147,20 @@ func GetUrl(obj Obj) (url string, ok bool) {
return url, false
}
+func GetRawObject(obj Obj) *Object {
+ switch v := obj.(type) {
+ case *ObjThumbURL:
+ return &v.Object
+ case *ObjThumb:
+ return &v.Object
+ case *ObjectURL:
+ return &v.Object
+ case *Object:
+ return v
+ }
+ return nil
+}
+
// Merge
func NewObjMerge() *ObjMerge {
return &ObjMerge{
diff --git a/internal/model/object.go b/internal/model/object.go
old mode 100644
new mode 100755
index b1ef1b4a..93f2c307
--- a/internal/model/object.go
+++ b/internal/model/object.go
@@ -28,9 +28,9 @@ type Object struct {
Name string
Size int64
Modified time.Time
+ Ctime time.Time // file create time
IsFolder bool
- Hash string
- HashType string
+ HashInfo utils.HashInfo
}
func (o *Object) GetName() string {
@@ -44,6 +44,12 @@ func (o *Object) GetSize() int64 {
func (o *Object) ModTime() time.Time {
return o.Modified
}
+func (o *Object) CreateTime() time.Time {
+ if o.Ctime.IsZero() {
+ return o.ModTime()
+ }
+ return o.Ctime
+}
func (o *Object) IsDir() bool {
return o.IsFolder
@@ -61,13 +67,8 @@ func (o *Object) SetPath(path string) {
o.Path = path
}
-func (o *Object) SetHash(hash string, hashType string) {
- o.Hash = hash
- o.HashType = hashType
-}
-
-func (o *Object) GetHash() (string, string) {
- return o.Hash, o.HashType
+func (o *Object) GetHash() utils.HashInfo {
+ return o.HashInfo
}
type Thumbnail struct {
diff --git a/internal/model/req.go b/internal/model/req.go
old mode 100644
new mode 100755
diff --git a/internal/model/search.go b/internal/model/search.go
old mode 100644
new mode 100755
diff --git a/internal/model/setting.go b/internal/model/setting.go
old mode 100644
new mode 100755
index f4202ee0..3b2c30f1
--- a/internal/model/setting.go
+++ b/internal/model/setting.go
@@ -6,7 +6,7 @@ const (
STYLE
PREVIEW
GLOBAL
- ARIA2
+ OFFLINE_DOWNLOAD
INDEX
SSO
)
diff --git a/internal/model/storage.go b/internal/model/storage.go
old mode 100644
new mode 100755
diff --git a/internal/model/stream.go b/internal/model/stream.go
deleted file mode 100644
index cd7c3363..00000000
--- a/internal/model/stream.go
+++ /dev/null
@@ -1,33 +0,0 @@
-package model
-
-import (
- "io"
-)
-
-type FileStream struct {
- Obj
- io.ReadCloser
- Mimetype string
- WebPutAsTask bool
- Old Obj
-}
-
-func (f *FileStream) GetMimetype() string {
- return f.Mimetype
-}
-
-func (f *FileStream) NeedStore() bool {
- return f.WebPutAsTask
-}
-
-func (f *FileStream) GetReadCloser() io.ReadCloser {
- return f.ReadCloser
-}
-
-func (f *FileStream) SetReadCloser(rc io.ReadCloser) {
- f.ReadCloser = rc
-}
-
-func (f *FileStream) GetOld() Obj {
- return f.Old
-}
diff --git a/internal/model/token.go b/internal/model/token.go
old mode 100644
new mode 100755
diff --git a/internal/model/user.go b/internal/model/user.go
old mode 100644
new mode 100755
index fc0af9cb..2d61a971
--- a/internal/model/user.go
+++ b/internal/model/user.go
@@ -1,11 +1,15 @@
package model
import (
+ "encoding/binary"
+ "encoding/json"
"fmt"
+ "time"
"github.com/alist-org/alist/v3/internal/errs"
"github.com/alist-org/alist/v3/pkg/utils"
"github.com/alist-org/alist/v3/pkg/utils/random"
+ "github.com/go-webauthn/webauthn/webauthn"
"github.com/pkg/errors"
)
@@ -21,15 +25,16 @@ type User struct {
ID uint `json:"id" gorm:"primaryKey"` // unique key
Username string `json:"username" gorm:"unique" binding:"required"` // username
PwdHash string `json:"-"` // password hash
- Salt string // unique salt
- Password string `json:"password"` // password
- BasePath string `json:"base_path"` // base path
- Role int `json:"role"` // user's role
+ PwdTS int64 `json:"-"` // password timestamp
+ Salt string `json:"-"` // unique salt
+ Password string `json:"password"` // password
+ BasePath string `json:"base_path"` // base path
+ Role int `json:"role"` // user's role
Disabled bool `json:"disabled"`
// Determine permissions by bit
// 0: can see hidden files
// 1: can access without password
- // 2: can add aria2 tasks
+ // 2: can add offline download tasks
// 3: can mkdir and upload
// 4: can rename
// 5: can move
@@ -37,10 +42,10 @@ type User struct {
// 7: can remove
// 8: webdav read
// 9: webdav write
- // 10: can add qbittorrent tasks
Permission int32 `json:"permission"`
OtpSecret string `json:"-"`
SsoID string `json:"sso_id"` // unique by sso platform
+ Authn string `gorm:"type:text" json:"-"`
}
func (u *User) IsGuest() bool {
@@ -68,6 +73,7 @@ func (u *User) ValidatePwdStaticHash(pwdStaticHash string) error {
func (u *User) SetPassword(pwd string) *User {
u.Salt = random.String(16)
u.PwdHash = TwoHashPwd(pwd, u.Salt)
+ u.PwdTS = time.Now().Unix()
return u
}
@@ -79,7 +85,7 @@ func (u *User) CanAccessWithoutPassword() bool {
return u.IsAdmin() || (u.Permission>>1)&1 == 1
}
-func (u *User) CanAddAria2Tasks() bool {
+func (u *User) CanAddOfflineDownloadTasks() bool {
return u.IsAdmin() || (u.Permission>>2)&1 == 1
}
@@ -111,10 +117,6 @@ func (u *User) CanWebdavManage() bool {
return u.IsAdmin() || (u.Permission>>9)&1 == 1
}
-func (u *User) CanAddQbittorrentTasks() bool {
- return u.IsAdmin() || (u.Permission>>10)&1 == 1
-}
-
func (u *User) JoinPath(reqPath string) (string, error) {
return utils.JoinBasePath(u.BasePath, reqPath)
}
@@ -130,3 +132,30 @@ func HashPwd(static string, salt string) string {
func TwoHashPwd(password string, salt string) string {
return HashPwd(StaticHash(password), salt)
}
+
+func (u *User) WebAuthnID() []byte {
+ bs := make([]byte, 8)
+ binary.LittleEndian.PutUint64(bs, uint64(u.ID))
+ return bs
+}
+
+func (u *User) WebAuthnName() string {
+ return u.Username
+}
+
+func (u *User) WebAuthnDisplayName() string {
+ return u.Username
+}
+
+func (u *User) WebAuthnCredentials() []webauthn.Credential {
+ var res []webauthn.Credential
+ err := json.Unmarshal([]byte(u.Authn), &res)
+ if err != nil {
+ fmt.Println(err)
+ }
+ return res
+}
+
+func (u *User) WebAuthnIcon() string {
+ return "https://alist.nn.ci/logo.svg"
+}
diff --git a/internal/net/request.go b/internal/net/request.go
old mode 100644
new mode 100755
index 087e44b2..b450ede5
--- a/internal/net/request.go
+++ b/internal/net/request.go
@@ -1,6 +1,7 @@
package net
import (
+ "bytes"
"context"
"fmt"
"io"
@@ -42,7 +43,7 @@ type Downloader struct {
//RequestParam HttpRequestParams
HttpClient HttpRequestFunc
}
-type HttpRequestFunc func(params *HttpRequestParams) (*http.Response, error)
+type HttpRequestFunc func(ctx context.Context, params *HttpRequestParams) (*http.Response, error)
func NewDownloader(options ...func(*Downloader)) *Downloader {
d := &Downloader{
@@ -130,7 +131,7 @@ func (d *downloader) download() (io.ReadCloser, error) {
}
if d.cfg.Concurrency == 1 {
- resp, err := d.cfg.HttpClient(d.params)
+ resp, err := d.cfg.HttpClient(d.ctx, d.params)
if err != nil {
return nil, err
}
@@ -202,7 +203,6 @@ func (d *downloader) downloadPart() {
//defer d.wg.Done()
for {
c, ok := <-d.chunkChannel
- log.Debugf("downloadPart tried to get chunk")
if !ok {
break
}
@@ -211,7 +211,7 @@ func (d *downloader) downloadPart() {
// of download producer.
continue
}
-
+ log.Debugf("downloadPart tried to get chunk")
if err := d.downloadChunk(&c); err != nil {
d.setErr(err)
}
@@ -220,7 +220,7 @@ func (d *downloader) downloadPart() {
// downloadChunk downloads the chunk
func (d *downloader) downloadChunk(ch *chunk) error {
- log.Debugf("start new chunk %+v buffer_id =%d", ch, ch.buf.buffer.id)
+ log.Debugf("start new chunk %+v buffer_id =%d", ch, ch.id)
var n int64
var err error
params := d.getParamsFromChunk(ch)
@@ -258,10 +258,11 @@ func (d *downloader) downloadChunk(ch *chunk) error {
func (d *downloader) tryDownloadChunk(params *HttpRequestParams, ch *chunk) (int64, error) {
- resp, err := d.cfg.HttpClient(params)
+ resp, err := d.cfg.HttpClient(d.ctx, params)
if err != nil {
return 0, err
}
+ defer resp.Body.Close()
//only check file size on the first task
if ch.id == 0 {
err = d.checkTotalBytes(resp)
@@ -279,7 +280,6 @@ func (d *downloader) tryDownloadChunk(params *HttpRequestParams, ch *chunk) (int
err = fmt.Errorf("chunk download size incorrect, expected=%d, got=%d", ch.size, n)
return n, &errReadingBody{err: err}
}
- defer resp.Body.Close()
return n, nil
}
@@ -371,10 +371,10 @@ type chunk struct {
//boundary http_range.Range
}
-func DefaultHttpRequestFunc(params *HttpRequestParams) (*http.Response, error) {
+func DefaultHttpRequestFunc(ctx context.Context, params *HttpRequestParams) (*http.Response, error) {
header := http_range.ApplyRangeToHttpHeader(params.Range, params.HeaderRef)
- res, err := RequestHttp("GET", header, params.URL)
+ res, err := RequestHttp(ctx, "GET", header, params.URL)
if err != nil {
return nil, err
}
@@ -402,13 +402,8 @@ func (e *errReadingBody) Unwrap() error {
}
type MultiReadCloser struct {
- io.ReadCloser
-
- //total int //total bufArr
- //wPos int //current reader wPos
cfg *cfg
closer closerFunc
- //getBuf getBufFunc
finish finishBufFUnc
}
@@ -449,99 +444,26 @@ func (mr MultiReadCloser) Close() error {
return mr.closer()
}
-type Buffer struct {
- data []byte
- wPos int //writer position
- id int
- rPos int //reader position
- lock sync.Mutex
-
- once bool //combined use with notify & lock, to get notify once
- notify chan int // notifies new writes
-}
-
-func (buf *Buffer) Write(p []byte) (n int, err error) {
- inSize := len(p)
- if inSize == 0 {
- return 0, nil
- }
-
- if inSize > len(buf.data)-buf.wPos {
- return 0, fmt.Errorf("exceeding buffer max size,inSize=%d ,buf.data.len=%d , buf.wPos=%d",
- inSize, len(buf.data), buf.wPos)
- }
- copy(buf.data[buf.wPos:], p)
- buf.wPos += inSize
-
- //give read a notice if once==true
- buf.lock.Lock()
- if buf.once == true {
- buf.notify <- inSize //struct{}{}
- }
- buf.once = false
- buf.lock.Unlock()
-
- return inSize, nil
-}
-
-func (buf *Buffer) getPos() (n int) {
- return buf.wPos
-}
-func (buf *Buffer) reset() {
- buf.wPos = 0
- buf.rPos = 0
-}
-
-// waitTillNewWrite notify caller that new write happens
-func (buf *Buffer) waitTillNewWrite(pos int) error {
- //log.Debugf("waitTillNewWrite, current wPos=%d", pos)
- var err error
-
- //defer buffer.lock.Unlock()
- if pos >= len(buf.data) {
- err = fmt.Errorf("there will not be any new write")
- } else if pos > buf.wPos {
- err = fmt.Errorf("illegal read position")
- } else if pos == buf.wPos {
- buf.lock.Lock()
- buf.once = true
- //buffer.wg1.Add(1)
- buf.lock.Unlock()
- //wait for write
- log.Debugf("waitTillNewWrite wait for notify")
- writes := <-buf.notify
- log.Debugf("waitTillNewWrite got new write from notify, last writes:%+v", writes)
- //if pos >= buf.wPos {
- // //wrote 0 bytes
- // return fmt.Errorf("write has error")
- //}
- return nil
- }
- //only case: wPos < buffer.wPos
- return err
-}
-
type Buf struct {
- buffer *Buffer // Buffer we read from
- size int //expected size
+ buffer *bytes.Buffer
+ size int //expected size
ctx context.Context
+ off int
+ rw sync.RWMutex
+ notify chan struct{}
}
// NewBuf is a buffer that can have 1 read & 1 write at the same time.
// when reads outpace writes, data written is made available to the reader immediately
func NewBuf(ctx context.Context, maxSize int, id int) *Buf {
- d := make([]byte, maxSize)
- buffer := &Buffer{data: d, id: id, notify: make(chan int)}
- buffer.reset()
- return &Buf{ctx: ctx, buffer: buffer, size: maxSize}
+ d := make([]byte, 0, maxSize)
+ return &Buf{ctx: ctx, buffer: bytes.NewBuffer(d), size: maxSize, notify: make(chan struct{})}
}
func (br *Buf) Reset(size int) {
- br.buffer.reset()
+ br.buffer.Reset()
br.size = size
-}
-func (br *Buf) GetId() int {
- return br.buffer.id
+ br.off = 0
}
func (br *Buf) Read(p []byte) (n int, err error) {
@@ -551,48 +473,49 @@ func (br *Buf) Read(p []byte) (n int, err error) {
if len(p) == 0 {
return 0, nil
}
- if br.buffer.rPos == br.size {
+ if br.off >= br.size {
return 0, io.EOF
}
- //persist buffer position as another thread is keep increasing it
- bufPos := br.buffer.getPos()
- outSize := bufPos - br.buffer.rPos
-
- if outSize == 0 {
- //var wg sync.WaitGroup
- err := br.waitTillNewWrite(br.buffer.rPos)
- if err != nil {
- return 0, err
- }
- bufPos = br.buffer.getPos()
- outSize = bufPos - br.buffer.rPos
+ br.rw.RLock()
+ n, err = br.buffer.Read(p)
+ br.rw.RUnlock()
+ if err == nil {
+ br.off += n
+ return n, err
}
-
- if len(p) < outSize {
- // p is not big enough
- outSize = len(p)
+ if err != io.EOF {
+ return n, err
}
- copy(p, br.buffer.data[br.buffer.rPos:br.buffer.rPos+outSize])
- br.buffer.rPos += outSize
- if br.buffer.rPos == br.size {
- err = io.EOF
+ if n != 0 {
+ br.off += n
+ return n, nil
+ }
+ // n==0, err==io.EOF
+ // wait for new write for 200ms
+ select {
+ case <-br.ctx.Done():
+ return 0, br.ctx.Err()
+ case <-br.notify:
+ return 0, nil
+ case <-time.After(time.Millisecond * 200):
+ return 0, nil
}
-
- return outSize, err
-}
-
-// waitTillNewWrite is expensive, since we just checked that no new data, wait 0.2s
-func (br *Buf) waitTillNewWrite(pos int) error {
- time.Sleep(200 * time.Millisecond)
- return br.buffer.waitTillNewWrite(br.buffer.rPos)
}
func (br *Buf) Write(p []byte) (n int, err error) {
if err := br.ctx.Err(); err != nil {
return 0, err
}
- return br.buffer.Write(p)
+ br.rw.Lock()
+ defer br.rw.Unlock()
+ n, err = br.buffer.Write(p)
+ select {
+ case br.notify <- struct{}{}:
+ default:
+ }
+ return
}
+
func (br *Buf) Close() {
- close(br.buffer.notify)
+ close(br.notify)
}
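
The hand-rolled Buffer is gone: Buf now wraps a bytes.Buffer behind an RWMutex plus a notify channel, and Read returns (0, nil) after waiting up to 200ms so the caller simply retries until io.EOF. Below is a sketch of the intended single-writer/single-reader usage, written as if it lived in this package; the payload and chunk size are arbitrary.

```go
package net

import (
	"context"
	"io"
)

func pipeThroughBuf(ctx context.Context, payload []byte) ([]byte, error) {
	buf := NewBuf(ctx, len(payload), 0) // the id argument is no longer stored after this change
	go func() {
		defer buf.Close()
		_, _ = buf.Write(payload) // single writer
	}()
	out := make([]byte, 0, len(payload))
	tmp := make([]byte, 4096)
	for {
		n, err := buf.Read(tmp) // (0, nil) means "no new data yet, try again"
		out = append(out, tmp[:n]...)
		if err == io.EOF {
			return out, nil
		}
		if err != nil { // e.g. the context was canceled
			return out, err
		}
	}
}
```
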
diff --git a/internal/net/request_test.go b/internal/net/request_test.go
old mode 100644
new mode 100755
index 39bfd82a..032b7376
--- a/internal/net/request_test.go
+++ b/internal/net/request_test.go
@@ -7,14 +7,14 @@ import (
"bytes"
"context"
"fmt"
- "github.com/alist-org/alist/v3/pkg/http_range"
- "github.com/sirupsen/logrus"
- "golang.org/x/exp/slices"
"io"
- "io/ioutil"
"net/http"
"sync"
"testing"
+
+ "github.com/alist-org/alist/v3/pkg/http_range"
+ "github.com/sirupsen/logrus"
+ "golang.org/x/exp/slices"
)
var buf22MB = make([]byte, 1024*1024*22)
@@ -55,7 +55,7 @@ func TestDownloadOrder(t *testing.T) {
if err != nil {
t.Fatalf("expect no error, got %v", err)
}
- resultBuf, err := io.ReadAll(*readCloser)
+ resultBuf, err := io.ReadAll(readCloser)
if err != nil {
t.Fatalf("expect no error, got %v", err)
}
@@ -111,7 +111,7 @@ func TestDownloadSingle(t *testing.T) {
if err != nil {
t.Fatalf("expect no error, got %v", err)
}
- resultBuf, err := io.ReadAll(*readCloser)
+ resultBuf, err := io.ReadAll(readCloser)
if err != nil {
t.Fatalf("expect no error, got %v", err)
}
@@ -142,7 +142,7 @@ type downloadCaptureClient struct {
lock sync.Mutex
}
-func (c *downloadCaptureClient) HttpRequest(params *HttpRequestParams) (*http.Response, error) {
+func (c *downloadCaptureClient) HttpRequest(ctx context.Context, params *HttpRequestParams) (*http.Response, error) {
c.lock.Lock()
defer c.lock.Unlock()
@@ -168,7 +168,7 @@ func newDownloadRangeClient(data []byte) (*downloadCaptureClient, *int, *[]strin
header := &http.Header{}
header.Set("Content-Range", fmt.Sprintf("bytes %d-%d/%d", start, fin-1, len(data)))
return &http.Response{
- Body: ioutil.NopCloser(bytes.NewReader(bodyBytes)),
+ Body: io.NopCloser(bytes.NewReader(bodyBytes)),
Header: *header,
ContentLength: int64(len(bodyBytes)),
}, nil
diff --git a/internal/net/serve.go b/internal/net/serve.go
old mode 100644
new mode 100755
index b2da536a..a0566780
--- a/internal/net/serve.go
+++ b/internal/net/serve.go
@@ -1,6 +1,7 @@
package net
import (
+ "context"
"fmt"
"io"
"mime"
@@ -110,7 +111,7 @@ func ServeHTTP(w http.ResponseWriter, r *http.Request, name string, modTime time
}
switch {
case len(ranges) == 0:
- reader, err := RangeReaderFunc(http_range.Range{Length: -1})
+ reader, err := RangeReaderFunc(context.Background(), http_range.Range{Length: -1})
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
@@ -129,7 +130,7 @@ func ServeHTTP(w http.ResponseWriter, r *http.Request, name string, modTime time
// does not request multiple parts might not support
// multipart responses."
ra := ranges[0]
- sendContent, err = RangeReaderFunc(ra)
+ sendContent, err = RangeReaderFunc(context.Background(), ra)
if err != nil {
http.Error(w, err.Error(), http.StatusRequestedRangeNotSatisfiable)
return
@@ -156,7 +157,7 @@ func ServeHTTP(w http.ResponseWriter, r *http.Request, name string, modTime time
pw.CloseWithError(err)
return
}
- reader, err := RangeReaderFunc(ra)
+ reader, err := RangeReaderFunc(context.Background(), ra)
if err != nil {
pw.CloseWithError(err)
return
@@ -209,29 +210,25 @@ func ProcessHeader(origin, override http.Header) http.Header {
}
// RequestHttp sets the header properly, then sends the request
-func RequestHttp(httpMethod string, headerOverride http.Header, URL string) (*http.Response, error) {
- req, err := http.NewRequest(httpMethod, URL, nil)
+func RequestHttp(ctx context.Context, httpMethod string, headerOverride http.Header, URL string) (*http.Response, error) {
+ req, err := http.NewRequestWithContext(ctx, httpMethod, URL, nil)
if err != nil {
return nil, err
}
req.Header = headerOverride
- log.Debugln("request Header: ", req.Header)
- log.Debugln("request URL: ", URL)
res, err := HttpClient().Do(req)
if err != nil {
return nil, err
}
- log.Debugf("response status: %d", res.StatusCode)
- log.Debugln("response Header: ", res.Header)
// TODO clean header with blocklist or passlist
res.Header.Del("set-cookie")
if res.StatusCode >= 400 {
all, _ := io.ReadAll(res.Body)
+ _ = res.Body.Close()
msg := string(all)
log.Debugln(msg)
- return res, errors.New(msg)
+		return nil, fmt.Errorf("http request [%s] failure, status: %d, response: %s", URL, res.StatusCode, msg)
}
-
return res, nil
}
diff --git a/internal/net/util.go b/internal/net/util.go
old mode 100644
new mode 100755
diff --git a/internal/offline_download/all.go b/internal/offline_download/all.go
new file mode 100755
index 00000000..2229a855
--- /dev/null
+++ b/internal/offline_download/all.go
@@ -0,0 +1,7 @@
+package offline_download
+
+import (
+ _ "github.com/alist-org/alist/v3/internal/offline_download/aria2"
+ _ "github.com/alist-org/alist/v3/internal/offline_download/http"
+ _ "github.com/alist-org/alist/v3/internal/offline_download/qbit"
+)
diff --git a/internal/offline_download/aria2/aria2.go b/internal/offline_download/aria2/aria2.go
new file mode 100755
index 00000000..ea6404a6
--- /dev/null
+++ b/internal/offline_download/aria2/aria2.go
@@ -0,0 +1,126 @@
+package aria2
+
+import (
+ "context"
+ "fmt"
+ "github.com/alist-org/alist/v3/internal/errs"
+ "strconv"
+ "time"
+
+ "github.com/alist-org/alist/v3/internal/conf"
+ "github.com/alist-org/alist/v3/internal/model"
+ "github.com/alist-org/alist/v3/internal/offline_download/tool"
+ "github.com/alist-org/alist/v3/internal/setting"
+ "github.com/alist-org/alist/v3/pkg/aria2/rpc"
+ "github.com/pkg/errors"
+ log "github.com/sirupsen/logrus"
+)
+
+var notify = NewNotify()
+
+type Aria2 struct {
+ client rpc.Client
+}
+
+func (a *Aria2) Run(task *tool.DownloadTask) error {
+ return errs.NotSupport
+}
+
+func (a *Aria2) Name() string {
+ return "aria2"
+}
+
+func (a *Aria2) Items() []model.SettingItem {
+ // aria2 settings
+ return []model.SettingItem{
+ {Key: conf.Aria2Uri, Value: "http://localhost:6800/jsonrpc", Type: conf.TypeString, Group: model.OFFLINE_DOWNLOAD, Flag: model.PRIVATE},
+ {Key: conf.Aria2Secret, Value: "", Type: conf.TypeString, Group: model.OFFLINE_DOWNLOAD, Flag: model.PRIVATE},
+ }
+}
+
+func (a *Aria2) Init() (string, error) {
+ a.client = nil
+ uri := setting.GetStr(conf.Aria2Uri)
+ secret := setting.GetStr(conf.Aria2Secret)
+ c, err := rpc.New(context.Background(), uri, secret, 4*time.Second, notify)
+ if err != nil {
+ return "", errors.Wrap(err, "failed to init aria2 client")
+ }
+ version, err := c.GetVersion()
+ if err != nil {
+ return "", errors.Wrapf(err, "failed get aria2 version")
+ }
+ a.client = c
+ log.Infof("using aria2 version: %s", version.Version)
+ return fmt.Sprintf("aria2 version: %s", version.Version), nil
+}
+
+func (a *Aria2) IsReady() bool {
+ return a.client != nil
+}
+
+func (a *Aria2) AddURL(args *tool.AddUrlArgs) (string, error) {
+ options := map[string]interface{}{
+ "dir": args.TempDir,
+ }
+ gid, err := a.client.AddURI([]string{args.Url}, options)
+ if err != nil {
+ return "", err
+ }
+ notify.Signals.Store(gid, args.Signal)
+ return gid, nil
+}
+
+func (a *Aria2) Remove(task *tool.DownloadTask) error {
+ _, err := a.client.Remove(task.GID)
+ return err
+}
+
+func (a *Aria2) Status(task *tool.DownloadTask) (*tool.Status, error) {
+ info, err := a.client.TellStatus(task.GID)
+ if err != nil {
+ return nil, err
+ }
+ total, err := strconv.ParseUint(info.TotalLength, 10, 64)
+ if err != nil {
+ total = 0
+ }
+ downloaded, err := strconv.ParseUint(info.CompletedLength, 10, 64)
+ if err != nil {
+ downloaded = 0
+ }
+ s := &tool.Status{
+ Completed: info.Status == "complete",
+ Err: err,
+ }
+ s.Progress = float64(downloaded) / float64(total) * 100
+ if len(info.FollowedBy) != 0 {
+ s.NewGID = info.FollowedBy[0]
+ notify.Signals.Delete(task.GID)
+ notify.Signals.Store(s.NewGID, task.Signal)
+ }
+ switch info.Status {
+ case "complete":
+ s.Completed = true
+ case "error":
+ s.Err = errors.Errorf("failed to download %s, error: %s", task.GID, info.ErrorMessage)
+ case "active":
+ s.Status = "aria2: " + info.Status
+ if info.Seeder == "true" {
+ s.Completed = true
+ }
+ case "waiting", "paused":
+ s.Status = "aria2: " + info.Status
+ case "removed":
+ s.Err = errors.Errorf("failed to download %s, removed", task.GID)
+ default:
+ return nil, errors.Errorf("[aria2] unknown status %s", info.Status)
+ }
+ return s, nil
+}
+
+var _ tool.Tool = (*Aria2)(nil)
+
+func init() {
+ tool.Tools.Add(&Aria2{})
+}
diff --git a/internal/aria2/notify.go b/internal/offline_download/aria2/notify.go
old mode 100644
new mode 100755
similarity index 100%
rename from internal/aria2/notify.go
rename to internal/offline_download/aria2/notify.go
diff --git a/internal/offline_download/http/client.go b/internal/offline_download/http/client.go
new file mode 100755
index 00000000..0db05f35
--- /dev/null
+++ b/internal/offline_download/http/client.go
@@ -0,0 +1,85 @@
+package http
+
+import (
+ "fmt"
+ "github.com/alist-org/alist/v3/internal/model"
+ "github.com/alist-org/alist/v3/internal/offline_download/tool"
+ "github.com/alist-org/alist/v3/pkg/utils"
+ "net/http"
+ "net/url"
+ "os"
+ "path"
+ "path/filepath"
+)
+
+type SimpleHttp struct {
+ client http.Client
+}
+
+func (s SimpleHttp) Name() string {
+ return "SimpleHttp"
+}
+
+func (s SimpleHttp) Items() []model.SettingItem {
+ return nil
+}
+
+func (s SimpleHttp) Init() (string, error) {
+ return "ok", nil
+}
+
+func (s SimpleHttp) IsReady() bool {
+ return true
+}
+
+func (s SimpleHttp) AddURL(args *tool.AddUrlArgs) (string, error) {
+ panic("should not be called")
+}
+
+func (s SimpleHttp) Remove(task *tool.DownloadTask) error {
+ panic("should not be called")
+}
+
+func (s SimpleHttp) Status(task *tool.DownloadTask) (*tool.Status, error) {
+ panic("should not be called")
+}
+
+func (s SimpleHttp) Run(task *tool.DownloadTask) error {
+ u := task.Url
+ // parse url
+ _u, err := url.Parse(u)
+ if err != nil {
+ return err
+ }
+ req, err := http.NewRequestWithContext(task.Ctx(), http.MethodGet, u, nil)
+ if err != nil {
+ return err
+ }
+ resp, err := s.client.Do(req)
+ if err != nil {
+ return err
+ }
+ defer resp.Body.Close()
+ if resp.StatusCode >= 400 {
+ return fmt.Errorf("http status code %d", resp.StatusCode)
+ }
+ filename := path.Base(_u.Path)
+ if n, err := parseFilenameFromContentDisposition(resp.Header.Get("Content-Disposition")); err == nil {
+ filename = n
+ }
+ // save to temp dir
+ _ = os.MkdirAll(task.TempDir, os.ModePerm)
+ filePath := filepath.Join(task.TempDir, filename)
+ file, err := os.Create(filePath)
+ if err != nil {
+ return err
+ }
+ defer file.Close()
+ fileSize := resp.ContentLength
+ err = utils.CopyWithCtx(task.Ctx(), file, resp.Body, fileSize, task.SetProgress)
+ return err
+}
+
+func init() {
+ tool.Tools.Add(&SimpleHttp{})
+}
diff --git a/internal/offline_download/http/util.go b/internal/offline_download/http/util.go
new file mode 100755
index 00000000..eefefec2
--- /dev/null
+++ b/internal/offline_download/http/util.go
@@ -0,0 +1,21 @@
+package http
+
+import (
+ "fmt"
+ "mime"
+)
+
+func parseFilenameFromContentDisposition(contentDisposition string) (string, error) {
+ if contentDisposition == "" {
+ return "", fmt.Errorf("Content-Disposition is empty")
+ }
+ _, params, err := mime.ParseMediaType(contentDisposition)
+ if err != nil {
+ return "", err
+ }
+ filename := params["filename"]
+ if filename == "" {
+ return "", fmt.Errorf("filename not found in Content-Disposition: [%s]", contentDisposition)
+ }
+ return filename, nil
+}
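
The helper delegates to mime.ParseMediaType, so quoted and unquoted filename values are both handled. A quick in-package test sketch of the expected behaviour (hypothetical, not part of this change):

```go
package http

import "testing"

func TestParseFilenameFromContentDisposition(t *testing.T) {
	name, err := parseFilenameFromContentDisposition(`attachment; filename="ubuntu.iso"`)
	if err != nil || name != "ubuntu.iso" {
		t.Fatalf("got %q, %v", name, err)
	}
	if _, err := parseFilenameFromContentDisposition(""); err == nil {
		t.Fatal("expected an error for an empty header")
	}
}
```
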
diff --git a/internal/offline_download/qbit/qbit.go b/internal/offline_download/qbit/qbit.go
new file mode 100755
index 00000000..c2e92d2d
--- /dev/null
+++ b/internal/offline_download/qbit/qbit.go
@@ -0,0 +1,85 @@
+package qbit
+
+import (
+ "github.com/alist-org/alist/v3/internal/conf"
+ "github.com/alist-org/alist/v3/internal/errs"
+ "github.com/alist-org/alist/v3/internal/model"
+ "github.com/alist-org/alist/v3/internal/offline_download/tool"
+ "github.com/alist-org/alist/v3/internal/setting"
+ "github.com/alist-org/alist/v3/pkg/qbittorrent"
+ "github.com/pkg/errors"
+)
+
+type QBittorrent struct {
+ client qbittorrent.Client
+}
+
+func (a *QBittorrent) Run(task *tool.DownloadTask) error {
+ return errs.NotSupport
+}
+
+func (a *QBittorrent) Name() string {
+ return "qBittorrent"
+}
+
+func (a *QBittorrent) Items() []model.SettingItem {
+ // qBittorrent settings
+ return []model.SettingItem{
+ {Key: conf.QbittorrentUrl, Value: "http://admin:adminadmin@localhost:8080/", Type: conf.TypeString, Group: model.OFFLINE_DOWNLOAD, Flag: model.PRIVATE},
+ {Key: conf.QbittorrentSeedtime, Value: "0", Type: conf.TypeNumber, Group: model.OFFLINE_DOWNLOAD, Flag: model.PRIVATE},
+ }
+}
+
+func (a *QBittorrent) Init() (string, error) {
+ a.client = nil
+ url := setting.GetStr(conf.QbittorrentUrl)
+ qbClient, err := qbittorrent.New(url)
+ if err != nil {
+ return "", err
+ }
+ a.client = qbClient
+ return "ok", nil
+}
+
+func (a *QBittorrent) IsReady() bool {
+ return a.client != nil
+}
+
+func (a *QBittorrent) AddURL(args *tool.AddUrlArgs) (string, error) {
+ err := a.client.AddFromLink(args.Url, args.TempDir, args.UID)
+ if err != nil {
+ return "", err
+ }
+ return args.UID, nil
+}
+
+func (a *QBittorrent) Remove(task *tool.DownloadTask) error {
+ err := a.client.Delete(task.GID, true)
+ return err
+}
+
+func (a *QBittorrent) Status(task *tool.DownloadTask) (*tool.Status, error) {
+ info, err := a.client.GetInfo(task.GID)
+ if err != nil {
+ return nil, err
+ }
+ s := &tool.Status{}
+ s.Progress = float64(info.Completed) / float64(info.Size) * 100
+ switch info.State {
+ case qbittorrent.UPLOADING, qbittorrent.PAUSEDUP, qbittorrent.QUEUEDUP, qbittorrent.STALLEDUP, qbittorrent.FORCEDUP, qbittorrent.CHECKINGUP:
+ s.Completed = true
+ case qbittorrent.ALLOCATING, qbittorrent.DOWNLOADING, qbittorrent.METADL, qbittorrent.PAUSEDDL, qbittorrent.QUEUEDDL, qbittorrent.STALLEDDL, qbittorrent.CHECKINGDL, qbittorrent.FORCEDDL, qbittorrent.CHECKINGRESUMEDATA, qbittorrent.MOVING:
+ s.Status = "[qBittorrent] downloading"
+ case qbittorrent.ERROR, qbittorrent.MISSINGFILES, qbittorrent.UNKNOWN:
+ s.Err = errors.Errorf("[qBittorrent] failed to download %s, error: %s", task.GID, info.State)
+ default:
+ s.Err = errors.Errorf("[qBittorrent] unknown error occurred downloading %s", task.GID)
+ }
+ return s, nil
+}
+
+var _ tool.Tool = (*QBittorrent)(nil)
+
+func init() {
+ tool.Tools.Add(&QBittorrent{})
+}
diff --git a/internal/offline_download/tool/add.go b/internal/offline_download/tool/add.go
new file mode 100755
index 00000000..3da05c8d
--- /dev/null
+++ b/internal/offline_download/tool/add.go
@@ -0,0 +1,76 @@
+package tool
+
+import (
+ "context"
+ "github.com/alist-org/alist/v3/internal/conf"
+ "github.com/alist-org/alist/v3/internal/errs"
+ "github.com/alist-org/alist/v3/internal/op"
+ "github.com/google/uuid"
+ "github.com/pkg/errors"
+ "github.com/xhofe/tache"
+ "path/filepath"
+)
+
+type DeletePolicy string
+
+const (
+ DeleteOnUploadSucceed DeletePolicy = "delete_on_upload_succeed"
+ DeleteOnUploadFailed DeletePolicy = "delete_on_upload_failed"
+ DeleteNever DeletePolicy = "delete_never"
+ DeleteAlways DeletePolicy = "delete_always"
+)
+
+type AddURLArgs struct {
+ URL string
+ DstDirPath string
+ Tool string
+ DeletePolicy DeletePolicy
+}
+
+func AddURL(ctx context.Context, args *AddURLArgs) (tache.TaskWithInfo, error) {
+ // get tool
+ tool, err := Tools.Get(args.Tool)
+ if err != nil {
+ return nil, errors.Wrapf(err, "failed get tool")
+ }
+ // check tool is ready
+ if !tool.IsReady() {
+ // try to init tool
+ if _, err := tool.Init(); err != nil {
+ return nil, errors.Wrapf(err, "failed init tool %s", args.Tool)
+ }
+ }
+ // check storage
+ storage, dstDirActualPath, err := op.GetStorageAndActualPath(args.DstDirPath)
+ if err != nil {
+ return nil, errors.WithMessage(err, "failed get storage")
+ }
+	// check whether uploads are allowed on this storage
+ if storage.Config().NoUpload {
+ return nil, errors.WithStack(errs.UploadNotSupported)
+ }
+ // check path is valid
+ obj, err := op.Get(ctx, storage, dstDirActualPath)
+ if err != nil {
+ if !errs.IsObjectNotFound(err) {
+ return nil, errors.WithMessage(err, "failed get object")
+ }
+ } else {
+ if !obj.IsDir() {
+ // can't add to a file
+ return nil, errors.WithStack(errs.NotFolder)
+ }
+ }
+
+ uid := uuid.NewString()
+ tempDir := filepath.Join(conf.Conf.TempDir, args.Tool, uid)
+ t := &DownloadTask{
+ Url: args.URL,
+ DstDirPath: args.DstDirPath,
+ TempDir: tempDir,
+ DeletePolicy: args.DeletePolicy,
+ tool: tool,
+ }
+ DownloadTaskManager.Add(t)
+ return t, nil
+}
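
Callers reach the new offline download machinery through this single entry point; a minimal usage sketch follows (the context, URL, path and policy values are illustrative).

```go
package example

import (
	"context"

	"github.com/alist-org/alist/v3/internal/offline_download/tool"
)

func queueDownload(ctx context.Context) error {
	_, err := tool.AddURL(ctx, &tool.AddURLArgs{
		URL:          "https://example.com/file.iso",
		DstDirPath:   "/downloads",
		Tool:         "aria2", // must match a registered Tool's Name()
		DeletePolicy: tool.DeleteOnUploadSucceed,
	})
	return err
}
```
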
diff --git a/internal/offline_download/tool/all_test.go b/internal/offline_download/tool/all_test.go
new file mode 100755
index 00000000..27da5e32
--- /dev/null
+++ b/internal/offline_download/tool/all_test.go
@@ -0,0 +1,17 @@
+package tool_test
+
+import (
+ "testing"
+
+ "github.com/alist-org/alist/v3/internal/offline_download/tool"
+)
+
+func TestGetFiles(t *testing.T) {
+ files, err := tool.GetFiles("..")
+ if err != nil {
+ t.Fatal(err)
+ }
+ for _, file := range files {
+ t.Log(file.Name, file.Size, file.Path, file.Modified)
+ }
+}
diff --git a/internal/offline_download/tool/base.go b/internal/offline_download/tool/base.go
new file mode 100755
index 00000000..3b9fb07a
--- /dev/null
+++ b/internal/offline_download/tool/base.go
@@ -0,0 +1,66 @@
+package tool
+
+import (
+ "io"
+ "os"
+ "time"
+
+ "github.com/alist-org/alist/v3/internal/model"
+)
+
+type AddUrlArgs struct {
+ Url string
+ UID string
+ TempDir string
+ Signal chan int
+}
+
+type Status struct {
+ Progress float64
+ NewGID string
+ Completed bool
+ Status string
+ Err error
+}
+
+type Tool interface {
+ Name() string
+	// Items returns the setting items the tool needs
+ Items() []model.SettingItem
+ Init() (string, error)
+ IsReady() bool
+	// AddURL adds a URI to download and returns the tool-side task id
+ AddURL(args *AddUrlArgs) (string, error)
+	// Remove removes the download when the task is canceled
+ Remove(task *DownloadTask) error
+	// Status returns the status of the download task; if an error occurred, it is reported via Status.Err
+ Status(task *DownloadTask) (*Status, error)
+
+	// Run performs the download directly (used by SimpleHttp); other tools return errs.NotSupport here
+ Run(task *DownloadTask) error
+}
+
+type GetFileser interface {
+	// GetFiles returns the files of the download task; nil means the temp dir should be walked to get the files
+ GetFiles(task *DownloadTask) []File
+}
+
+type File struct {
+ // ReadCloser for http client
+ ReadCloser io.ReadCloser
+ Name string
+ Size int64
+ Path string
+ Modified time.Time
+}
+
+func (f *File) GetReadCloser() (io.ReadCloser, error) {
+ if f.ReadCloser != nil {
+ return f.ReadCloser, nil
+ }
+ file, err := os.Open(f.Path)
+ if err != nil {
+ return nil, err
+ }
+ return file, nil
+}
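
Download engines are plain implementations of the Tool interface that register themselves through tool.Tools.Add in an init(), just like the aria2, qBittorrent and SimpleHttp tools added in this diff. Below is a skeleton of a hypothetical extra tool; the wget name and the trivial method bodies are invented.

```go
package wget

import (
	"github.com/alist-org/alist/v3/internal/errs"
	"github.com/alist-org/alist/v3/internal/model"
	"github.com/alist-org/alist/v3/internal/offline_download/tool"
)

type Wget struct{ ready bool }

func (w *Wget) Name() string               { return "wget" }
func (w *Wget) Items() []model.SettingItem { return nil }
func (w *Wget) Init() (string, error)      { w.ready = true; return "ok", nil }
func (w *Wget) IsReady() bool              { return w.ready }

// Returning errs.NotSupport from Run makes DownloadTask.Run (see download.go
// below) fall back to the AddURL/Status polling path, as aria2 and qBittorrent do.
func (w *Wget) Run(task *tool.DownloadTask) error { return errs.NotSupport }

func (w *Wget) AddURL(args *tool.AddUrlArgs) (string, error) { return args.UID, nil }
func (w *Wget) Remove(task *tool.DownloadTask) error         { return nil }
func (w *Wget) Status(task *tool.DownloadTask) (*tool.Status, error) {
	return &tool.Status{Progress: 100, Completed: true}, nil
}

var _ tool.Tool = (*Wget)(nil)

func init() { tool.Tools.Add(&Wget{}) }
```
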
diff --git a/internal/offline_download/tool/download.go b/internal/offline_download/tool/download.go
new file mode 100755
index 00000000..0e1a9ca8
--- /dev/null
+++ b/internal/offline_download/tool/download.go
@@ -0,0 +1,142 @@
+package tool
+
+import (
+ "fmt"
+ "github.com/alist-org/alist/v3/internal/errs"
+ "github.com/pkg/errors"
+ log "github.com/sirupsen/logrus"
+ "github.com/xhofe/tache"
+ "time"
+)
+
+type DownloadTask struct {
+ tache.Base
+ Url string `json:"url"`
+ DstDirPath string `json:"dst_dir_path"`
+ TempDir string `json:"temp_dir"`
+ DeletePolicy DeletePolicy `json:"delete_policy"`
+
+ Status string `json:"status"`
+ Signal chan int `json:"-"`
+ GID string `json:"-"`
+ tool Tool
+ callStatusRetried int
+}
+
+func (t *DownloadTask) Run() error {
+ if err := t.tool.Run(t); !errs.IsNotSupportError(err) {
+ if err == nil {
+ return t.Complete()
+ }
+ return err
+ }
+ t.Signal = make(chan int)
+ defer func() {
+ t.Signal = nil
+ }()
+ gid, err := t.tool.AddURL(&AddUrlArgs{
+ Url: t.Url,
+ UID: t.ID,
+ TempDir: t.TempDir,
+ Signal: t.Signal,
+ })
+ if err != nil {
+ return err
+ }
+ t.GID = gid
+ var (
+ ok bool
+ )
+outer:
+ for {
+ select {
+ case <-t.CtxDone():
+ err := t.tool.Remove(t)
+ return err
+ case <-t.Signal:
+ ok, err = t.Update()
+ if ok {
+ break outer
+ }
+ case <-time.After(time.Second * 3):
+ ok, err = t.Update()
+ if ok {
+ break outer
+ }
+ }
+ }
+ if err != nil {
+ return err
+ }
+ t.Status = "offline download completed, maybe transferring"
+ return nil
+}
+
+// Update refreshes the download status and returns true if the download has completed
+func (t *DownloadTask) Update() (bool, error) {
+ info, err := t.tool.Status(t)
+ if err != nil {
+ t.callStatusRetried++
+ log.Errorf("failed to get status of %s, retried %d times", t.ID, t.callStatusRetried)
+ return false, nil
+ }
+ if t.callStatusRetried > 5 {
+ return true, errors.Errorf("failed to get status of %s, retried %d times", t.ID, t.callStatusRetried)
+ }
+ t.callStatusRetried = 0
+ t.SetProgress(info.Progress)
+ t.Status = fmt.Sprintf("[%s]: %s", t.tool.Name(), info.Status)
+ if info.NewGID != "" {
+		log.Debugf("followed by: %+v", info.NewGID)
+ t.GID = info.NewGID
+ return false, nil
+ }
+ // if download completed
+ if info.Completed {
+ err := t.Complete()
+ return true, errors.WithMessage(err, "failed to transfer file")
+ }
+ // if download failed
+ if info.Err != nil {
+ return true, errors.Errorf("failed to download %s, error: %s", t.ID, info.Err.Error())
+ }
+ return false, nil
+}
+
+func (t *DownloadTask) Complete() error {
+ var (
+ files []File
+ err error
+ )
+ if getFileser, ok := t.tool.(GetFileser); ok {
+ files = getFileser.GetFiles(t)
+ } else {
+ files, err = GetFiles(t.TempDir)
+ if err != nil {
+ return errors.Wrapf(err, "failed to get files")
+ }
+ }
+ // upload files
+	for i := range files {
+ file := files[i]
+ TransferTaskManager.Add(&TransferTask{
+ file: file,
+ dstDirPath: t.DstDirPath,
+ tempDir: t.TempDir,
+ deletePolicy: t.DeletePolicy,
+ })
+ }
+ return nil
+}
+
+func (t *DownloadTask) GetName() string {
+ return fmt.Sprintf("download %s to (%s)", t.Url, t.DstDirPath)
+}
+
+func (t *DownloadTask) GetStatus() string {
+ return t.Status
+}
+
+var (
+ DownloadTaskManager *tache.Manager[*DownloadTask]
+)
diff --git a/internal/offline_download/tool/tools.go b/internal/offline_download/tool/tools.go
new file mode 100755
index 00000000..9de7d526
--- /dev/null
+++ b/internal/offline_download/tool/tools.go
@@ -0,0 +1,39 @@
+package tool
+
+import (
+ "fmt"
+ "github.com/alist-org/alist/v3/internal/model"
+)
+
+var (
+ Tools = make(ToolsManager)
+)
+
+type ToolsManager map[string]Tool
+
+func (t ToolsManager) Get(name string) (Tool, error) {
+ if tool, ok := t[name]; ok {
+ return tool, nil
+ }
+ return nil, fmt.Errorf("tool %s not found", name)
+}
+
+func (t ToolsManager) Add(tool Tool) {
+ t[tool.Name()] = tool
+}
+
+func (t ToolsManager) Names() []string {
+ names := make([]string, 0, len(t))
+ for name := range t {
+ names = append(names, name)
+ }
+ return names
+}
+
+func (t ToolsManager) Items() []model.SettingItem {
+ var items []model.SettingItem
+ for _, tool := range t {
+ items = append(items, tool.Items()...)
+ }
+ return items
+}
diff --git a/internal/offline_download/tool/transfer.go b/internal/offline_download/tool/transfer.go
new file mode 100755
index 00000000..0ef58df5
--- /dev/null
+++ b/internal/offline_download/tool/transfer.go
@@ -0,0 +1,83 @@
+package tool
+
+import (
+ "fmt"
+ "github.com/alist-org/alist/v3/internal/model"
+ "github.com/alist-org/alist/v3/internal/op"
+ "github.com/alist-org/alist/v3/internal/stream"
+ "github.com/alist-org/alist/v3/pkg/utils"
+ "github.com/pkg/errors"
+ log "github.com/sirupsen/logrus"
+ "github.com/xhofe/tache"
+ "os"
+ "path/filepath"
+)
+
+type TransferTask struct {
+ tache.Base
+ file File
+ dstDirPath string
+ tempDir string
+ deletePolicy DeletePolicy
+}
+
+func (t *TransferTask) Run() error {
+ // check dstDir again
+ storage, dstDirActualPath, err := op.GetStorageAndActualPath(t.dstDirPath)
+ if err != nil {
+ return errors.WithMessage(err, "failed get storage")
+ }
+ mimetype := utils.GetMimeType(t.file.Path)
+ rc, err := t.file.GetReadCloser()
+ if err != nil {
+ return errors.Wrapf(err, "failed to open file %s", t.file.Path)
+ }
+ s := &stream.FileStream{
+ Ctx: nil,
+ Obj: &model.Object{
+ Name: filepath.Base(t.file.Path),
+ Size: t.file.Size,
+ Modified: t.file.Modified,
+ IsFolder: false,
+ },
+ Reader: rc,
+ Mimetype: mimetype,
+ Closers: utils.NewClosers(rc),
+ }
+ relDir, err := filepath.Rel(t.tempDir, filepath.Dir(t.file.Path))
+ if err != nil {
+		log.Errorf("failed to get relative directory: %v", err)
+ }
+ newDistDir := filepath.Join(dstDirActualPath, relDir)
+ return op.Put(t.Ctx(), storage, newDistDir, s, t.SetProgress)
+}
+
+func (t *TransferTask) GetName() string {
+ return fmt.Sprintf("transfer %s to [%s]", t.file.Path, t.dstDirPath)
+}
+
+func (t *TransferTask) GetStatus() string {
+ return "transferring"
+}
+
+func (t *TransferTask) OnSucceeded() {
+ if t.deletePolicy == DeleteOnUploadSucceed || t.deletePolicy == DeleteAlways {
+ err := os.Remove(t.file.Path)
+ if err != nil {
+ log.Errorf("failed to delete file %s, error: %s", t.file.Path, err.Error())
+ }
+ }
+}
+
+func (t *TransferTask) OnFailed() {
+ if t.deletePolicy == DeleteOnUploadFailed || t.deletePolicy == DeleteAlways {
+ err := os.Remove(t.file.Path)
+ if err != nil {
+ log.Errorf("failed to delete file %s, error: %s", t.file.Path, err.Error())
+ }
+ }
+}
+
+var (
+ TransferTaskManager *tache.Manager[*TransferTask]
+)
diff --git a/internal/offline_download/tool/util.go b/internal/offline_download/tool/util.go
new file mode 100755
index 00000000..4258eff6
--- /dev/null
+++ b/internal/offline_download/tool/util.go
@@ -0,0 +1,28 @@
+package tool
+
+import (
+ "os"
+ "path/filepath"
+)
+
+func GetFiles(dir string) ([]File, error) {
+ var files []File
+ err := filepath.Walk(dir, func(path string, info os.FileInfo, err error) error {
+ if err != nil {
+ return err
+ }
+ if !info.IsDir() {
+ files = append(files, File{
+ Name: info.Name(),
+ Size: info.Size(),
+ Path: path,
+ Modified: info.ModTime(),
+ })
+ }
+ return nil
+ })
+ if err != nil {
+ return nil, err
+ }
+ return files, nil
+}
diff --git a/internal/op/const.go b/internal/op/const.go
old mode 100644
new mode 100755
diff --git a/internal/op/driver.go b/internal/op/driver.go
old mode 100644
new mode 100755
diff --git a/internal/op/driver_test.go b/internal/op/driver_test.go
old mode 100644
new mode 100755
diff --git a/internal/op/fs.go b/internal/op/fs.go
old mode 100644
new mode 100755
index 15384405..9fe7d5e6
--- a/internal/op/fs.go
+++ b/internal/op/fs.go
@@ -2,7 +2,6 @@ package op
import (
"context"
- "os"
stdpath "path"
"time"
@@ -85,6 +84,14 @@ func addCacheObj(storage driver.Driver, path string, newObj model.Obj) {
}
func ClearCache(storage driver.Driver, path string) {
+ objs, ok := listCache.Get(Key(storage, path))
+ if ok {
+ for _, obj := range objs {
+ if obj.IsDir() {
+ ClearCache(storage, stdpath.Join(path, obj.GetName()))
+ }
+ }
+ }
listCache.Del(Key(storage, path))
}
@@ -474,6 +481,10 @@ func Remove(ctx context.Context, storage driver.Driver, path string) error {
err = s.Remove(ctx, model.UnwrapObj(rawObj))
if err == nil {
delCacheObj(storage, dirPath, rawObj)
+ // clear folder cache recursively
+ if rawObj.IsDir() {
+ ClearCache(storage, path)
+ }
}
default:
return errs.NotImplement
@@ -481,18 +492,10 @@ func Remove(ctx context.Context, storage driver.Driver, path string) error {
return errors.WithStack(err)
}
-func Put(ctx context.Context, storage driver.Driver, dstDirPath string, file *model.FileStream, up driver.UpdateProgress, lazyCache ...bool) error {
+func Put(ctx context.Context, storage driver.Driver, dstDirPath string, file model.FileStreamer, up driver.UpdateProgress, lazyCache ...bool) error {
if storage.Config().CheckStatus && storage.GetStorage().Status != WORK {
return errors.Errorf("storage not init: %s", storage.GetStorage().Status)
}
- defer func() {
- if f, ok := file.GetReadCloser().(*os.File); ok {
- err := os.RemoveAll(f.Name())
- if err != nil {
- log.Errorf("failed to remove file [%s]", f.Name())
- }
- }
- }()
defer func() {
if err := file.Close(); err != nil {
log.Errorf("failed to close file streamer, %v", err)
@@ -508,7 +511,7 @@ func Put(ctx context.Context, storage driver.Driver, dstDirPath string, file *mo
if fi.GetSize() == 0 {
err = Remove(ctx, storage, dstPath)
if err != nil {
- return errors.WithMessagef(err, "failed remove file that exist and have size 0")
+				return errors.WithMessagef(err, "while uploading, failed to remove existing file with size 0")
}
} else if storage.Config().NoOverwriteUpload {
// try to rename old obj
@@ -517,7 +520,7 @@ func Put(ctx context.Context, storage driver.Driver, dstDirPath string, file *mo
return err
}
} else {
- file.Old = fi
+ file.SetExist(fi)
}
}
err = MakeDir(ctx, storage, dstDirPath)
@@ -531,7 +534,7 @@ func Put(ctx context.Context, storage driver.Driver, dstDirPath string, file *mo
}
// if up is nil, set a default to prevent panic
if up == nil {
- up = func(p int) {}
+ up = func(p float64) {}
}
switch s := storage.(type) {
diff --git a/internal/op/hook.go b/internal/op/hook.go
old mode 100644
new mode 100755
index e37e52df..23b8e59a
--- a/internal/op/hook.go
+++ b/internal/op/hook.go
@@ -78,6 +78,10 @@ var settingItemHooks = map[string]SettingItemHook{
log.Debugf("filename char mapping: %+v", conf.FilenameCharMap)
return nil
},
+ conf.IgnoreDirectLinkParams: func(item *model.SettingItem) error {
+ conf.SlicesMap[conf.IgnoreDirectLinkParams] = strings.Split(item.Value, ",")
+ return nil
+ },
}
func RegisterSettingItemHook(key string, hook SettingItemHook) {
diff --git a/internal/op/meta.go b/internal/op/meta.go
old mode 100644
new mode 100755
diff --git a/internal/op/path.go b/internal/op/path.go
old mode 100644
new mode 100755
diff --git a/internal/op/setting.go b/internal/op/setting.go
old mode 100644
new mode 100755
diff --git a/internal/op/storage.go b/internal/op/storage.go
old mode 100644
new mode 100755
diff --git a/internal/op/storage_test.go b/internal/op/storage_test.go
old mode 100644
new mode 100755
diff --git a/internal/op/token.go b/internal/op/token.go
old mode 100644
new mode 100755
diff --git a/internal/op/user.go b/internal/op/user.go
old mode 100644
new mode 100755
diff --git a/internal/qbittorrent/add.go b/internal/qbittorrent/add.go
deleted file mode 100644
index f552a9ec..00000000
--- a/internal/qbittorrent/add.go
+++ /dev/null
@@ -1,60 +0,0 @@
-package qbittorrent
-
-import (
- "context"
- "fmt"
- "path/filepath"
-
- "github.com/alist-org/alist/v3/internal/conf"
- "github.com/alist-org/alist/v3/internal/errs"
- "github.com/alist-org/alist/v3/internal/op"
- "github.com/alist-org/alist/v3/internal/setting"
- "github.com/alist-org/alist/v3/pkg/task"
- "github.com/google/uuid"
- "github.com/pkg/errors"
-)
-
-func AddURL(ctx context.Context, url string, dstDirPath string) error {
- // check storage
- storage, dstDirActualPath, err := op.GetStorageAndActualPath(dstDirPath)
- if err != nil {
- return errors.WithMessage(err, "failed get storage")
- }
- // check is it could upload
- if storage.Config().NoUpload {
- return errors.WithStack(errs.UploadNotSupported)
- }
- // check path is valid
- obj, err := op.Get(ctx, storage, dstDirActualPath)
- if err != nil {
- if !errs.IsObjectNotFound(err) {
- return errors.WithMessage(err, "failed get object")
- }
- } else {
- if !obj.IsDir() {
- // can't add to a file
- return errors.WithStack(errs.NotFolder)
- }
- }
- // call qbittorrent
- id := uuid.NewString()
- tempDir := filepath.Join(conf.Conf.TempDir, "qbittorrent", id)
- err = qbclient.AddFromLink(url, tempDir, id)
- if err != nil {
- return errors.Wrapf(err, "failed to add url %s", url)
- }
- DownTaskManager.Submit(task.WithCancelCtx(&task.Task[string]{
- ID: id,
- Name: fmt.Sprintf("download %s to [%s](%s)", url, storage.GetStorage().MountPath, dstDirActualPath),
- Func: func(tsk *task.Task[string]) error {
- m := &Monitor{
- tsk: tsk,
- tempDir: tempDir,
- dstDirPath: dstDirPath,
- seedtime: setting.GetInt(conf.QbittorrentSeedtime, 0),
- }
- return m.Loop()
- },
- }))
- return nil
-}
diff --git a/internal/qbittorrent/client_test.go b/internal/qbittorrent/client_test.go
deleted file mode 100644
index 21f1dc41..00000000
--- a/internal/qbittorrent/client_test.go
+++ /dev/null
@@ -1,154 +0,0 @@
-package qbittorrent
-
-import (
- "net/http"
- "net/http/cookiejar"
- "net/url"
- "testing"
-)
-
-func TestLogin(t *testing.T) {
- // test logging in with wrong password
- u, err := url.Parse("http://admin:admin@127.0.0.1:8080/")
- if err != nil {
- t.Error(err)
- }
- jar, err := cookiejar.New(nil)
- if err != nil {
- t.Error(err)
- }
- var c = &client{
- url: u,
- client: http.Client{Jar: jar},
- }
- err = c.login()
- if err == nil {
- t.Error(err)
- }
-
- // test logging in with correct password
- u, err = url.Parse("http://admin:adminadmin@127.0.0.1:8080/")
- if err != nil {
- t.Error(err)
- }
- c.url = u
- err = c.login()
- if err != nil {
- t.Error(err)
- }
-}
-
-// in this test, the `Bypass authentication for clients on localhost` option in qBittorrent webui should be disabled
-func TestAuthorized(t *testing.T) {
- // init client
- u, err := url.Parse("http://admin:adminadmin@127.0.0.1:8080/")
- if err != nil {
- t.Error(err)
- }
- jar, err := cookiejar.New(nil)
- if err != nil {
- t.Error(err)
- }
- var c = &client{
- url: u,
- client: http.Client{Jar: jar},
- }
-
- // test without logging in, which should be unauthorized
- authorized := c.authorized()
- if authorized {
- t.Error("Should not be authorized")
- }
-
- // test after logging in
- err = c.login()
- if err != nil {
- t.Error(err)
- }
- authorized = c.authorized()
- if !authorized {
- t.Error("Should be authorized")
- }
-}
-
-func TestNew(t *testing.T) {
- _, err := New("http://admin:adminadmin@127.0.0.1:8080/")
- if err != nil {
- t.Error(err)
- }
- _, err = New("http://admin:wrong_password@127.0.0.1:8080/")
- if err == nil {
- t.Error("Should get an error")
- }
-}
-
-func TestAdd(t *testing.T) {
- // init client
- c, err := New("http://admin:adminadmin@127.0.0.1:8080/")
- if err != nil {
- t.Error(err)
- }
- err = c.AddFromLink(
- "https://releases.ubuntu.com/22.04/ubuntu-22.04.1-desktop-amd64.iso.torrent",
- "D:\\qBittorrentDownload\\alist",
- "uuid-1",
- )
- if err != nil {
- t.Error(err)
- }
- err = c.AddFromLink(
- "magnet:?xt=urn:btih:375ae3280cd80a8e9d7212e11dfaf7c45069dd35&dn=archlinux-2023.02.01-x86_64.iso",
- "D:\\qBittorrentDownload\\alist",
- "uuid-2",
- )
- if err != nil {
- t.Error(err)
- }
-}
-
-func TestGetInfo(t *testing.T) {
- // init client
- c, err := New("http://admin:adminadmin@127.0.0.1:8080/")
- if err != nil {
- t.Error(err)
- }
- _, err = c.GetInfo("uuid-1")
- if err != nil {
- t.Error(err)
- }
-}
-
-func TestGetFiles(t *testing.T) {
- // init client
- c, err := New("http://admin:adminadmin@127.0.0.1:8080/")
- if err != nil {
- t.Error(err)
- }
- files, err := c.GetFiles("uuid-1")
- if err != nil {
- t.Error(err)
- }
- if len(files) != 1 {
- t.Error("should have exactly one file")
- }
-}
-
-func TestDelete(t *testing.T) {
- // init client
- c, err := New("http://admin:adminadmin@127.0.0.1:8080/")
- if err != nil {
- t.Error(err)
- }
- err = c.AddFromLink(
- "https://releases.ubuntu.com/22.04/ubuntu-22.04.1-desktop-amd64.iso.torrent",
- "D:\\qBittorrentDownload\\alist",
- "uuid-1",
- )
- if err != nil {
- t.Error(err)
- }
- err = c.Delete("uuid-1", true)
- if err != nil {
- t.Error(err)
- }
-}
diff --git a/internal/qbittorrent/monitor.go b/internal/qbittorrent/monitor.go
deleted file mode 100644
index f1b01efa..00000000
--- a/internal/qbittorrent/monitor.go
+++ /dev/null
@@ -1,175 +0,0 @@
-package qbittorrent
-
-import (
- "fmt"
- "io"
- "os"
- "path/filepath"
- "sync"
- "sync/atomic"
- "time"
-
- "github.com/alist-org/alist/v3/internal/model"
- "github.com/alist-org/alist/v3/internal/op"
- "github.com/alist-org/alist/v3/pkg/task"
- "github.com/alist-org/alist/v3/pkg/utils"
- "github.com/pkg/errors"
- log "github.com/sirupsen/logrus"
-)
-
-type Monitor struct {
- tsk *task.Task[string]
- tempDir string
- dstDirPath string
- seedtime int
- finish chan struct{}
-}
-
-func (m *Monitor) Loop() error {
- var (
- err error
- completed bool
- )
- m.finish = make(chan struct{})
-
- // wait for qbittorrent to parse torrent and create task
- m.tsk.SetStatus("waiting for qbittorrent to parse torrent and create task")
- waitCount := 0
- for {
- _, err := qbclient.GetInfo(m.tsk.ID)
- if err == nil {
- break
- }
- switch err.(type) {
- case InfoNotFoundError:
- break
- default:
- return err
- }
-
- waitCount += 1
- if waitCount >= 60 {
- return errors.New("torrent parse timeout")
- }
- timer := time.NewTimer(time.Second)
- <-timer.C
- }
-
-outer:
- for {
- select {
- case <-m.tsk.Ctx.Done():
- // delete qbittorrent task and downloaded files when the task exits with error
- return qbclient.Delete(m.tsk.ID, true)
- case <-time.After(time.Second * 2):
- completed, err = m.update()
- if completed {
- break outer
- }
- }
- }
- if err != nil {
- return err
- }
- m.tsk.SetStatus("qbittorrent download completed, transferring")
- <-m.finish
- m.tsk.SetStatus("completed")
- return nil
-}
-
-func (m *Monitor) update() (bool, error) {
- info, err := qbclient.GetInfo(m.tsk.ID)
- if err != nil {
- m.tsk.SetStatus("qbittorrent " + string(info.State))
- return true, err
- }
-
- progress := float64(info.Completed) / float64(info.Size) * 100
- m.tsk.SetProgress(int(progress))
- switch info.State {
- case UPLOADING, PAUSEDUP, QUEUEDUP, STALLEDUP, FORCEDUP, CHECKINGUP:
- err = m.complete()
- return true, errors.WithMessage(err, "failed to transfer file")
- case ALLOCATING, DOWNLOADING, METADL, PAUSEDDL, QUEUEDDL, STALLEDDL, CHECKINGDL, FORCEDDL, CHECKINGRESUMEDATA, MOVING:
- m.tsk.SetStatus("qbittorrent downloading")
- return false, nil
- case ERROR, MISSINGFILES, UNKNOWN:
- return true, errors.Errorf("failed to download %s, error: %s", m.tsk.ID, info.State)
- }
- return true, errors.New("unknown error occurred downloading qbittorrent") // should never happen
-}
-
-var TransferTaskManager = task.NewTaskManager(3, func(k *uint64) {
- atomic.AddUint64(k, 1)
-})
-
-func (m *Monitor) complete() error {
- // check dstDir again
- storage, dstBaseDir, err := op.GetStorageAndActualPath(m.dstDirPath)
- if err != nil {
- return errors.WithMessage(err, "failed get storage")
- }
- // get files
- files, err := qbclient.GetFiles(m.tsk.ID)
- if err != nil {
- return errors.Wrapf(err, "failed to get files of %s", m.tsk.ID)
- }
- log.Debugf("files len: %d", len(files))
- // delete qbittorrent task but do not delete the files before transferring to avoid qbittorrent
- // accessing downloaded files and throw `cannot access the file because it is being used by another process` error
- // err = qbclient.Delete(m.tsk.ID, false)
- // if err != nil {
- // return err
- // }
- // upload files
- var wg sync.WaitGroup
- wg.Add(len(files))
- go func() {
- wg.Wait()
- m.finish <- struct{}{}
- if m.seedtime < 0 {
- log.Debugf("do not delete qb task %s", m.tsk.ID)
- return
- }
- log.Debugf("delete qb task %s after %d minutes", m.tsk.ID, m.seedtime)
- <-time.After(time.Duration(m.seedtime) * time.Minute)
- err := qbclient.Delete(m.tsk.ID, true)
- if err != nil {
- log.Errorln(err.Error())
- }
- err = os.RemoveAll(m.tempDir)
- if err != nil {
- log.Errorf("failed to remove qbittorrent temp dir: %+v", err.Error())
- }
- }()
- for _, file := range files {
- tempPath := filepath.Join(m.tempDir, file.Name)
- dstPath := filepath.Join(dstBaseDir, file.Name)
- dstDir := filepath.Dir(dstPath)
- fileName := filepath.Base(dstPath)
- TransferTaskManager.Submit(task.WithCancelCtx(&task.Task[uint64]{
- Name: fmt.Sprintf("transfer %s to [%s](%s)", tempPath, storage.GetStorage().MountPath, dstPath),
- Func: func(tsk *task.Task[uint64]) error {
- defer wg.Done()
- size := file.Size
- mimetype := utils.GetMimeType(tempPath)
- f, err := os.Open(tempPath)
- if err != nil {
- return errors.Wrapf(err, "failed to open file %s", tempPath)
- }
- stream := &model.FileStream{
- Obj: &model.Object{
- Name: fileName,
- Size: size,
- Modified: time.Now(),
- IsFolder: false,
- },
- ReadCloser: struct{ io.ReadSeekCloser }{f},
- Mimetype: mimetype,
- }
- return op.Put(tsk.Ctx, storage, dstDir, stream, tsk.SetProgress)
- },
- }))
- }
- return nil
-}
diff --git a/internal/qbittorrent/qbittorrent.go b/internal/qbittorrent/qbittorrent.go
deleted file mode 100644
index d0117175..00000000
--- a/internal/qbittorrent/qbittorrent.go
+++ /dev/null
@@ -1,23 +0,0 @@
-package qbittorrent
-
-import (
- "github.com/alist-org/alist/v3/internal/conf"
- "github.com/alist-org/alist/v3/internal/setting"
- "github.com/alist-org/alist/v3/pkg/task"
-)
-
-var DownTaskManager = task.NewTaskManager[string](3)
-var qbclient Client
-
-func InitClient() error {
- var err error
- qbclient = nil
-
- url := setting.GetStr(conf.QbittorrentUrl)
- qbclient, err = New(url)
- return err
-}
-
-func IsQbittorrentReady() bool {
- return qbclient != nil
-}
diff --git a/internal/search/bleve/init.go b/internal/search/bleve/init.go
old mode 100644
new mode 100755
diff --git a/internal/search/bleve/search.go b/internal/search/bleve/search.go
old mode 100644
new mode 100755
diff --git a/internal/search/build.go b/internal/search/build.go
old mode 100644
new mode 100755
diff --git a/internal/search/db/init.go b/internal/search/db/init.go
old mode 100644
new mode 100755
diff --git a/internal/search/db/search.go b/internal/search/db/search.go
old mode 100644
new mode 100755
diff --git a/internal/search/db_non_full_text/init.go b/internal/search/db_non_full_text/init.go
old mode 100644
new mode 100755
diff --git a/internal/search/db_non_full_text/search.go b/internal/search/db_non_full_text/search.go
old mode 100644
new mode 100755
diff --git a/internal/search/import.go b/internal/search/import.go
old mode 100644
new mode 100755
diff --git a/internal/search/search.go b/internal/search/search.go
old mode 100644
new mode 100755
diff --git a/internal/search/searcher/manage.go b/internal/search/searcher/manage.go
old mode 100644
new mode 100755
diff --git a/internal/search/searcher/searcher.go b/internal/search/searcher/searcher.go
old mode 100644
new mode 100755
diff --git a/internal/search/util.go b/internal/search/util.go
old mode 100644
new mode 100755
diff --git a/internal/setting/setting.go b/internal/setting/setting.go
old mode 100644
new mode 100755
diff --git a/internal/sign/sign.go b/internal/sign/sign.go
old mode 100644
new mode 100755
diff --git a/internal/stream/stream.go b/internal/stream/stream.go
new file mode 100755
index 00000000..a45735d0
--- /dev/null
+++ b/internal/stream/stream.go
@@ -0,0 +1,243 @@
+package stream
+
+import (
+ "bytes"
+ "context"
+ "errors"
+ "fmt"
+ "io"
+ "os"
+
+ "github.com/alist-org/alist/v3/internal/errs"
+ "github.com/alist-org/alist/v3/internal/model"
+ "github.com/alist-org/alist/v3/pkg/http_range"
+ "github.com/alist-org/alist/v3/pkg/utils"
+)
+
+type FileStream struct {
+ Ctx context.Context
+ model.Obj
+ io.Reader
+ Mimetype string
+ WebPutAsTask bool
+ Exist model.Obj //the file that already exists in the destination; we can reuse some info since we will overwrite it
+ utils.Closers
+ tmpFile *os.File //if present, tmpFile has full content, it will be deleted at last
+ peekBuff *bytes.Reader
+}
+
+func (f *FileStream) GetSize() int64 {
+ if f.tmpFile != nil {
+ info, err := f.tmpFile.Stat()
+ if err == nil {
+ return info.Size()
+ }
+ }
+ return f.Obj.GetSize()
+}
+
+func (f *FileStream) GetMimetype() string {
+ return f.Mimetype
+}
+
+func (f *FileStream) NeedStore() bool {
+ return f.WebPutAsTask
+}
+func (f *FileStream) Close() error {
+ var err1, err2 error
+ err1 = f.Closers.Close()
+ if f.tmpFile != nil {
+ err2 = os.RemoveAll(f.tmpFile.Name())
+ if err2 != nil {
+ err2 = errs.NewErr(err2, "failed to remove tmpFile [%s]", f.tmpFile.Name())
+ }
+ }
+
+ return errors.Join(err1, err2)
+}
+
+func (f *FileStream) GetExist() model.Obj {
+ return f.Exist
+}
+func (f *FileStream) SetExist(obj model.Obj) {
+ f.Exist = obj
+}
+
+// CacheFullInTempFile saves all data into tmpFile. Not recommended since it wears the disk,
+// and the upload can't start until the file is fully written. It's not thread-safe!
+func (f *FileStream) CacheFullInTempFile() (model.File, error) {
+ if f.tmpFile != nil {
+ return f.tmpFile, nil
+ }
+ if file, ok := f.Reader.(model.File); ok {
+ return file, nil
+ }
+ tmpF, err := utils.CreateTempFile(f.Reader, f.GetSize())
+ if err != nil {
+ return nil, err
+ }
+ f.Add(tmpF)
+ f.tmpFile = tmpF
+ f.Reader = tmpF
+ return f.tmpFile, nil
+}
+
+const InMemoryBufMaxSize = 10 // Megabytes
+const InMemoryBufMaxSizeBytes = InMemoryBufMaxSize * 1024 * 1024
+
+// RangeRead has to cache all data first since only a Reader is provided.
+// It also supports a peeking RangeRead at the very start, but won't buffer more than 10MB of data in memory.
+func (f *FileStream) RangeRead(httpRange http_range.Range) (io.Reader, error) {
+ if httpRange.Length == -1 {
+ httpRange.Length = f.GetSize()
+ }
+ if f.peekBuff != nil && httpRange.Start < int64(f.peekBuff.Len()) && httpRange.Start+httpRange.Length-1 < int64(f.peekBuff.Len()) {
+ return io.NewSectionReader(f.peekBuff, httpRange.Start, httpRange.Length), nil
+ }
+ if f.tmpFile == nil {
+ if httpRange.Start == 0 && httpRange.Length <= InMemoryBufMaxSizeBytes && f.peekBuff == nil {
+ bufSize := utils.Min(httpRange.Length, f.GetSize())
+ newBuf := bytes.NewBuffer(make([]byte, 0, bufSize))
+ n, err := io.CopyN(newBuf, f.Reader, bufSize)
+ if err != nil {
+ return nil, err
+ }
+ if n != bufSize {
+ return nil, fmt.Errorf("stream RangeRead did not get all data in peek, expect =%d ,actual =%d", bufSize, n)
+ }
+ f.peekBuff = bytes.NewReader(newBuf.Bytes())
+ f.Reader = io.MultiReader(f.peekBuff, f.Reader)
+ return io.NewSectionReader(f.peekBuff, httpRange.Start, httpRange.Length), nil
+ } else {
+ _, err := f.CacheFullInTempFile()
+ if err != nil {
+ return nil, err
+ }
+ }
+ }
+ return io.NewSectionReader(f.tmpFile, httpRange.Start, httpRange.Length), nil
+}
+
+var _ model.FileStreamer = (*SeekableStream)(nil)
+var _ model.FileStreamer = (*FileStream)(nil)
+
+//var _ seekableStream = (*FileStream)(nil)
+
+// SeekableStream is for most internal streams, which are either RangeReadCloser or MFile
+type SeekableStream struct {
+ FileStream
+ Link *model.Link
+ // should have one of the below to support RangeRead
+ rangeReadCloser model.RangeReadCloserIF
+ mFile model.File
+}
+
+func NewSeekableStream(fs FileStream, link *model.Link) (*SeekableStream, error) {
+ if len(fs.Mimetype) == 0 {
+ fs.Mimetype = utils.GetMimeType(fs.Obj.GetName())
+ }
+ ss := SeekableStream{FileStream: fs, Link: link}
+ if ss.Reader != nil {
+ result, ok := ss.Reader.(model.File)
+ if ok {
+ ss.mFile = result
+ ss.Closers.Add(result)
+ return &ss, nil
+ }
+ }
+ if ss.Link != nil {
+ if ss.Link.MFile != nil {
+ ss.mFile = ss.Link.MFile
+ ss.Reader = ss.Link.MFile
+ ss.Closers.Add(ss.Link.MFile)
+ return &ss, nil
+ }
+
+ if ss.Link.RangeReadCloser != nil {
+ ss.rangeReadCloser = ss.Link.RangeReadCloser
+ return &ss, nil
+ }
+ if len(ss.Link.URL) > 0 {
+ rrc, err := GetRangeReadCloserFromLink(ss.GetSize(), link)
+ if err != nil {
+ return nil, err
+ }
+ ss.rangeReadCloser = rrc
+ return &ss, nil
+ }
+ }
+
+ return nil, fmt.Errorf("illegal seekableStream")
+}
+
+//func (ss *SeekableStream) Peek(length int) {
+//
+//}
+
+// RangeRead is not thread-safe; please use it in a single thread only.
+func (ss *SeekableStream) RangeRead(httpRange http_range.Range) (io.Reader, error) {
+ if httpRange.Length == -1 {
+ httpRange.Length = ss.GetSize()
+ }
+ if ss.mFile != nil {
+ return io.NewSectionReader(ss.mFile, httpRange.Start, httpRange.Length), nil
+ }
+ if ss.tmpFile != nil {
+ return io.NewSectionReader(ss.tmpFile, httpRange.Start, httpRange.Length), nil
+ }
+ if ss.rangeReadCloser != nil {
+ rc, err := ss.rangeReadCloser.RangeRead(ss.Ctx, httpRange)
+ if err != nil {
+ return nil, err
+ }
+ return rc, nil
+ }
+ return nil, fmt.Errorf("can't find mFile or rangeReadCloser")
+}
+
+//func (f *FileStream) GetReader() io.Reader {
+// return f.Reader
+//}
+
+// Read only provides the Reader as a full stream when it's demanded; in rapid-upload, we can skip this to save memory.
+func (ss *SeekableStream) Read(p []byte) (n int, err error) {
+ //f.mu.Lock()
+
+ //f.peekedOnce = true
+ //defer f.mu.Unlock()
+ if ss.Reader == nil {
+ if ss.rangeReadCloser == nil {
+ return 0, fmt.Errorf("illegal seekableStream")
+ }
+ rc, err := ss.rangeReadCloser.RangeRead(ss.Ctx, http_range.Range{Length: -1})
+ if err != nil {
+ return 0, err // propagate the RangeRead error instead of silently reporting 0 bytes
+ }
+ ss.Reader = io.NopCloser(rc)
+ ss.Closers.Add(rc)
+
+ }
+ return ss.Reader.Read(p)
+}
+
+func (ss *SeekableStream) CacheFullInTempFile() (model.File, error) {
+ if ss.tmpFile != nil {
+ return ss.tmpFile, nil
+ }
+ if ss.mFile != nil {
+ return ss.mFile, nil
+ }
+ tmpF, err := utils.CreateTempFile(ss, ss.GetSize())
+ if err != nil {
+ return nil, err
+ }
+ ss.Add(tmpF)
+ ss.tmpFile = tmpF
+ ss.Reader = tmpF
+ return ss.tmpFile, nil
+}
+
+func (f *FileStream) SetTmpFile(r *os.File) {
+ f.Reader = r
+ f.tmpFile = r
+}
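The peek-buffer path in `FileStream.RangeRead` above keeps the first chunk of a forward-only reader in memory so an early range request does not force a full temp-file copy, and `io.MultiReader` lets a later sequential read still see the whole stream. A minimal standalone sketch of the same idea using only the standard library (names here are illustrative, not the PR's API):

```go
package main

import (
	"bytes"
	"fmt"
	"io"
	"strings"
)

// peek buffers the first n bytes of r in memory. Ranged reads are served from
// the buffer via ReadAt (which is stateless), while sequential reads of the
// returned reader still see the buffered prefix followed by the rest of r.
func peek(r io.Reader, n int64) (prefix *bytes.Reader, full io.Reader, err error) {
	buf := bytes.NewBuffer(make([]byte, 0, n))
	if _, err := io.CopyN(buf, r, n); err != nil && err != io.EOF {
		return nil, nil, err
	}
	prefix = bytes.NewReader(buf.Bytes())
	return prefix, io.MultiReader(prefix, r), nil
}

func main() {
	src := strings.NewReader("0123456789abcdef")
	prefix, full, err := peek(src, 8)
	if err != nil {
		panic(err)
	}
	// Serve a sub-range out of the in-memory prefix, as RangeRead does.
	part, _ := io.ReadAll(io.NewSectionReader(prefix, 2, 4))
	fmt.Println(string(part)) // 2345
	// A later sequential read still yields the whole stream.
	all, _ := io.ReadAll(full)
	fmt.Println(string(all)) // 0123456789abcdef
}
```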
diff --git a/internal/stream/util.go b/internal/stream/util.go
new file mode 100755
index 00000000..7d2b7ef7
--- /dev/null
+++ b/internal/stream/util.go
@@ -0,0 +1,88 @@
+package stream
+
+import (
+ "context"
+ "fmt"
+ "io"
+ "net/http"
+
+ "github.com/alist-org/alist/v3/internal/errs"
+ "github.com/alist-org/alist/v3/internal/model"
+ "github.com/alist-org/alist/v3/internal/net"
+ "github.com/alist-org/alist/v3/pkg/http_range"
+ log "github.com/sirupsen/logrus"
+)
+
+func GetRangeReadCloserFromLink(size int64, link *model.Link) (model.RangeReadCloserIF, error) {
+ if len(link.URL) == 0 {
+ return nil, fmt.Errorf("can't create RangeReadCloser since URL is empty in link")
+ }
+ //remoteClosers := utils.EmptyClosers()
+ rangeReaderFunc := func(ctx context.Context, r http_range.Range) (io.ReadCloser, error) {
+ if link.Concurrency != 0 || link.PartSize != 0 {
+ header := net.ProcessHeader(http.Header{}, link.Header)
+ down := net.NewDownloader(func(d *net.Downloader) {
+ d.Concurrency = link.Concurrency
+ d.PartSize = link.PartSize
+ })
+ req := &net.HttpRequestParams{
+ URL: link.URL,
+ Range: r,
+ Size: size,
+ HeaderRef: header,
+ }
+ rc, err := down.Download(ctx, req)
+ if err != nil {
+ return nil, errs.NewErr(err, "GetReadCloserFromLink failed")
+ }
+ return rc, nil
+
+ }
+ if len(link.URL) > 0 {
+ response, err := RequestRangedHttp(ctx, link, r.Start, r.Length)
+ if err != nil {
+ if response == nil {
+ return nil, fmt.Errorf("http request failure, err:%s", err)
+ }
+ return nil, fmt.Errorf("http request failure,status: %d err:%s", response.StatusCode, err)
+ }
+ if r.Start == 0 && (r.Length == -1 || r.Length == size) || response.StatusCode == http.StatusPartialContent ||
+ checkContentRange(&response.Header, r.Start) {
+ return response.Body, nil
+ } else if response.StatusCode == http.StatusOK {
+ log.Warnf("remote http server not supporting range request, expect low perfromace!")
+ readCloser, err := net.GetRangedHttpReader(response.Body, r.Start, r.Length)
+ if err != nil {
+ return nil, err
+ }
+ return readCloser, nil
+
+ }
+
+ return response.Body, nil
+ }
+
+ return nil, errs.NotSupport
+ }
+ resultRangeReadCloser := model.RangeReadCloser{RangeReader: rangeReaderFunc}
+ return &resultRangeReadCloser, nil
+}
+
+func RequestRangedHttp(ctx context.Context, link *model.Link, offset, length int64) (*http.Response, error) {
+ header := net.ProcessHeader(http.Header{}, link.Header)
+ header = http_range.ApplyRangeToHttpHeader(http_range.Range{Start: offset, Length: length}, header)
+
+ return net.RequestHttp(ctx, "GET", header, link.URL)
+}
+
+// 139 cloud does not properly return 206 http status code, add a hack here
+func checkContentRange(header *http.Header, offset int64) bool {
+ start, _, err := http_range.ParseContentRange(header.Get("Content-Range"))
+ if err != nil {
+ log.Warnf("exception trying to parse Content-Range, will ignore,err=%s", err)
+ }
+ if start == offset {
+ return true
+ }
+ return false
+}
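When the remote server ignores the `Range` header and replies `200 OK`, the code above falls back to `net.GetRangedHttpReader`, which (per the warning about degraded performance) has to read and discard everything before the requested offset. A rough standalone sketch of that kind of fallback, under the assumption that this is roughly what the helper does:

```go
package main

import (
	"fmt"
	"io"
	"strings"
)

// rangedReader emulates a ranged read over a full 200 OK body: it discards
// the first offset bytes and then limits what remains to length.
func rangedReader(body io.Reader, offset, length int64) (io.Reader, error) {
	if _, err := io.CopyN(io.Discard, body, offset); err != nil {
		return nil, fmt.Errorf("skip to offset %d: %w", offset, err)
	}
	if length < 0 {
		return body, nil // -1 means "until EOF" in the PR's range convention
	}
	return io.LimitReader(body, length), nil
}

func main() {
	body := strings.NewReader("the quick brown fox jumps over the lazy dog")
	r, err := rangedReader(body, 4, 5)
	if err != nil {
		panic(err)
	}
	b, _ := io.ReadAll(r)
	fmt.Println(string(b)) // quick
}
```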
diff --git a/internal/token/token.go b/internal/token/token.go
old mode 100644
new mode 100755
diff --git a/main.go b/main.go
old mode 100644
new mode 100755
diff --git a/pkg/aria2/rpc/README.md b/pkg/aria2/rpc/README.md
old mode 100644
new mode 100755
diff --git a/pkg/aria2/rpc/call.go b/pkg/aria2/rpc/call.go
old mode 100644
new mode 100755
diff --git a/pkg/aria2/rpc/call_test.go b/pkg/aria2/rpc/call_test.go
old mode 100644
new mode 100755
diff --git a/pkg/aria2/rpc/client.go b/pkg/aria2/rpc/client.go
old mode 100644
new mode 100755
diff --git a/pkg/aria2/rpc/client_test.go b/pkg/aria2/rpc/client_test.go
old mode 100644
new mode 100755
diff --git a/pkg/aria2/rpc/const.go b/pkg/aria2/rpc/const.go
old mode 100644
new mode 100755
diff --git a/pkg/aria2/rpc/json2.go b/pkg/aria2/rpc/json2.go
old mode 100644
new mode 100755
diff --git a/pkg/aria2/rpc/notification.go b/pkg/aria2/rpc/notification.go
old mode 100644
new mode 100755
diff --git a/pkg/aria2/rpc/proc.go b/pkg/aria2/rpc/proc.go
old mode 100644
new mode 100755
diff --git a/pkg/aria2/rpc/proto.go b/pkg/aria2/rpc/proto.go
old mode 100644
new mode 100755
diff --git a/pkg/aria2/rpc/resp.go b/pkg/aria2/rpc/resp.go
old mode 100644
new mode 100755
diff --git a/pkg/chanio/chanio.go b/pkg/chanio/chanio.go
old mode 100644
new mode 100755
diff --git a/pkg/cookie/cookie.go b/pkg/cookie/cookie.go
old mode 100644
new mode 100755
diff --git a/pkg/cron/cron.go b/pkg/cron/cron.go
old mode 100644
new mode 100755
diff --git a/pkg/cron/cron_test.go b/pkg/cron/cron_test.go
old mode 100644
new mode 100755
diff --git a/pkg/errgroup/errgroup.go b/pkg/errgroup/errgroup.go
old mode 100644
new mode 100755
diff --git a/pkg/generic/queue.go b/pkg/generic/queue.go
old mode 100644
new mode 100755
diff --git a/pkg/generic_sync/map.go b/pkg/generic_sync/map.go
old mode 100644
new mode 100755
diff --git a/pkg/generic_sync/map_test.go b/pkg/generic_sync/map_test.go
old mode 100644
new mode 100755
diff --git a/pkg/gowebdav/.gitignore b/pkg/gowebdav/.gitignore
old mode 100644
new mode 100755
diff --git a/pkg/gowebdav/.travis.yml b/pkg/gowebdav/.travis.yml
old mode 100644
new mode 100755
diff --git a/pkg/gowebdav/LICENSE b/pkg/gowebdav/LICENSE
old mode 100644
new mode 100755
diff --git a/pkg/gowebdav/Makefile b/pkg/gowebdav/Makefile
old mode 100644
new mode 100755
diff --git a/pkg/gowebdav/README.md b/pkg/gowebdav/README.md
old mode 100644
new mode 100755
diff --git a/pkg/gowebdav/basicAuth.go b/pkg/gowebdav/basicAuth.go
old mode 100644
new mode 100755
diff --git a/pkg/gowebdav/client.go b/pkg/gowebdav/client.go
old mode 100644
new mode 100755
index 6e12289c..2fca0b7f
--- a/pkg/gowebdav/client.go
+++ b/pkg/gowebdav/client.go
@@ -83,6 +83,11 @@ func (c *Client) SetTransport(transport http.RoundTripper) {
c.c.Transport = transport
}
+// SetJar exposes the ability to set a cookie jar to the client.
+func (c *Client) SetJar(jar http.CookieJar) {
+ c.c.Jar = jar
+}
+
// Connect connects to our dav server
func (c *Client) Connect() error {
rs, err := c.options("/")
@@ -351,6 +356,11 @@ func (c *Client) Link(path string) (string, http.Header, error) {
return "", nil, newPathErrorErr("Link", path, err)
}
+ if c.c.Jar != nil {
+ for _, cookie := range c.c.Jar.Cookies(r.URL) {
+ r.AddCookie(cookie)
+ }
+ }
for k, vals := range c.headers {
for _, v := range vals {
r.Header.Add(k, v)
diff --git a/pkg/gowebdav/cmd/gowebdav/README.md b/pkg/gowebdav/cmd/gowebdav/README.md
old mode 100644
new mode 100755
diff --git a/pkg/gowebdav/cmd/gowebdav/main.go b/pkg/gowebdav/cmd/gowebdav/main.go
old mode 100644
new mode 100755
diff --git a/pkg/gowebdav/digestAuth.go b/pkg/gowebdav/digestAuth.go
old mode 100644
new mode 100755
diff --git a/pkg/gowebdav/doc.go b/pkg/gowebdav/doc.go
old mode 100644
new mode 100755
diff --git a/pkg/gowebdav/errors.go b/pkg/gowebdav/errors.go
old mode 100644
new mode 100755
diff --git a/pkg/gowebdav/file.go b/pkg/gowebdav/file.go
old mode 100644
new mode 100755
diff --git a/pkg/gowebdav/netrc.go b/pkg/gowebdav/netrc.go
old mode 100644
new mode 100755
diff --git a/pkg/gowebdav/requests.go b/pkg/gowebdav/requests.go
old mode 100644
new mode 100755
diff --git a/pkg/gowebdav/utils.go b/pkg/gowebdav/utils.go
old mode 100644
new mode 100755
diff --git a/pkg/gowebdav/utils_test.go b/pkg/gowebdav/utils_test.go
old mode 100644
new mode 100755
diff --git a/pkg/http_range/range.go b/pkg/http_range/range.go
old mode 100644
new mode 100755
index 0d6598f2..5edd210d
--- a/pkg/http_range/range.go
+++ b/pkg/http_range/range.go
@@ -107,18 +107,34 @@ func ParseRange(s string, size int64) ([]Range, error) { // nolint:gocognit
return ranges, nil
}
+// ParseContentRange parses the Content-Range header of an http response.
+func ParseContentRange(s string) (start, end int64, err error) {
+ if s == "" {
+ return 0, 0, ErrInvalid
+ }
+ const b = "bytes "
+ if !strings.HasPrefix(s, b) {
+ return 0, 0, ErrInvalid
+ }
+ p1 := strings.Index(s, "-")
+ p2 := strings.Index(s, "/")
+ if p1 < 0 || p2 < 0 {
+ return 0, 0, ErrInvalid
+ }
+ startStr, endStr := textproto.TrimString(s[len(b):p1]), textproto.TrimString(s[p1+1:p2])
+ start, startErr := strconv.ParseInt(startStr, 10, 64)
+ end, endErr := strconv.ParseInt(endStr, 10, 64)
+
+ return start, end, errors.Join(startErr, endErr)
+}
+
func (r Range) MimeHeader(contentType string, size int64) textproto.MIMEHeader {
return textproto.MIMEHeader{
- "Content-Range": {r.contentRange(size)},
+ "Content-Range": {r.ContentRange(size)},
"Content-Type": {contentType},
}
}
-// for http response header
-func (r Range) contentRange(size int64) string {
- return fmt.Sprintf("bytes %d-%d/%d", r.Start, r.Start+r.Length-1, size)
-}
-
// ApplyRangeToHttpHeader for http request header
func ApplyRangeToHttpHeader(p Range, headerRef http.Header) http.Header {
header := headerRef
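The 206 workaround above only needs the start value from `ParseContentRange`, but the helper returns both ends. A quick usage illustration, assuming the alist module is importable at this path:

```go
package main

import (
	"fmt"

	"github.com/alist-org/alist/v3/pkg/http_range"
)

func main() {
	// A typical partial-content header has the form "bytes <start>-<end>/<total>".
	start, end, err := http_range.ParseContentRange("bytes 1000-1999/50000")
	fmt.Println(start, end, err) // 1000 1999 <nil>

	// Missing or malformed headers report an error instead of guessing.
	_, _, err = http_range.ParseContentRange("")
	fmt.Println(err != nil) // true
}
```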
diff --git a/pkg/mq/mq.go b/pkg/mq/mq.go
old mode 100644
new mode 100755
diff --git a/internal/qbittorrent/client.go b/pkg/qbittorrent/client.go
old mode 100644
new mode 100755
similarity index 100%
rename from internal/qbittorrent/client.go
rename to pkg/qbittorrent/client.go
diff --git a/pkg/sign/hmac.go b/pkg/sign/hmac.go
old mode 100644
new mode 100755
diff --git a/pkg/sign/sign.go b/pkg/sign/sign.go
old mode 100644
new mode 100755
diff --git a/pkg/singleflight/signleflight_test.go b/pkg/singleflight/signleflight_test.go
old mode 100644
new mode 100755
diff --git a/pkg/singleflight/singleflight.go b/pkg/singleflight/singleflight.go
old mode 100644
new mode 100755
diff --git a/pkg/task/errors.go b/pkg/task/errors.go
old mode 100644
new mode 100755
diff --git a/pkg/task/manager.go b/pkg/task/manager.go
old mode 100644
new mode 100755
diff --git a/pkg/task/task.go b/pkg/task/task.go
old mode 100644
new mode 100755
index f47eb747..5b634f10
--- a/pkg/task/task.go
+++ b/pkg/task/task.go
@@ -26,7 +26,7 @@ type Task[K comparable] struct {
Name string
state string // pending, running, finished, canceling, canceled, errored
status string
- progress int
+ progress float64
Error error
@@ -41,11 +41,11 @@ func (t *Task[K]) SetStatus(status string) {
t.status = status
}
-func (t *Task[K]) SetProgress(percentage int) {
+func (t *Task[K]) SetProgress(percentage float64) {
t.progress = percentage
}
-func (t Task[K]) GetProgress() int {
+func (t Task[K]) GetProgress() float64 {
return t.progress
}
diff --git a/pkg/task/task_test.go b/pkg/task/task_test.go
old mode 100644
new mode 100755
diff --git a/pkg/utils/balance.go b/pkg/utils/balance.go
old mode 100644
new mode 100755
diff --git a/pkg/utils/bool.go b/pkg/utils/bool.go
old mode 100644
new mode 100755
diff --git a/pkg/utils/ctx.go b/pkg/utils/ctx.go
old mode 100644
new mode 100755
diff --git a/pkg/utils/email.go b/pkg/utils/email.go
old mode 100644
new mode 100755
diff --git a/pkg/utils/file.go b/pkg/utils/file.go
old mode 100644
new mode 100755
index 6dd78164..7ae07158
--- a/pkg/utils/file.go
+++ b/pkg/utils/file.go
@@ -113,7 +113,7 @@ func CreateNestedFile(path string) (*os.File, error) {
}
// CreateTempFile create temp file from io.ReadCloser, and seek to 0
-func CreateTempFile(r io.ReadCloser, size int64) (*os.File, error) {
+func CreateTempFile(r io.Reader, size int64) (*os.File, error) {
if f, ok := r.(*os.File); ok {
return f, nil
}
@@ -126,7 +126,7 @@ func CreateTempFile(r io.ReadCloser, size int64) (*os.File, error) {
_ = os.Remove(f.Name())
return nil, errs.NewErr(err, "CreateTempFile failed")
}
- if size != 0 && readBytes != size {
+ if size > 0 && readBytes != size {
_ = os.Remove(f.Name())
return nil, errs.NewErr(err, "CreateTempFile failed, incoming stream actual size= %d, expect = %d ", readBytes, size)
}
@@ -163,11 +163,25 @@ func GetObjType(filename string, isDir bool) int {
return GetFileType(filename)
}
+var extraMimeTypes = map[string]string{
+ ".apk": "application/vnd.android.package-archive",
+}
+
func GetMimeType(name string) string {
ext := path.Ext(name)
+ if m, ok := extraMimeTypes[ext]; ok {
+ return m
+ }
m := mime.TypeByExtension(ext)
if m != "" {
return m
}
return "application/octet-stream"
}
+
+const (
+ KB = 1 << (10 * (iota + 1))
+ MB
+ GB
+ TB
+)
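The new `KB`/`MB`/`GB`/`TB` constants expand to 1<<10, 1<<20, 1<<30 and 1<<40, and `GetMimeType` now consults the small override table before Go's `mime` registry. A minimal check of both, again assuming the alist module is importable:

```go
package main

import (
	"fmt"

	"github.com/alist-org/alist/v3/pkg/utils"
)

func main() {
	fmt.Println(utils.KB, utils.MB, utils.GB) // 1024 1048576 1073741824

	// ".apk" is resolved by the override table added above.
	fmt.Println(utils.GetMimeType("app.apk")) // application/vnd.android.package-archive

	// Unknown extensions still fall back to application/octet-stream.
	fmt.Println(utils.GetMimeType("notes.unknown-ext"))
}
```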
diff --git a/pkg/utils/hash.go b/pkg/utils/hash.go
old mode 100644
new mode 100755
diff --git a/pkg/utils/hash/gcid.go b/pkg/utils/hash/gcid.go
old mode 100644
new mode 100755
diff --git a/pkg/utils/hash_test.go b/pkg/utils/hash_test.go
new file mode 100755
index 00000000..55713c1a
--- /dev/null
+++ b/pkg/utils/hash_test.go
@@ -0,0 +1,67 @@
+package utils
+
+import (
+ "bytes"
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+ "io"
+ "testing"
+)
+
+type hashTest struct {
+ input []byte
+ output map[*HashType]string
+}
+
+var hashTestSet = []hashTest{
+ {
+ input: []byte{1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14},
+ output: map[*HashType]string{
+ MD5: "bf13fc19e5151ac57d4252e0e0f87abe",
+ SHA1: "3ab6543c08a75f292a5ecedac87ec41642d12166",
+ SHA256: "c839e57675862af5c21bd0a15413c3ec579e0d5522dab600bc6c3489b05b8f54",
+ },
+ },
+ // Empty data set
+ {
+ input: []byte{},
+ output: map[*HashType]string{
+ MD5: "d41d8cd98f00b204e9800998ecf8427e",
+ SHA1: "da39a3ee5e6b4b0d3255bfef95601890afd80709",
+ SHA256: "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ },
+ },
+}
+
+func TestMultiHasher(t *testing.T) {
+ for _, test := range hashTestSet {
+ mh := NewMultiHasher([]*HashType{MD5, SHA1, SHA256})
+ n, err := io.Copy(mh, bytes.NewBuffer(test.input))
+ require.NoError(t, err)
+ assert.Len(t, test.input, int(n))
+ hashInfo := mh.GetHashInfo()
+ for k, v := range hashInfo.h {
+ expect, ok := test.output[k]
+ require.True(t, ok, "test output for hash not found")
+ assert.Equal(t, expect, v)
+ }
+ // Test that all are present
+ for k, v := range test.output {
+ expect, ok := hashInfo.h[k]
+ require.True(t, ok, "test output for hash not found")
+ assert.Equal(t, expect, v)
+ }
+ for k, v := range test.output {
+ expect := hashInfo.GetHash(k)
+ require.True(t, len(expect) > 0, "test output for hash not found")
+ assert.Equal(t, expect, v)
+ }
+ expect := hashInfo.GetHash(nil)
+ require.True(t, len(expect) == 0, "unknown type should return empty string")
+ str := hashInfo.String()
+ Log.Info("str=" + str)
+ newHi := FromString(str)
+ assert.Equal(t, newHi.h, hashInfo.h)
+
+ }
+}
diff --git a/pkg/utils/io.go b/pkg/utils/io.go
old mode 100644
new mode 100755
index 936461a7..6852e28a
--- a/pkg/utils/io.go
+++ b/pkg/utils/io.go
@@ -3,10 +3,13 @@ package utils
import (
"bytes"
"context"
+ "errors"
"fmt"
"io"
"time"
+ "golang.org/x/exp/constraints"
+
log "github.com/sirupsen/logrus"
)
@@ -17,9 +20,9 @@ type readerFunc func(p []byte) (n int, err error)
func (rf readerFunc) Read(p []byte) (n int, err error) { return rf(p) }
// CopyWithCtx slightly modified function signature:
-// - context has been added in order to propagate cancelation
+// - context has been added in order to propagate cancellation
// - I do not return the number of bytes written, has it is not useful in my use case
-func CopyWithCtx(ctx context.Context, out io.Writer, in io.Reader, size int64, progress func(percentage int)) error {
+func CopyWithCtx(ctx context.Context, out io.Writer, in io.Reader, size int64, progress func(percentage float64)) error {
// Copy will call the Reader and Writer interface multiple time, in order
// to copy by chunk (avoiding loading the whole file in memory).
// I insert the ability to cancel before read time as it is the earliest
@@ -38,7 +41,7 @@ func CopyWithCtx(ctx context.Context, out io.Writer, in io.Reader, size int64, p
n, err := in.Read(p)
if s > 0 && (err == nil || err == io.EOF) {
finish += int64(n)
- progress(int(finish / s))
+ progress(float64(finish) / float64(s))
}
return n, err
}
@@ -132,16 +135,6 @@ func (mr *MultiReadable) Close() error {
return nil
}
-type nopCloser struct {
- io.ReadSeeker
-}
-
-func (nopCloser) Close() error { return nil }
-
-func ReadSeekerNopCloser(r io.ReadSeeker) io.ReadSeekCloser {
- return nopCloser{r}
-}
-
func Retry(attempts int, sleep time.Duration, f func() error) (err error) {
for i := 0; i < attempts; i++ {
fmt.Println("This is attempt number", i)
@@ -158,23 +151,56 @@ func Retry(attempts int, sleep time.Duration, f func() error) (err error) {
return fmt.Errorf("after %d attempts, last error: %s", attempts, err)
}
+type ClosersIF interface {
+ io.Closer
+ Add(closer io.Closer)
+ AddClosers(closers Closers)
+ GetClosers() Closers
+}
+
type Closers struct {
- closers []*io.Closer
+ closers []io.Closer
+}
+
+func (c *Closers) GetClosers() Closers {
+ return *c
}
-func (c *Closers) Close() (err error) {
+var _ ClosersIF = (*Closers)(nil)
+
+func (c *Closers) Close() error {
+ var errs []error
for _, closer := range c.closers {
if closer != nil {
- _ = (*closer).Close()
+ errs = append(errs, closer.Close())
}
}
- return nil
+ return errors.Join(errs...)
}
func (c *Closers) Add(closer io.Closer) {
- if closer != nil {
- c.closers = append(c.closers, &closer)
+ c.closers = append(c.closers, closer)
+
+}
+func (c *Closers) AddClosers(closers Closers) {
+ c.closers = append(c.closers, closers.closers...)
+}
+
+func EmptyClosers() Closers {
+ return Closers{[]io.Closer{}}
+}
+func NewClosers(c ...io.Closer) Closers {
+ return Closers{c}
+}
+
+func Min[T constraints.Ordered](a, b T) T {
+ if a < b {
+ return a
}
+ return b
}
-func NewClosers() *Closers {
- return &Closers{[]*io.Closer{}}
+func Max[T constraints.Ordered](a, b T) T {
+ if a < b {
+ return b
+ }
+ return a
}
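`Closers` now holds plain `io.Closer` values and aggregates every close error with `errors.Join` instead of discarding them, and the generic `Min`/`Max` helpers accept any `constraints.Ordered` type. A small usage sketch with hypothetical closers (not the PR's own call sites):

```go
package main

import (
	"fmt"
	"io"
	"strings"

	"github.com/alist-org/alist/v3/pkg/utils"
)

// failingCloser always errors on Close, to show that errors are aggregated.
type failingCloser struct{ name string }

func (f failingCloser) Close() error { return fmt.Errorf("close %s failed", f.name) }

func main() {
	c := utils.EmptyClosers()
	c.Add(io.NopCloser(strings.NewReader("ok"))) // closes cleanly
	c.Add(failingCloser{name: "tmp"})            // fails on close

	// Close now reports every failure instead of silently returning nil.
	fmt.Println(c.Close()) // close tmp failed

	fmt.Println(utils.Min(3, 7), utils.Max(2.5, 1.25)) // 3 2.5
}
```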
diff --git a/pkg/utils/ip.go b/pkg/utils/ip.go
old mode 100644
new mode 100755
diff --git a/pkg/utils/json.go b/pkg/utils/json.go
old mode 100644
new mode 100755
diff --git a/pkg/utils/log.go b/pkg/utils/log.go
old mode 100644
new mode 100755
diff --git a/pkg/utils/map.go b/pkg/utils/map.go
old mode 100644
new mode 100755
diff --git a/pkg/utils/path.go b/pkg/utils/path.go
old mode 100644
new mode 100755
diff --git a/pkg/utils/path_test.go b/pkg/utils/path_test.go
old mode 100644
new mode 100755
diff --git a/pkg/utils/random/random.go b/pkg/utils/random/random.go
old mode 100644
new mode 100755
diff --git a/pkg/utils/slice.go b/pkg/utils/slice.go
old mode 100644
new mode 100755
diff --git a/pkg/utils/str.go b/pkg/utils/str.go
old mode 100644
new mode 100755
index 509bb828..e42484dc
--- a/pkg/utils/str.go
+++ b/pkg/utils/str.go
@@ -30,3 +30,13 @@ func SafeAtob(data string) (string, error) {
}
return string(bytes), err
}
+
+// GetNoneEmpty returns the first non-empty string, or an empty string if all are empty.
+func GetNoneEmpty(strArr ...string) string {
+ for _, s := range strArr {
+ if len(s) > 0 {
+ return s
+ }
+ }
+ return ""
+}
diff --git a/pkg/utils/time.go b/pkg/utils/time.go
old mode 100644
new mode 100755
index 41632e40..a9d9b5b6
--- a/pkg/utils/time.go
+++ b/pkg/utils/time.go
@@ -5,8 +5,10 @@ import (
"time"
)
+var CNLoc = time.FixedZone("UTC", 8*60*60)
+
func MustParseCNTime(str string) time.Time {
- lastOpTime, _ := time.ParseInLocation("2006-01-02 15:04:05 -07", str+" +08", time.Local)
+ lastOpTime, _ := time.ParseInLocation("2006-01-02 15:04:05 -07", str+" +08", CNLoc)
return lastOpTime
}
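Because the parsed value carries an explicit `+08` offset, the resulting instant is the same whether `time.Local` or a fixed zone is passed; switching to `CNLoc` pins the `Location` attached to the result so later formatting does not depend on the host's configured time zone. A standalone sketch of that difference (zone name and values are illustrative):

```go
package main

import (
	"fmt"
	"time"
)

func main() {
	const layout = "2006-01-02 15:04:05 -07"
	const value = "2023-05-01 12:00:00 +08"

	cn := time.FixedZone("UTC+8", 8*60*60)

	inCN, _ := time.ParseInLocation(layout, value, cn)
	inLocal, _ := time.ParseInLocation(layout, value, time.Local)

	// Same instant either way, because the string itself states the offset...
	fmt.Println(inCN.Unix() == inLocal.Unix()) // true
	// ...but the attached Location differs unless the host already runs in UTC+8.
	fmt.Println(inCN.Location(), inLocal.Location())
}
```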
diff --git a/pkg/utils/url.go b/pkg/utils/url.go
old mode 100644
new mode 100755
diff --git a/public/public.go b/public/public.go
old mode 100644
new mode 100755
diff --git a/release.sh b/release.sh
old mode 100644
new mode 100755
diff --git a/renovate.json b/renovate.json
old mode 100644
new mode 100755
diff --git a/server/common/auth.go b/server/common/auth.go
old mode 100644
new mode 100755
index 017390bd..b6a79b75
--- a/server/common/auth.go
+++ b/server/common/auth.go
@@ -4,6 +4,7 @@ import (
"time"
"github.com/alist-org/alist/v3/internal/conf"
+ "github.com/alist-org/alist/v3/internal/model"
"github.com/golang-jwt/jwt/v4"
"github.com/pkg/errors"
)
@@ -12,12 +13,14 @@ var SecretKey []byte
type UserClaims struct {
Username string `json:"username"`
+ PwdTS int64 `json:"pwd_ts"`
jwt.RegisteredClaims
}
-func GenerateToken(username string) (tokenString string, err error) {
+func GenerateToken(user *model.User) (tokenString string, err error) {
claim := UserClaims{
- Username: username,
+ Username: user.Username,
+ PwdTS: user.PwdTS,
RegisteredClaims: jwt.RegisteredClaims{
ExpiresAt: jwt.NewNumericDate(time.Now().Add(time.Duration(conf.Conf.TokenExpiresIn) * time.Hour)),
IssuedAt: jwt.NewNumericDate(time.Now()),
diff --git a/server/common/base.go b/server/common/base.go
old mode 100644
new mode 100755
diff --git a/server/common/check.go b/server/common/check.go
old mode 100644
new mode 100755
diff --git a/server/common/check_test.go b/server/common/check_test.go
old mode 100644
new mode 100755
diff --git a/server/common/common.go b/server/common/common.go
old mode 100644
new mode 100755
diff --git a/server/common/hide_privacy_test.go b/server/common/hide_privacy_test.go
old mode 100644
new mode 100755
diff --git a/server/common/proxy.go b/server/common/proxy.go
old mode 100644
new mode 100755
index 45c2b820..4ca4ba7f
--- a/server/common/proxy.go
+++ b/server/common/proxy.go
@@ -6,55 +6,37 @@ import (
"io"
"net/http"
"net/url"
- "sync"
- "github.com/alist-org/alist/v3/drivers/base"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/internal/net"
"github.com/alist-org/alist/v3/pkg/http_range"
"github.com/alist-org/alist/v3/pkg/utils"
- "github.com/pkg/errors"
)
-func HttpClient() *http.Client {
- once.Do(func() {
- httpClient = base.NewHttpClient()
- httpClient.CheckRedirect = func(req *http.Request, via []*http.Request) error {
- if len(via) >= 10 {
- return errors.New("stopped after 10 redirects")
- }
- req.Header.Del("Referer")
- return nil
- }
- })
- return httpClient
-}
-
-var once sync.Once
-var httpClient *http.Client
-
func Proxy(w http.ResponseWriter, r *http.Request, link *model.Link, file model.Obj) error {
- if link.ReadSeekCloser != nil {
+ if link.MFile != nil {
+ defer link.MFile.Close()
attachFileName(w, file)
- http.ServeContent(w, r, file.GetName(), file.ModTime(), link.ReadSeekCloser)
- defer link.ReadSeekCloser.Close()
+ contentType := link.Header.Get("Content-Type")
+ if contentType != "" {
+ w.Header().Set("Content-Type", contentType)
+ }
+ http.ServeContent(w, r, file.GetName(), file.ModTime(), link.MFile)
return nil
- } else if link.RangeReadCloser.RangeReader != nil {
+ } else if link.RangeReadCloser != nil {
attachFileName(w, file)
- net.ServeHTTP(w, r, file.GetName(), file.ModTime(), file.GetSize(), link.RangeReadCloser.RangeReader)
+ net.ServeHTTP(w, r, file.GetName(), file.ModTime(), file.GetSize(), link.RangeReadCloser.RangeRead)
defer func() {
- if link.RangeReadCloser.Closers != nil {
- link.RangeReadCloser.Closers.Close()
- }
+ _ = link.RangeReadCloser.Close()
}()
return nil
} else if link.Concurrency != 0 || link.PartSize != 0 {
attachFileName(w, file)
size := file.GetSize()
//var finalClosers model.Closers
- finalClosers := utils.NewClosers()
+ finalClosers := utils.EmptyClosers()
header := net.ProcessHeader(r.Header, link.Header)
- rangeReader := func(httpRange http_range.Range) (io.ReadCloser, error) {
+ rangeReader := func(ctx context.Context, httpRange http_range.Range) (io.ReadCloser, error) {
down := net.NewDownloader(func(d *net.Downloader) {
d.Concurrency = link.Concurrency
d.PartSize = link.PartSize
@@ -65,7 +47,7 @@ func Proxy(w http.ResponseWriter, r *http.Request, link *model.Link, file model.
Size: size,
HeaderRef: header,
}
- rc, err := down.Download(context.Background(), req)
+ rc, err := down.Download(ctx, req)
finalClosers.Add(rc)
return rc, err
}
@@ -75,7 +57,7 @@ func Proxy(w http.ResponseWriter, r *http.Request, link *model.Link, file model.
} else {
//transparent proxy
header := net.ProcessHeader(r.Header, link.Header)
- res, err := net.RequestHttp(r.Method, header, link.URL)
+ res, err := net.RequestHttp(context.Background(), r.Method, header, link.URL)
if err != nil {
return err
}
@@ -98,4 +80,5 @@ func Proxy(w http.ResponseWriter, r *http.Request, link *model.Link, file model.
func attachFileName(w http.ResponseWriter, file model.Obj) {
fileName := file.GetName()
w.Header().Set("Content-Disposition", fmt.Sprintf(`attachment; filename="%s"; filename*=UTF-8''%s`, fileName, url.PathEscape(fileName)))
+ w.Header().Set("Content-Type", utils.GetMimeType(fileName))
}
diff --git a/server/common/resp.go b/server/common/resp.go
old mode 100644
new mode 100755
diff --git a/server/common/sign.go b/server/common/sign.go
old mode 100644
new mode 100755
diff --git a/server/dev.go b/server/debug.go
old mode 100644
new mode 100755
similarity index 60%
rename from server/dev.go
rename to server/debug.go
index d39ef7a0..081ef8c3
--- a/server/dev.go
+++ b/server/debug.go
@@ -1,12 +1,20 @@
package server
import (
+ "net/http"
+ _ "net/http/pprof"
+ "runtime"
+
"github.com/alist-org/alist/v3/server/common"
"github.com/alist-org/alist/v3/server/middlewares"
"github.com/gin-gonic/gin"
)
-func dev(g *gin.RouterGroup) {
+func _pprof(g *gin.RouterGroup) {
+ g.Any("/*name", gin.WrapH(http.DefaultServeMux))
+}
+
+func debug(g *gin.RouterGroup) {
g.GET("/path/*path", middlewares.Down, func(ctx *gin.Context) {
rawPath := ctx.MustGet("path").(string)
ctx.JSON(200, gin.H{
@@ -16,4 +24,9 @@ func dev(g *gin.RouterGroup) {
g.GET("/hide_privacy", func(ctx *gin.Context) {
common.ErrorStrResp(ctx, "This is ip: 1.1.1.1", 400)
})
+ g.GET("/gc", func(c *gin.Context) {
+ runtime.GC()
+ c.String(http.StatusOK, "ok")
+ })
+ _pprof(g.Group("/pprof"))
}
diff --git a/server/handles/aria2.go b/server/handles/aria2.go
deleted file mode 100644
index 325367a7..00000000
--- a/server/handles/aria2.go
+++ /dev/null
@@ -1,80 +0,0 @@
-package handles
-
-import (
- "github.com/alist-org/alist/v3/internal/aria2"
- "github.com/alist-org/alist/v3/internal/conf"
- "github.com/alist-org/alist/v3/internal/model"
- "github.com/alist-org/alist/v3/internal/op"
- "github.com/alist-org/alist/v3/server/common"
- "github.com/gin-gonic/gin"
-)
-
-type SetAria2Req struct {
- Uri string `json:"uri" form:"uri"`
- Secret string `json:"secret" form:"secret"`
-}
-
-func SetAria2(c *gin.Context) {
- var req SetAria2Req
- if err := c.ShouldBind(&req); err != nil {
- common.ErrorResp(c, err, 400)
- return
- }
- items := []model.SettingItem{
- {Key: conf.Aria2Uri, Value: req.Uri, Type: conf.TypeString, Group: model.ARIA2, Flag: model.PRIVATE},
- {Key: conf.Aria2Secret, Value: req.Secret, Type: conf.TypeString, Group: model.ARIA2, Flag: model.PRIVATE},
- }
- if err := op.SaveSettingItems(items); err != nil {
- common.ErrorResp(c, err, 500)
- return
- }
- version, err := aria2.InitClient(2)
- if err != nil {
- common.ErrorResp(c, err, 500)
- return
- }
- common.SuccessResp(c, version)
-}
-
-type AddAria2Req struct {
- Urls []string `json:"urls"`
- Path string `json:"path"`
-}
-
-func AddAria2(c *gin.Context) {
- user := c.MustGet("user").(*model.User)
- if !user.CanAddAria2Tasks() {
- common.ErrorStrResp(c, "permission denied", 403)
- return
- }
- if !aria2.IsAria2Ready() {
- // try to init client
- _, err := aria2.InitClient(2)
- if err != nil {
- common.ErrorResp(c, err, 500)
- return
- }
- if !aria2.IsAria2Ready() {
- common.ErrorStrResp(c, "aria2 still not ready after init", 500)
- return
- }
- }
- var req AddAria2Req
- if err := c.ShouldBind(&req); err != nil {
- common.ErrorResp(c, err, 400)
- return
- }
- reqPath, err := user.JoinPath(req.Path)
- if err != nil {
- common.ErrorResp(c, err, 403)
- return
- }
- for _, url := range req.Urls {
- err := aria2.AddURI(c, url, reqPath)
- if err != nil {
- common.ErrorResp(c, err, 500)
- return
- }
- }
- common.SuccessResp(c)
-}
diff --git a/server/handles/auth.go b/server/handles/auth.go
old mode 100644
new mode 100755
index 02538f2c..5272521d
--- a/server/handles/auth.go
+++ b/server/handles/auth.go
@@ -79,7 +79,7 @@ func loginHash(c *gin.Context, req *LoginReq) {
}
}
// generate token
- token, err := common.GenerateToken(user.Username)
+ token, err := common.GenerateToken(user)
if err != nil {
common.ErrorResp(c, err, 400, true)
return
diff --git a/server/handles/down.go b/server/handles/down.go
old mode 100644
new mode 100755
index e3528784..d3d41e85
--- a/server/handles/down.go
+++ b/server/handles/down.go
@@ -40,19 +40,21 @@ func Down(c *gin.Context) {
common.ErrorResp(c, err, 500)
return
}
- if link.ReadSeekCloser != nil {
+ if link.MFile != nil {
defer func(ReadSeekCloser io.ReadCloser) {
err := ReadSeekCloser.Close()
if err != nil {
log.Errorf("close data error: %s", err)
}
- }(link.ReadSeekCloser)
+ }(link.MFile)
}
c.Header("Referrer-Policy", "no-referrer")
c.Header("Cache-Control", "max-age=0, no-cache, no-store, must-revalidate")
if setting.GetBool(conf.ForwardDirectLinkParams) {
query := c.Request.URL.Query()
- query.Del("sign")
+ for _, v := range conf.SlicesMap[conf.IgnoreDirectLinkParams] {
+ query.Del(v)
+ }
link.URL, err = utils.InjectQuery(link.URL, query)
if err != nil {
common.ErrorResp(c, err, 500)
@@ -95,7 +97,9 @@ func Proxy(c *gin.Context) {
}
if link.URL != "" && setting.GetBool(conf.ForwardDirectLinkParams) {
query := c.Request.URL.Query()
- query.Del("sign")
+ for _, v := range conf.SlicesMap[conf.IgnoreDirectLinkParams] {
+ query.Del(v)
+ }
link.URL, err = utils.InjectQuery(link.URL, query)
if err != nil {
common.ErrorResp(c, err, 500)
diff --git a/server/handles/driver.go b/server/handles/driver.go
old mode 100644
new mode 100755
diff --git a/server/handles/fsbatch.go b/server/handles/fsbatch.go
old mode 100644
new mode 100755
diff --git a/server/handles/fsmanage.go b/server/handles/fsmanage.go
old mode 100644
new mode 100755
index 29e46665..3d446eda
--- a/server/handles/fsmanage.go
+++ b/server/handles/fsmanage.go
@@ -2,6 +2,7 @@ package handles
import (
"fmt"
+ "github.com/xhofe/tache"
"io"
stdpath "path"
@@ -120,22 +121,20 @@ func FsCopy(c *gin.Context) {
common.ErrorResp(c, err, 403)
return
}
- var addedTask []string
+ var addedTasks []tache.TaskWithInfo
for i, name := range req.Names {
- ok, err := fs.Copy(c, stdpath.Join(srcDir, name), dstDir, len(req.Names) > i+1)
- if ok {
- addedTask = append(addedTask, name)
+ t, err := fs.Copy(c, stdpath.Join(srcDir, name), dstDir, len(req.Names) > i+1)
+ if t != nil {
+ addedTasks = append(addedTasks, t)
}
if err != nil {
common.ErrorResp(c, err, 500)
return
}
}
- if len(addedTask) > 0 {
- common.SuccessResp(c, fmt.Sprintf("Added %d tasks", len(addedTask)))
- } else {
- common.SuccessResp(c)
- }
+ common.SuccessResp(c, gin.H{
+ "tasks": getTaskInfos(addedTasks),
+ })
}
type RenameReq struct {
@@ -331,13 +330,13 @@ func Link(c *gin.Context) {
common.ErrorResp(c, err, 500)
return
}
- if link.ReadSeekCloser != nil {
+ if link.MFile != nil {
defer func(ReadSeekCloser io.ReadCloser) {
err := ReadSeekCloser.Close()
if err != nil {
log.Errorf("close link data error: %v", err)
}
- }(link.ReadSeekCloser)
+ }(link.MFile)
}
common.SuccessResp(c, link)
return
diff --git a/server/handles/fsread.go b/server/handles/fsread.go
old mode 100644
new mode 100755
index c21e294b..7c580f63
--- a/server/handles/fsread.go
+++ b/server/handles/fsread.go
@@ -33,19 +33,23 @@ type DirReq struct {
}
type ObjResp struct {
- Name string `json:"name"`
- Size int64 `json:"size"`
- IsDir bool `json:"is_dir"`
- Modified time.Time `json:"modified"`
- Sign string `json:"sign"`
- Thumb string `json:"thumb"`
- Type int `json:"type"`
+ Name string `json:"name"`
+ Size int64 `json:"size"`
+ IsDir bool `json:"is_dir"`
+ Modified time.Time `json:"modified"`
+ Created time.Time `json:"created"`
+ Sign string `json:"sign"`
+ Thumb string `json:"thumb"`
+ Type int `json:"type"`
+ HashInfoStr string `json:"hashinfo"`
+ HashInfo map[*utils.HashType]string `json:"hash_info"`
}
type FsListResp struct {
Content []ObjResp `json:"content"`
Total int64 `json:"total"`
Readme string `json:"readme"`
+ Header string `json:"header"`
Write bool `json:"write"`
Provider string `json:"provider"`
}
@@ -94,6 +98,7 @@ func FsList(c *gin.Context) {
Content: toObjsResp(objs, reqPath, isEncrypt(meta, reqPath)),
Total: int64(total),
Readme: getReadme(meta, reqPath),
+ Header: getHeader(meta, reqPath),
Write: user.CanWrite() || common.CanWrite(meta, reqPath),
Provider: provider,
})
@@ -166,6 +171,13 @@ func getReadme(meta *model.Meta, path string) string {
return ""
}
+func getHeader(meta *model.Meta, path string) string {
+ if meta != nil && (utils.PathEqual(meta.Path, path) || meta.HeaderSub) {
+ return meta.Header
+ }
+ return ""
+}
+
func isEncrypt(meta *model.Meta, path string) bool {
if common.IsStorageSignEnabled(path) {
return true
@@ -198,13 +210,16 @@ func toObjsResp(objs []model.Obj, parent string, encrypt bool) []ObjResp {
for _, obj := range objs {
thumb, _ := model.GetThumb(obj)
resp = append(resp, ObjResp{
- Name: obj.GetName(),
- Size: obj.GetSize(),
- IsDir: obj.IsDir(),
- Modified: obj.ModTime(),
- Sign: common.Sign(obj, parent, encrypt),
- Thumb: thumb,
- Type: utils.GetObjType(obj.GetName(), obj.IsDir()),
+ Name: obj.GetName(),
+ Size: obj.GetSize(),
+ IsDir: obj.IsDir(),
+ Modified: obj.ModTime(),
+ Created: obj.CreateTime(),
+ HashInfoStr: obj.GetHash().String(),
+ HashInfo: obj.GetHash().Export(),
+ Sign: common.Sign(obj, parent, encrypt),
+ Thumb: thumb,
+ Type: utils.GetObjType(obj.GetName(), obj.IsDir()),
})
}
return resp
@@ -219,6 +234,7 @@ type FsGetResp struct {
ObjResp
RawURL string `json:"raw_url"`
Readme string `json:"readme"`
+ Header string `json:"header"`
Provider string `json:"provider"`
Related []ObjResp `json:"related"`
}
@@ -309,16 +325,20 @@ func FsGet(c *gin.Context) {
thumb, _ := model.GetThumb(obj)
common.SuccessResp(c, FsGetResp{
ObjResp: ObjResp{
- Name: obj.GetName(),
- Size: obj.GetSize(),
- IsDir: obj.IsDir(),
- Modified: obj.ModTime(),
- Sign: common.Sign(obj, parentPath, isEncrypt(meta, reqPath)),
- Type: utils.GetFileType(obj.GetName()),
- Thumb: thumb,
+ Name: obj.GetName(),
+ Size: obj.GetSize(),
+ IsDir: obj.IsDir(),
+ Modified: obj.ModTime(),
+ Created: obj.CreateTime(),
+ HashInfoStr: obj.GetHash().String(),
+ HashInfo: obj.GetHash().Export(),
+ Sign: common.Sign(obj, parentPath, isEncrypt(meta, reqPath)),
+ Type: utils.GetFileType(obj.GetName()),
+ Thumb: thumb,
},
RawURL: rawURL,
Readme: getReadme(meta, reqPath),
+ Header: getHeader(meta, reqPath),
Provider: provider,
Related: toObjsResp(related, parentPath, isEncrypt(parentMeta, parentPath)),
})
diff --git a/server/handles/fsup.go b/server/handles/fsup.go
old mode 100644
new mode 100755
index 5cde6fb8..ef9baa11
--- a/server/handles/fsup.go
+++ b/server/handles/fsup.go
@@ -1,17 +1,32 @@
package handles
import (
+ "github.com/xhofe/tache"
+ "io"
"net/url"
stdpath "path"
"strconv"
"time"
+ "github.com/alist-org/alist/v3/internal/stream"
+
"github.com/alist-org/alist/v3/internal/fs"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/server/common"
"github.com/gin-gonic/gin"
)
+func getLastModified(c *gin.Context) time.Time {
+ now := time.Now()
+ lastModifiedStr := c.GetHeader("Last-Modified")
+ lastModifiedMillisecond, err := strconv.ParseInt(lastModifiedStr, 10, 64)
+ if err != nil {
+ return now
+ }
+ lastModified := time.UnixMilli(lastModifiedMillisecond)
+ return lastModified
+}
+
func FsStream(c *gin.Context) {
path := c.GetHeader("File-Path")
path, err := url.PathUnescape(path)
@@ -33,26 +48,34 @@ func FsStream(c *gin.Context) {
common.ErrorResp(c, err, 400)
return
}
- stream := &model.FileStream{
+ s := &stream.FileStream{
Obj: &model.Object{
Name: name,
Size: size,
- Modified: time.Now(),
+ Modified: getLastModified(c),
},
- ReadCloser: c.Request.Body,
+ Reader: c.Request.Body,
Mimetype: c.GetHeader("Content-Type"),
WebPutAsTask: asTask,
}
+ var t tache.TaskWithInfo
if asTask {
- err = fs.PutAsTask(dir, stream)
+ t, err = fs.PutAsTask(dir, s)
} else {
- err = fs.PutDirectly(c, dir, stream, true)
+ err = fs.PutDirectly(c, dir, s, true)
}
+ defer c.Request.Body.Close()
if err != nil {
common.ErrorResp(c, err, 500)
return
}
- common.SuccessResp(c)
+ if t == nil {
+ common.SuccessResp(c)
+ return
+ }
+ common.SuccessResp(c, gin.H{
+ "task": getTaskInfo(t),
+ })
}
func FsForm(c *gin.Context) {
@@ -88,25 +111,41 @@ func FsForm(c *gin.Context) {
common.ErrorResp(c, err, 500)
return
}
+ defer f.Close()
dir, name := stdpath.Split(path)
- stream := &model.FileStream{
+ s := stream.FileStream{
Obj: &model.Object{
Name: name,
Size: file.Size,
- Modified: time.Now(),
+ Modified: getLastModified(c),
},
- ReadCloser: f,
+ Reader: f,
Mimetype: file.Header.Get("Content-Type"),
- WebPutAsTask: false,
+ WebPutAsTask: asTask,
}
+ var t tache.TaskWithInfo
if asTask {
- err = fs.PutAsTask(dir, stream)
+ s.Reader = struct {
+ io.Reader
+ }{f}
+ t, err = fs.PutAsTask(dir, &s)
} else {
- err = fs.PutDirectly(c, dir, stream, true)
+ ss, err := stream.NewSeekableStream(s, nil)
+ if err != nil {
+ common.ErrorResp(c, err, 500)
+ return
+ }
+ err = fs.PutDirectly(c, dir, ss, true)
}
if err != nil {
common.ErrorResp(c, err, 500)
return
}
- common.SuccessResp(c)
+ if t == nil {
+ common.SuccessResp(c)
+ return
+ }
+ common.SuccessResp(c, gin.H{
+ "task": getTaskInfo(t),
+ })
}
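Both upload handlers now read a `Last-Modified` header that the web client is expected to send as a Unix timestamp in milliseconds (not the standard HTTP date), falling back to the current time when it is absent or unparsable. A standalone sketch of that parsing rule:

```go
package main

import (
	"fmt"
	"strconv"
	"time"
)

// lastModifiedOrNow mirrors getLastModified: a millisecond Unix timestamp when
// parseable, otherwise the current time.
func lastModifiedOrNow(header string) time.Time {
	ms, err := strconv.ParseInt(header, 10, 64)
	if err != nil {
		return time.Now()
	}
	return time.UnixMilli(ms)
}

func main() {
	fmt.Println(lastModifiedOrNow("1700000000000").UTC())   // 2023-11-14 22:13:20 +0000 UTC
	fmt.Println(lastModifiedOrNow("not-a-number").IsZero()) // false: fell back to time.Now()
}
```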
diff --git a/server/handles/helper.go b/server/handles/helper.go
old mode 100644
new mode 100755
index eb68b33a..3f867fee
--- a/server/handles/helper.go
+++ b/server/handles/helper.go
@@ -22,13 +22,13 @@ func Robots(c *gin.Context) {
}
func QrCode(c *gin.Context) {
- url := c.Query("url")
- png, err := qrcode.Encode(url, qrcode.Medium, 256)
- if err != nil {
- common.ErrorResp(c, err, 500, true)
- } else {
- c.Data(200, "image/png", png)
- }
+ url := c.Query("url")
+ png, err := qrcode.Encode(url, qrcode.Medium, 256)
+ if err != nil {
+ common.ErrorResp(c, err, 500, true)
+ } else {
+ c.Data(200, "image/png", png)
+ }
}
func Plist(c *gin.Context) {
@@ -56,6 +56,8 @@ func Plist(c *gin.Context) {
}
fullName := c.Param("name")
Url := link.String()
+ Url = strings.ReplaceAll(Url, "<", "[")
+ Url = strings.ReplaceAll(Url, ">", "]")
nameEncode := linkNameSplit[1]
fullName, err = url.PathUnescape(nameEncode)
if err != nil {
diff --git a/server/handles/index.go b/server/handles/index.go
old mode 100644
new mode 100755
diff --git a/server/handles/meta.go b/server/handles/meta.go
old mode 100644
new mode 100755
diff --git a/server/handles/offline_download.go b/server/handles/offline_download.go
new file mode 100755
index 00000000..0b019e9e
--- /dev/null
+++ b/server/handles/offline_download.go
@@ -0,0 +1,118 @@
+package handles
+
+import (
+ "github.com/alist-org/alist/v3/internal/conf"
+ "github.com/alist-org/alist/v3/internal/model"
+ "github.com/alist-org/alist/v3/internal/offline_download/tool"
+ "github.com/alist-org/alist/v3/internal/op"
+ "github.com/alist-org/alist/v3/server/common"
+ "github.com/gin-gonic/gin"
+ "github.com/xhofe/tache"
+)
+
+type SetAria2Req struct {
+ Uri string `json:"uri" form:"uri"`
+ Secret string `json:"secret" form:"secret"`
+}
+
+func SetAria2(c *gin.Context) {
+ var req SetAria2Req
+ if err := c.ShouldBind(&req); err != nil {
+ common.ErrorResp(c, err, 400)
+ return
+ }
+ items := []model.SettingItem{
+ {Key: conf.Aria2Uri, Value: req.Uri, Type: conf.TypeString, Group: model.OFFLINE_DOWNLOAD, Flag: model.PRIVATE},
+ {Key: conf.Aria2Secret, Value: req.Secret, Type: conf.TypeString, Group: model.OFFLINE_DOWNLOAD, Flag: model.PRIVATE},
+ }
+ if err := op.SaveSettingItems(items); err != nil {
+ common.ErrorResp(c, err, 500)
+ return
+ }
+ _tool, err := tool.Tools.Get("aria2")
+ version, err := _tool.Init()
+ if err != nil {
+ common.ErrorResp(c, err, 500)
+ return
+ }
+ common.SuccessResp(c, version)
+}
+
+type SetQbittorrentReq struct {
+ Url string `json:"url" form:"url"`
+ Seedtime string `json:"seedtime" form:"seedtime"`
+}
+
+func SetQbittorrent(c *gin.Context) {
+ var req SetQbittorrentReq
+ if err := c.ShouldBind(&req); err != nil {
+ common.ErrorResp(c, err, 400)
+ return
+ }
+ items := []model.SettingItem{
+ {Key: conf.QbittorrentUrl, Value: req.Url, Type: conf.TypeString, Group: model.OFFLINE_DOWNLOAD, Flag: model.PRIVATE},
+ {Key: conf.QbittorrentSeedtime, Value: req.Seedtime, Type: conf.TypeNumber, Group: model.OFFLINE_DOWNLOAD, Flag: model.PRIVATE},
+ }
+ if err := op.SaveSettingItems(items); err != nil {
+ common.ErrorResp(c, err, 500)
+ return
+ }
+ _tool, err := tool.Tools.Get("qBittorrent")
+ if err != nil {
+ common.ErrorResp(c, err, 500)
+ return
+ }
+ if _, err := _tool.Init(); err != nil {
+ common.ErrorResp(c, err, 500)
+ return
+ }
+ common.SuccessResp(c, "ok")
+}
+
+func OfflineDownloadTools(c *gin.Context) {
+ tools := tool.Tools.Names()
+ common.SuccessResp(c, tools)
+}
+
+type AddOfflineDownloadReq struct {
+ Urls []string `json:"urls"`
+ Path string `json:"path"`
+ Tool string `json:"tool"`
+ DeletePolicy string `json:"delete_policy"`
+}
+
+func AddOfflineDownload(c *gin.Context) {
+ user := c.MustGet("user").(*model.User)
+ if !user.CanAddOfflineDownloadTasks() {
+ common.ErrorStrResp(c, "permission denied", 403)
+ return
+ }
+
+ var req AddOfflineDownloadReq
+ if err := c.ShouldBind(&req); err != nil {
+ common.ErrorResp(c, err, 400)
+ return
+ }
+ reqPath, err := user.JoinPath(req.Path)
+ if err != nil {
+ common.ErrorResp(c, err, 403)
+ return
+ }
+ var tasks []tache.TaskWithInfo
+ for _, url := range req.Urls {
+ t, err := tool.AddURL(c, &tool.AddURLArgs{
+ URL: url,
+ DstDirPath: reqPath,
+ Tool: req.Tool,
+ DeletePolicy: tool.DeletePolicy(req.DeletePolicy),
+ })
+ if err != nil {
+ common.ErrorResp(c, err, 500)
+ return
+ }
+ tasks = append(tasks, t)
+ }
+ common.SuccessResp(c, gin.H{
+ "tasks": getTaskInfos(tasks),
+ })
+}
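`AddOfflineDownload` replaces the separate aria2 and qBittorrent endpoints with one request shape that also names the tool and a delete policy. A sketch of the JSON body it binds; field names and tags come from the hunk above, while the values and the omitted route path are illustrative:

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Mirrors handles.AddOfflineDownloadReq from the diff above.
type addOfflineDownloadReq struct {
	Urls         []string `json:"urls"`
	Path         string   `json:"path"`
	Tool         string   `json:"tool"`
	DeletePolicy string   `json:"delete_policy"` // valid values are defined by tool.DeletePolicy elsewhere in the PR
}

func main() {
	body, _ := json.MarshalIndent(addOfflineDownloadReq{
		Urls: []string{"https://releases.ubuntu.com/22.04/ubuntu-22.04.1-desktop-amd64.iso.torrent"},
		Path: "/downloads",
		Tool: "aria2",
	}, "", "  ")
	fmt.Println(string(body))
}
```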
diff --git a/server/handles/qbittorrent.go b/server/handles/qbittorrent.go
deleted file mode 100644
index b2280454..00000000
--- a/server/handles/qbittorrent.go
+++ /dev/null
@@ -1,79 +0,0 @@
-package handles
-
-import (
- "github.com/alist-org/alist/v3/internal/conf"
- "github.com/alist-org/alist/v3/internal/model"
- "github.com/alist-org/alist/v3/internal/op"
- "github.com/alist-org/alist/v3/internal/qbittorrent"
- "github.com/alist-org/alist/v3/server/common"
- "github.com/gin-gonic/gin"
-)
-
-type SetQbittorrentReq struct {
- Url string `json:"url" form:"url"`
- Seedtime string `json:"seedtime" form:"seedtime"`
-}
-
-func SetQbittorrent(c *gin.Context) {
- var req SetQbittorrentReq
- if err := c.ShouldBind(&req); err != nil {
- common.ErrorResp(c, err, 400)
- return
- }
- items := []model.SettingItem{
- {Key: conf.QbittorrentUrl, Value: req.Url, Type: conf.TypeString, Group: model.SINGLE, Flag: model.PRIVATE},
- {Key: conf.QbittorrentSeedtime, Value: req.Seedtime, Type: conf.TypeNumber, Group: model.SINGLE, Flag: model.PRIVATE},
- }
- if err := op.SaveSettingItems(items); err != nil {
- common.ErrorResp(c, err, 500)
- return
- }
- if err := qbittorrent.InitClient(); err != nil {
- common.ErrorResp(c, err, 500)
- return
- }
- common.SuccessResp(c, "ok")
-}
-
-type AddQbittorrentReq struct {
- Urls []string `json:"urls"`
- Path string `json:"path"`
-}
-
-func AddQbittorrent(c *gin.Context) {
- user := c.MustGet("user").(*model.User)
- if !user.CanAddQbittorrentTasks() {
- common.ErrorStrResp(c, "permission denied", 403)
- return
- }
- if !qbittorrent.IsQbittorrentReady() {
- // try to init client
- err := qbittorrent.InitClient()
- if err != nil {
- common.ErrorResp(c, err, 500)
- return
- }
- if !qbittorrent.IsQbittorrentReady() {
- common.ErrorStrResp(c, "qbittorrent still not ready after init", 500)
- return
- }
- }
- var req AddQbittorrentReq
- if err := c.ShouldBind(&req); err != nil {
- common.ErrorResp(c, err, 400)
- return
- }
- reqPath, err := user.JoinPath(req.Path)
- if err != nil {
- common.ErrorResp(c, err, 403)
- return
- }
- for _, url := range req.Urls {
- err := qbittorrent.AddURL(c, url, reqPath)
- if err != nil {
- common.ErrorResp(c, err, 500)
- return
- }
- }
- common.SuccessResp(c)
-}
diff --git a/server/handles/search.go b/server/handles/search.go
old mode 100644
new mode 100755
diff --git a/server/handles/setting.go b/server/handles/setting.go
old mode 100644
new mode 100755
diff --git a/server/handles/ssologin.go b/server/handles/ssologin.go
old mode 100644
new mode 100755
index f7e85807..52486b97
--- a/server/handles/ssologin.go
+++ b/server/handles/ssologin.go
@@ -2,10 +2,12 @@ package handles
import (
"encoding/base32"
+ "encoding/base64"
"errors"
"fmt"
"net/http"
"net/url"
+ "path"
"strings"
"time"
@@ -36,71 +38,85 @@ var opts = totp.ValidateOpts{
func SSOLoginRedirect(c *gin.Context) {
method := c.Query("method")
+ usecompatibility := setting.GetBool(conf.SSOCompatibilityMode)
enabled := setting.GetBool(conf.SSOLoginEnabled)
clientId := setting.GetStr(conf.SSOClientId)
platform := setting.GetStr(conf.SSOLoginPlatform)
var r_url string
var redirect_uri string
- if enabled {
- urlValues := url.Values{}
- if method == "" {
- common.ErrorStrResp(c, "no method provided", 400)
- return
- }
+ if !enabled {
+ common.ErrorStrResp(c, "Single sign-on is not enabled", 403)
+ return
+ }
+ urlValues := url.Values{}
+ if method == "" {
+ common.ErrorStrResp(c, "no method provided", 400)
+ return
+ }
+ if usecompatibility {
+ redirect_uri = common.GetApiUrl(c.Request) + "/api/auth/" + method
+ } else {
redirect_uri = common.GetApiUrl(c.Request) + "/api/auth/sso_callback" + "?method=" + method
+ }
+ urlValues.Add("response_type", "code")
+ urlValues.Add("redirect_uri", redirect_uri)
+ urlValues.Add("client_id", clientId)
+ switch platform {
+ case "Github":
+ r_url = "https://github.com/login/oauth/authorize?"
+ urlValues.Add("scope", "read:user")
+ case "Microsoft":
+ r_url = "https://login.microsoftonline.com/common/oauth2/v2.0/authorize?"
+ urlValues.Add("scope", "user.read")
+ urlValues.Add("response_mode", "query")
+ case "Google":
+ r_url = "https://accounts.google.com/o/oauth2/v2/auth?"
+ urlValues.Add("scope", "https://www.googleapis.com/auth/userinfo.profile")
+ case "Dingtalk":
+ r_url = "https://login.dingtalk.com/oauth2/auth?"
+ urlValues.Add("scope", "openid")
+ urlValues.Add("prompt", "consent")
urlValues.Add("response_type", "code")
- urlValues.Add("redirect_uri", redirect_uri)
- urlValues.Add("client_id", clientId)
- switch platform {
- case "Github":
- r_url = "https://github.com/login/oauth/authorize?"
- urlValues.Add("scope", "read:user")
- case "Microsoft":
- r_url = "https://login.microsoftonline.com/common/oauth2/v2.0/authorize?"
- urlValues.Add("scope", "user.read")
- urlValues.Add("response_mode", "query")
- case "Google":
- r_url = "https://accounts.google.com/o/oauth2/v2/auth?"
- urlValues.Add("scope", "https://www.googleapis.com/auth/userinfo.profile")
- case "Dingtalk":
- r_url = "https://login.dingtalk.com/oauth2/auth?"
- urlValues.Add("scope", "openid")
- urlValues.Add("prompt", "consent")
- urlValues.Add("response_type", "code")
- case "Casdoor":
- endpoint := strings.TrimSuffix(setting.GetStr(conf.SSOEndpointName), "/")
- r_url = endpoint + "/login/oauth/authorize?"
- urlValues.Add("scope", "profile")
- urlValues.Add("state", endpoint)
- case "OIDC":
- oauth2Config, err := GetOIDCClient(c)
- if err != nil {
- common.ErrorStrResp(c, err.Error(), 400)
- return
- }
- // generate state parameter
- state, err := totp.GenerateCodeCustom(base32.StdEncoding.EncodeToString([]byte(oauth2Config.ClientSecret)), time.Now(), opts)
- if err != nil {
- common.ErrorStrResp(c, err.Error(), 400)
- return
- }
- c.Redirect(http.StatusFound, oauth2Config.AuthCodeURL(state))
+ case "Casdoor":
+ endpoint := strings.TrimSuffix(setting.GetStr(conf.SSOEndpointName), "/")
+ r_url = endpoint + "/login/oauth/authorize?"
+ urlValues.Add("scope", "profile")
+ urlValues.Add("state", endpoint)
+ case "OIDC":
+ oauth2Config, err := GetOIDCClient(c)
+ if err != nil {
+ common.ErrorStrResp(c, err.Error(), 400)
return
- default:
- common.ErrorStrResp(c, "invalid platform", 400)
+ }
+ // generate state parameter
+ state, err := totp.GenerateCodeCustom(base32.StdEncoding.EncodeToString([]byte(oauth2Config.ClientSecret)), time.Now(), opts)
+ if err != nil {
+ common.ErrorStrResp(c, err.Error(), 400)
return
}
- c.Redirect(302, r_url+urlValues.Encode())
- } else {
- common.ErrorStrResp(c, "Single sign-on is not enabled", 403)
+ c.Redirect(http.StatusFound, oauth2Config.AuthCodeURL(state))
+ return
+ default:
+ common.ErrorStrResp(c, "invalid platform", 400)
+ return
}
+ c.Redirect(302, r_url+urlValues.Encode())
}
var ssoClient = resty.New().SetRetryCount(3)
func GetOIDCClient(c *gin.Context) (*oauth2.Config, error) {
+ var redirect_uri string
+ usecompatibility := setting.GetBool(conf.SSOCompatibilityMode)
argument := c.Query("method")
- redirect_uri := common.GetApiUrl(c.Request) + "/api/auth/sso_callback" + "?method=" + argument
+ if usecompatibility {
+ argument = path.Base(c.Request.URL.Path)
+ }
+ if usecompatibility {
+ redirect_uri = common.GetApiUrl(c.Request) + "/api/auth/" + argument
+ } else {
+ redirect_uri = common.GetApiUrl(c.Request) + "/api/auth/sso_callback" + "?method=" + argument
+ }
endpoint := setting.GetStr(conf.SSOEndpointName)
provider, err := oidc.NewProvider(c, endpoint)
if err != nil {
@@ -151,8 +167,24 @@ func autoRegister(username, userID string, err error) (*model.User, error) {
return user, nil
}
+func parseJWT(p string) ([]byte, error) {
+ parts := strings.Split(p, ".")
+ if len(parts) < 2 {
+ return nil, fmt.Errorf("oidc: malformed jwt, expected 3 parts got %d", len(parts))
+ }
+ payload, err := base64.RawURLEncoding.DecodeString(parts[1])
+ if err != nil {
+ return nil, fmt.Errorf("oidc: malformed jwt payload: %v", err)
+ }
+ return payload, nil
+}
+
func OIDCLoginCallback(c *gin.Context) {
+ useCompatibility := setting.GetBool(conf.SSOCompatibilityMode)
argument := c.Query("method")
+ if useCompatibility {
+ argument = path.Base(c.Request.URL.Path)
+ }
clientId := setting.GetStr(conf.SSOClientId)
endpoint := setting.GetStr(conf.SSOEndpointName)
provider, err := oidc.NewProvider(c, endpoint)
@@ -189,21 +221,26 @@ func OIDCLoginCallback(c *gin.Context) {
verifier := provider.Verifier(&oidc.Config{
ClientID: clientId,
})
- idToken, err := verifier.Verify(c, rawIDToken)
+ _, err = verifier.Verify(c, rawIDToken)
if err != nil {
common.ErrorResp(c, err, 400)
return
}
- type UserInfo struct {
- Name string `json:"name"`
- }
- claims := UserInfo{}
- if err := idToken.Claims(&claims); err != nil {
+ payload, err := parseJWT(rawIDToken)
+ if err != nil {
common.ErrorResp(c, err, 400)
return
}
- UserID := claims.Name
+ userID := utils.Json.Get(payload, conf.SSOOIDCUsernameKey).ToString()
+ if userID == "" {
+ common.ErrorStrResp(c, "cannot get username from OIDC provider", 400)
+ return
+ }
if argument == "get_sso_id" {
+ if useCompatibility {
+ c.Redirect(302, common.GetApiUrl(c.Request)+"/@manage?sso_id="+userID)
+ return
+ }
html := fmt.Sprintf(`
@@ -211,22 +248,26 @@ func OIDCLoginCallback(c *gin.Context) {
window.opener.postMessage({"sso_id": "%s"}, "*")
window.close()
- `, UserID)
+ `, userID)
c.Data(200, "text/html; charset=utf-8", []byte(html))
return
}
if argument == "sso_get_token" {
- user, err := db.GetUserBySSOID(UserID)
+ user, err := db.GetUserBySSOID(userID)
if err != nil {
- user, err = autoRegister(UserID, UserID, err)
+ user, err = autoRegister(userID, userID, err)
if err != nil {
common.ErrorResp(c, err, 400)
}
}
- token, err := common.GenerateToken(user.Username)
+ token, err := common.GenerateToken(user)
if err != nil {
common.ErrorResp(c, err, 400)
}
+ if useCompatibility {
+ c.Redirect(302, common.GetApiUrl(c.Request)+"/@login?token="+token)
+ return
+ }
html := fmt.Sprintf(`
@@ -242,12 +283,18 @@ func OIDCLoginCallback(c *gin.Context) {
func SSOLoginCallback(c *gin.Context) {
enabled := setting.GetBool(conf.SSOLoginEnabled)
+ usecompatibility := setting.GetBool(conf.SSOCompatibilityMode)
if !enabled {
common.ErrorResp(c, errors.New("sso login is disabled"), 500)
+ return
}
argument := c.Query("method")
+ if usecompatibility {
+ argument = path.Base(c.Request.URL.Path)
+ }
if !utils.SliceContains([]string{"get_sso_id", "sso_get_token"}, argument) {
common.ErrorResp(c, errors.New("invalid request"), 500)
+ return
}
clientId := setting.GetStr(conf.SSOClientId)
platform := setting.GetStr(conf.SSOLoginPlatform)
@@ -317,12 +364,18 @@ func SSOLoginCallback(c *gin.Context) {
}).
Post(tokenUrl)
} else {
+ var redirect_uri string
+ if usecompatibility {
+ redirect_uri = common.GetApiUrl(c.Request) + "/api/auth/" + argument
+ } else {
+ redirect_uri = common.GetApiUrl(c.Request) + "/api/auth/sso_callback" + "?method=" + argument
+ }
resp, err = ssoClient.R().SetHeader("Accept", "application/json").
SetFormData(map[string]string{
"client_id": clientId,
"client_secret": clientSecret,
"code": callbackCode,
- "redirect_uri": common.GetApiUrl(c.Request) + "/api/auth/sso_callback?method=" + argument,
+ "redirect_uri": redirect_uri,
"scope": scope,
}).SetFormData(additionalForm).Post(tokenUrl)
}
@@ -349,6 +402,10 @@ func SSOLoginCallback(c *gin.Context) {
return
}
if argument == "get_sso_id" {
+ if usecompatibility {
+ c.Redirect(302, common.GetApiUrl(c.Request)+"/@manage?sso_id="+userID)
+ return
+ }
html := fmt.Sprintf(`
@@ -369,10 +426,14 @@ func SSOLoginCallback(c *gin.Context) {
return
}
}
- token, err := common.GenerateToken(user.Username)
+ token, err := common.GenerateToken(user)
if err != nil {
common.ErrorResp(c, err, 400)
}
+ if usecompatibility {
+ c.Redirect(302, common.GetApiUrl(c.Request)+"/@login?token="+token)
+ return
+ }
html := fmt.Sprintf(`
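// Editor's sketch, not part of the patch: the redirect_uri rule that SSOLoginRedirect,
// GetOIDCClient and SSOLoginCallback above each spell out inline. The helper name is
// hypothetical; the settings keys and URL shapes are taken from the hunks above.
package handles

import (
    "github.com/alist-org/alist/v3/internal/conf"
    "github.com/alist-org/alist/v3/internal/setting"
    "github.com/alist-org/alist/v3/server/common"
    "github.com/gin-gonic/gin"
)

func ssoRedirectURI(c *gin.Context, method string) string {
    if setting.GetBool(conf.SSOCompatibilityMode) {
        // compatibility mode: the provider calls back to /api/auth/<method> directly
        return common.GetApiUrl(c.Request) + "/api/auth/" + method
    }
    // default: one shared callback endpoint with the method as a query parameter
    return common.GetApiUrl(c.Request) + "/api/auth/sso_callback?method=" + method
}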
diff --git a/server/handles/storage.go b/server/handles/storage.go
old mode 100644
new mode 100755
diff --git a/server/handles/task.go b/server/handles/task.go
old mode 100644
new mode 100755
index d76bb586..9c9486b9
--- a/server/handles/task.go
+++ b/server/handles/task.go
@@ -1,125 +1,91 @@
package handles
import (
- "strconv"
-
- "github.com/alist-org/alist/v3/internal/aria2"
"github.com/alist-org/alist/v3/internal/fs"
- "github.com/alist-org/alist/v3/internal/qbittorrent"
- "github.com/alist-org/alist/v3/pkg/task"
+ "github.com/alist-org/alist/v3/internal/offline_download/tool"
+ "github.com/alist-org/alist/v3/pkg/utils"
"github.com/alist-org/alist/v3/server/common"
"github.com/gin-gonic/gin"
+ "github.com/xhofe/tache"
)
type TaskInfo struct {
- ID string `json:"id"`
- Name string `json:"name"`
- State string `json:"state"`
- Status string `json:"status"`
- Progress int `json:"progress"`
- Error string `json:"error"`
-}
-
-type K2Str[K comparable] func(k K) string
-
-func uint64K2Str(k uint64) string {
- return strconv.FormatUint(k, 10)
+ ID string `json:"id"`
+ Name string `json:"name"`
+ State tache.State `json:"state"`
+ Status string `json:"status"`
+ Progress float64 `json:"progress"`
+ Error string `json:"error"`
}
-func strK2Str(str string) string {
- return str
-}
-
-func getTaskInfo[K comparable](task *task.Task[K], k2Str K2Str[K]) TaskInfo {
+func getTaskInfo[T tache.TaskWithInfo](task T) TaskInfo {
+ errMsg := ""
+ if task.GetErr() != nil {
+ errMsg = task.GetErr().Error()
+ }
return TaskInfo{
- ID: k2Str(task.ID),
- Name: task.Name,
+ ID: task.GetID(),
+ Name: task.GetName(),
State: task.GetState(),
Status: task.GetStatus(),
Progress: task.GetProgress(),
- Error: task.GetErrMsg(),
+ Error: errMsg,
}
}
-func getTaskInfos[K comparable](tasks []*task.Task[K], k2Str K2Str[K]) []TaskInfo {
- var infos []TaskInfo
- for _, t := range tasks {
- infos = append(infos, getTaskInfo(t, k2Str))
- }
- return infos
+func getTaskInfos[T tache.TaskWithInfo](tasks []T) []TaskInfo {
+ return utils.MustSliceConvert(tasks, getTaskInfo[T])
}
-type Str2K[K comparable] func(str string) (K, error)
-
-func str2Uint64K(str string) (uint64, error) {
- return strconv.ParseUint(str, 10, 64)
-}
-
-func str2StrK(str string) (string, error) {
- return str, nil
-}
-
-func taskRoute[K comparable](g *gin.RouterGroup, manager *task.Manager[K], k2Str K2Str[K], str2K Str2K[K]) {
+func taskRoute[T tache.TaskWithInfo](g *gin.RouterGroup, manager *tache.Manager[T]) {
g.GET("/undone", func(c *gin.Context) {
- common.SuccessResp(c, getTaskInfos(manager.ListUndone(), k2Str))
+ common.SuccessResp(c, getTaskInfos(manager.GetByState(tache.StatePending, tache.StateRunning,
+ tache.StateCanceling, tache.StateErrored, tache.StateFailing, tache.StateWaitingRetry, tache.StateBeforeRetry)))
})
g.GET("/done", func(c *gin.Context) {
- common.SuccessResp(c, getTaskInfos(manager.ListDone(), k2Str))
+ common.SuccessResp(c, getTaskInfos(manager.GetByState(tache.StateCanceled, tache.StateFailed, tache.StateSucceeded)))
})
- g.POST("/cancel", func(c *gin.Context) {
+ g.POST("/info", func(c *gin.Context) {
tid := c.Query("tid")
- id, err := str2K(tid)
- if err != nil {
- common.ErrorResp(c, err, 400)
+ task, ok := manager.GetByID(tid)
+ if !ok {
+ common.ErrorStrResp(c, "task not found", 404)
return
}
- if err := manager.Cancel(id); err != nil {
- common.ErrorResp(c, err, 500)
- } else {
- common.SuccessResp(c)
- }
+ common.SuccessResp(c, getTaskInfo(task))
+ })
+ g.POST("/cancel", func(c *gin.Context) {
+ tid := c.Query("tid")
+ manager.Cancel(tid)
+ common.SuccessResp(c)
})
g.POST("/delete", func(c *gin.Context) {
tid := c.Query("tid")
- id, err := str2K(tid)
- if err != nil {
- common.ErrorResp(c, err, 400)
- return
- }
- if err := manager.Remove(id); err != nil {
- common.ErrorResp(c, err, 500)
- } else {
- common.SuccessResp(c)
- }
+ manager.Remove(tid)
+ common.SuccessResp(c)
})
g.POST("/retry", func(c *gin.Context) {
tid := c.Query("tid")
- id, err := str2K(tid)
- if err != nil {
- common.ErrorResp(c, err, 400)
- return
- }
- if err := manager.Retry(id); err != nil {
- common.ErrorResp(c, err, 500)
- } else {
- common.SuccessResp(c)
- }
+ manager.Retry(tid)
+ common.SuccessResp(c)
})
g.POST("/clear_done", func(c *gin.Context) {
- manager.ClearDone()
+ manager.RemoveByState(tache.StateCanceled, tache.StateFailed, tache.StateSucceeded)
common.SuccessResp(c)
})
g.POST("/clear_succeeded", func(c *gin.Context) {
- manager.ClearSucceeded()
+ manager.RemoveByState(tache.StateSucceeded)
+ common.SuccessResp(c)
+ })
+ g.POST("/retry_failed", func(c *gin.Context) {
+ manager.RetryAllFailed()
common.SuccessResp(c)
})
}
func SetupTaskRoute(g *gin.RouterGroup) {
- taskRoute(g.Group("/aria2_down"), aria2.DownTaskManager, strK2Str, str2StrK)
- taskRoute(g.Group("/aria2_transfer"), aria2.TransferTaskManager, uint64K2Str, str2Uint64K)
- taskRoute(g.Group("/upload"), fs.UploadTaskManager, uint64K2Str, str2Uint64K)
- taskRoute(g.Group("/copy"), fs.CopyTaskManager, uint64K2Str, str2Uint64K)
- taskRoute(g.Group("/qbit_down"), qbittorrent.DownTaskManager, strK2Str, str2StrK)
- taskRoute(g.Group("/qbit_transfer"), qbittorrent.TransferTaskManager, uint64K2Str, str2Uint64K)
+ taskRoute(g.Group("/upload"), fs.UploadTaskManager)
+ taskRoute(g.Group("/copy"), fs.CopyTaskManager)
+ taskRoute(g.Group("/offline_download"), tool.DownloadTaskManager)
+ taskRoute(g.Group("/offline_download_transfer"), tool.TransferTaskManager)
}
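// Editor's sketch, not part of the patch: the two state buckets that the /undone, /done
// and /clear_done handlers above pass to manager.GetByState / RemoveByState, written
// out once for readability. Variable names are illustrative.
package handles

import "github.com/xhofe/tache"

var (
    undoneStates = []tache.State{
        tache.StatePending, tache.StateRunning, tache.StateCanceling,
        tache.StateErrored, tache.StateFailing, tache.StateWaitingRetry, tache.StateBeforeRetry,
    }
    doneStates = []tache.State{tache.StateCanceled, tache.StateFailed, tache.StateSucceeded}
)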
diff --git a/server/handles/token.go b/server/handles/token.go
old mode 100644
new mode 100755
diff --git a/server/handles/user.go b/server/handles/user.go
old mode 100644
new mode 100755
diff --git a/server/handles/webauthn.go b/server/handles/webauthn.go
new file mode 100755
index 00000000..28a89522
--- /dev/null
+++ b/server/handles/webauthn.go
@@ -0,0 +1,217 @@
+package handles
+
+import (
+ "encoding/base64"
+ "encoding/json"
+ "fmt"
+
+ "github.com/alist-org/alist/v3/internal/authn"
+ "github.com/alist-org/alist/v3/internal/conf"
+ "github.com/alist-org/alist/v3/internal/db"
+ "github.com/alist-org/alist/v3/internal/model"
+ "github.com/alist-org/alist/v3/internal/op"
+ "github.com/alist-org/alist/v3/internal/setting"
+ "github.com/alist-org/alist/v3/server/common"
+ "github.com/gin-gonic/gin"
+ "github.com/go-webauthn/webauthn/webauthn"
+)
+
+func BeginAuthnLogin(c *gin.Context) {
+ enabled := setting.GetBool(conf.WebauthnLoginEnabled)
+ if !enabled {
+ common.ErrorStrResp(c, "WebAuthn is not enabled", 403)
+ return
+ }
+ username := c.Query("username")
+ if username == "" {
+ common.ErrorStrResp(c, "empty or no username provided", 400)
+ return
+ }
+ user, err := db.GetUserByName(username)
+ if err != nil {
+ common.ErrorResp(c, err, 400)
+ return
+ }
+ authnInstance, err := authn.NewAuthnInstance(c.Request)
+ if err != nil {
+ common.ErrorResp(c, err, 400)
+ return
+ }
+
+ options, sessionData, err := authnInstance.BeginLogin(user)
+
+ if err != nil {
+ common.ErrorResp(c, err, 400)
+ return
+ }
+ val, err := json.Marshal(sessionData)
+ if err != nil {
+ common.ErrorResp(c, err, 400)
+ return
+ }
+ common.SuccessResp(c, gin.H{
+ "options": options,
+ "session": val,
+ })
+}
+
+func FinishAuthnLogin(c *gin.Context) {
+ enabled := setting.GetBool(conf.WebauthnLoginEnabled)
+ if !enabled {
+ common.ErrorStrResp(c, "WebAuthn is not enabled", 403)
+ return
+ }
+ username := c.Query("username")
+ user, err := db.GetUserByName(username)
+ if err != nil {
+ common.ErrorResp(c, err, 400)
+ return
+ }
+
+ sessionDataString := c.GetHeader("session")
+
+ authnInstance, err := authn.NewAuthnInstance(c.Request)
+ if err != nil {
+ common.ErrorResp(c, err, 400)
+ return
+ }
+ sessionDataBytes, err := base64.StdEncoding.DecodeString(sessionDataString)
+ if err != nil {
+ common.ErrorResp(c, err, 400)
+ return
+ }
+
+ var sessionData webauthn.SessionData
+ if err := json.Unmarshal(sessionDataBytes, &sessionData); err != nil {
+ common.ErrorResp(c, err, 400)
+ return
+ }
+
+ _, err = authnInstance.FinishLogin(user, sessionData, c.Request)
+
+ if err != nil {
+ common.ErrorResp(c, err, 400)
+ return
+ }
+
+ token, err := common.GenerateToken(user)
+ if err != nil {
+ common.ErrorResp(c, err, 400, true)
+ return
+ }
+ common.SuccessResp(c, gin.H{"token": token})
+}
+
+func BeginAuthnRegistration(c *gin.Context) {
+ enabled := setting.GetBool(conf.WebauthnLoginEnabled)
+ if !enabled {
+ common.ErrorStrResp(c, "WebAuthn is not enabled", 403)
+ return
+ }
+ user := c.MustGet("user").(*model.User)
+
+ authnInstance, err := authn.NewAuthnInstance(c.Request)
+ if err != nil {
+ common.ErrorResp(c, err, 400)
+ }
+
+ options, sessionData, err := authnInstance.BeginRegistration(user)
+
+ if err != nil {
+ common.ErrorResp(c, err, 400)
+ }
+
+ val, err := json.Marshal(sessionData)
+ if err != nil {
+ common.ErrorResp(c, err, 400)
+ }
+
+ common.SuccessResp(c, gin.H{
+ "options": options,
+ "session": val,
+ })
+}
+
+func FinishAuthnRegistration(c *gin.Context) {
+ enabled := setting.GetBool(conf.WebauthnLoginEnabled)
+ if !enabled {
+ common.ErrorStrResp(c, "WebAuthn is not enabled", 403)
+ return
+ }
+ user := c.MustGet("user").(*model.User)
+ sessionDataString := c.GetHeader("Session")
+
+ authnInstance, err := authn.NewAuthnInstance(c.Request)
+ if err != nil {
+ common.ErrorResp(c, err, 400)
+ return
+ }
+
+ sessionDataBytes, err := base64.StdEncoding.DecodeString(sessionDataString)
+ if err != nil {
+ common.ErrorResp(c, err, 400)
+ return
+ }
+
+ var sessionData webauthn.SessionData
+ if err := json.Unmarshal(sessionDataBytes, &sessionData); err != nil {
+ common.ErrorResp(c, err, 400)
+ return
+ }
+
+ credential, err := authnInstance.FinishRegistration(user, sessionData, c.Request)
+
+ if err != nil {
+ common.ErrorResp(c, err, 400)
+ return
+ }
+ err = db.RegisterAuthn(user, credential)
+ if err != nil {
+ common.ErrorResp(c, err, 400)
+ return
+ }
+ err = op.DelUserCache(user.Username)
+ if err != nil {
+ common.ErrorResp(c, err, 400)
+ return
+ }
+ common.SuccessResp(c, "Registered Successfully")
+}
+
+func DeleteAuthnLogin(c *gin.Context) {
+ user := c.MustGet("user").(*model.User)
+ type DeleteAuthnReq struct {
+ ID string `json:"id"`
+ }
+ var req DeleteAuthnReq
+ err := c.ShouldBind(&req)
+ if err != nil {
+ common.ErrorResp(c, err, 400)
+ return
+ }
+ err = db.RemoveAuthn(user, req.ID)
+ err = op.DelUserCache(user.Username)
+ if err != nil {
+ common.ErrorResp(c, err, 400)
+ return
+ }
+ common.SuccessResp(c, "Deleted Successfully")
+}
+
+func GetAuthnCredentials(c *gin.Context) {
+ type WebAuthnCredentials struct {
+ ID []byte `json:"id"`
+ FingerPrint string `json:"fingerprint"`
+ }
+ user := c.MustGet("user").(*model.User)
+ credentials := user.WebAuthnCredentials()
+ res := make([]WebAuthnCredentials, 0, len(credentials))
+ for _, v := range credentials {
+ credential := WebAuthnCredentials{
+ ID: v.ID,
+ FingerPrint: fmt.Sprintf("% X", v.Authenticator.AAGUID),
+ }
+ res = append(res, credential)
+ }
+ common.SuccessResp(c, res)
+}
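// Editor's sketch, not part of the patch: why the "session" value returned by
// BeginAuthnLogin/BeginAuthnRegistration can be echoed back by the client as-is.
// encoding/json marshals []byte as a base64 (StdEncoding) string, which is exactly
// what FinishAuthnLogin/FinishAuthnRegistration decode from the "session"/"Session"
// header. The payload below is a stand-in, not real webauthn.SessionData.
package main

import (
    "encoding/base64"
    "encoding/json"
    "fmt"
)

func main() {
    sessionData := []byte(`{"challenge":"abc"}`)
    body, _ := json.Marshal(map[string][]byte{"session": sessionData})
    fmt.Println(string(body)) // {"session":"eyJjaGFsbGVuZ2UiOiJhYmMifQ=="}

    var resp struct {
        Session string `json:"session"`
    }
    _ = json.Unmarshal(body, &resp)
    raw, _ := base64.StdEncoding.DecodeString(resp.Session)
    fmt.Println(string(raw)) // back to the original session JSON
}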
diff --git a/server/middlewares/auth.go b/server/middlewares/auth.go
old mode 100644
new mode 100755
index 71077378..14f186be
--- a/server/middlewares/auth.go
+++ b/server/middlewares/auth.go
@@ -57,6 +57,66 @@ func Auth(c *gin.Context) {
c.Abort()
return
}
+ // validate password timestamp
+ if userClaims.PwdTS != user.PwdTS {
+ common.ErrorStrResp(c, "Password has been changed, login please", 401)
+ c.Abort()
+ return
+ }
+ if user.Disabled {
+ common.ErrorStrResp(c, "Current user is disabled, replace please", 401)
+ c.Abort()
+ return
+ }
+ c.Set("user", user)
+ log.Debugf("use login token: %+v", user)
+ c.Next()
+}
+
+func Authn(c *gin.Context) {
+ token := c.GetHeader("Authorization")
+ if subtle.ConstantTimeCompare([]byte(token), []byte(setting.GetStr(conf.Token))) == 1 {
+ admin, err := op.GetAdmin()
+ if err != nil {
+ common.ErrorResp(c, err, 500)
+ c.Abort()
+ return
+ }
+ c.Set("user", admin)
+ log.Debugf("use admin token: %+v", admin)
+ c.Next()
+ return
+ }
+ if token == "" {
+ guest, err := op.GetGuest()
+ if err != nil {
+ common.ErrorResp(c, err, 500)
+ c.Abort()
+ return
+ }
+ c.Set("user", guest)
+ log.Debugf("use empty token: %+v", guest)
+ c.Next()
+ return
+ }
+ userClaims, err := common.ParseToken(token)
+ if err != nil {
+ common.ErrorResp(c, err, 401)
+ c.Abort()
+ return
+ }
+ user, err := op.GetUserByName(userClaims.Username)
+ if err != nil {
+ common.ErrorResp(c, err, 401)
+ c.Abort()
+ return
+ }
+ // validate password timestamp
+ if userClaims.PwdTS != user.PwdTS {
+ common.ErrorStrResp(c, "Password has been changed, login please", 401)
+ c.Abort()
+ return
+ }
if user.Disabled {
common.ErrorStrResp(c, "Current user is disabled, replace please", 401)
c.Abort()
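// Editor's sketch (assumption, not part of the patch): the claim fields the Auth and
// Authn middlewares above rely on. The real type lives in server/common; field types
// and JSON tags here are guesses based on usage — a username lookup plus a
// password-timestamp check that rejects tokens issued before the last password change.
package common

type UserClaims struct {
    Username string `json:"username"`
    PwdTS    int64  `json:"pwd_ts"`
    // ...plus the standard JWT registered claims in the real definition.
}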
diff --git a/server/middlewares/check.go b/server/middlewares/check.go
old mode 100644
new mode 100755
diff --git a/server/middlewares/down.go b/server/middlewares/down.go
old mode 100644
new mode 100755
diff --git a/server/middlewares/fsup.go b/server/middlewares/fsup.go
old mode 100644
new mode 100755
diff --git a/server/middlewares/https.go b/server/middlewares/https.go
old mode 100644
new mode 100755
diff --git a/server/middlewares/limit.go b/server/middlewares/limit.go
old mode 100644
new mode 100755
diff --git a/server/middlewares/search.go b/server/middlewares/search.go
old mode 100644
new mode 100755
diff --git a/server/router.go b/server/router.go
old mode 100644
new mode 100755
index 241a96ef..35396a8f
--- a/server/router.go
+++ b/server/router.go
@@ -22,7 +22,7 @@ func Init(e *gin.Engine) {
Cors(e)
g := e.Group(conf.URL.Path)
if conf.Conf.Scheme.HttpPort != -1 && conf.Conf.Scheme.HttpsPort != -1 && conf.Conf.Scheme.ForceHttps {
- g.Use(middlewares.ForceHttps)
+ e.Use(middlewares.ForceHttps)
}
g.Any("/ping", func(c *gin.Context) {
c.String(200, "pong")
@@ -45,6 +45,7 @@ func Init(e *gin.Engine) {
api := g.Group("/api")
auth := api.Group("", middlewares.Auth)
+ webauthn := api.Group("/authn", middlewares.Authn)
api.POST("/auth/login", handles.Login)
api.POST("/auth/login/hash", handles.LoginHash)
@@ -53,18 +54,29 @@ func Init(e *gin.Engine) {
auth.POST("/auth/2fa/generate", handles.Generate2FA)
auth.POST("/auth/2fa/verify", handles.Verify2FA)
- // github auth
+ // auth
api.GET("/auth/sso", handles.SSOLoginRedirect)
api.GET("/auth/sso_callback", handles.SSOLoginCallback)
+ api.GET("/auth/get_sso_id", handles.SSOLoginCallback)
+ api.GET("/auth/sso_get_token", handles.SSOLoginCallback)
+
+ //webauthn
+ webauthn.GET("/webauthn_begin_registration", handles.BeginAuthnRegistration)
+ webauthn.POST("/webauthn_finish_registration", handles.FinishAuthnRegistration)
+ webauthn.GET("/webauthn_begin_login", handles.BeginAuthnLogin)
+ webauthn.POST("/webauthn_finish_login", handles.FinishAuthnLogin)
+ webauthn.POST("/delete_authn", handles.DeleteAuthnLogin)
+ webauthn.GET("/getcredentials", handles.GetAuthnCredentials)
// no need auth
public := api.Group("/public")
public.Any("/settings", handles.PublicSettings)
+ public.Any("/offline_download_tools", handles.OfflineDownloadTools)
_fs(auth.Group("/fs"))
admin(auth.Group("/admin", middlewares.AuthAdmin))
- if flags.Dev {
- dev(g.Group("/dev"))
+ if flags.Debug || flags.Dev {
+ debug(g.Group("/debug"))
}
static.Static(g, func(handlers ...gin.HandlerFunc) {
e.NoRoute(handlers...)
@@ -92,6 +104,7 @@ func admin(g *gin.RouterGroup) {
user.POST("/update", handles.UpdateUser)
user.POST("/cancel_2fa", handles.Cancel2FAById)
user.POST("/delete", handles.DeleteUser)
+ user.POST("/del_cache", handles.DelUserCache)
storage := g.Group("/storage")
storage.GET("/list", handles.ListStorages)
@@ -151,14 +164,16 @@ func _fs(g *gin.RouterGroup) {
g.PUT("/put", middlewares.FsUp, handles.FsStream)
g.PUT("/form", middlewares.FsUp, handles.FsForm)
g.POST("/link", middlewares.AuthAdmin, handles.Link)
- g.POST("/add_aria2", handles.AddAria2)
- g.POST("/add_qbit", handles.AddQbittorrent)
+ //g.POST("/add_aria2", handles.AddOfflineDownload)
+ //g.POST("/add_qbit", handles.AddQbittorrent)
+ g.POST("/add_offline_download", handles.AddOfflineDownload)
}
func Cors(r *gin.Engine) {
config := cors.DefaultConfig()
- config.AllowAllOrigins = true
- config.AllowHeaders = []string{"*"}
- config.AllowMethods = []string{"*"}
+ //config.AllowAllOrigins = true
+ config.AllowOrigins = conf.Conf.Cors.AllowOrigins
+ config.AllowHeaders = conf.Conf.Cors.AllowHeaders
+ config.AllowMethods = conf.Conf.Cors.AllowMethods
r.Use(cors.New(config))
}
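// Editor's sketch (assumption, not part of the patch): the Cors section implied by
// conf.Conf.Cors above. Field names follow the usage in Cors(); the slice types match
// what gin-contrib/cors.Config expects; the JSON tags are illustrative.
package conf

type Cors struct {
    AllowOrigins []string `json:"allow_origins"`
    AllowHeaders []string `json:"allow_headers"`
    AllowMethods []string `json:"allow_methods"`
}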
diff --git a/server/static/config.go b/server/static/config.go
old mode 100644
new mode 100755
diff --git a/server/static/static.go b/server/static/static.go
old mode 100644
new mode 100755
index be1db2c6..8e2054af
--- a/server/static/static.go
+++ b/server/static/static.go
@@ -3,27 +3,48 @@ package static
import (
"errors"
"fmt"
+ "github.com/alist-org/alist/v3/public"
+ "io"
"io/fs"
"net/http"
- "net/http/pprof"
+ "os"
"strings"
- "github.com/alist-org/alist/v3/cmd/flags"
"github.com/alist-org/alist/v3/internal/conf"
"github.com/alist-org/alist/v3/internal/setting"
"github.com/alist-org/alist/v3/pkg/utils"
- "github.com/alist-org/alist/v3/public"
"github.com/gin-gonic/gin"
)
-func InitIndex() {
- index, err := public.Public.ReadFile("dist/index.html")
+var static fs.FS = public.Public
+
+func initStatic() {
+ if conf.Conf.DistDir == "" {
+ dist, err := fs.Sub(static, "dist")
+ if err != nil {
+ utils.Log.Fatalf("failed to read dist dir")
+ }
+ static = dist
+ return
+ }
+ static = os.DirFS(conf.Conf.DistDir)
+}
+
+func initIndex() {
+ indexFile, err := static.Open("index.html")
if err != nil {
if errors.Is(err, fs.ErrNotExist) {
utils.Log.Fatalf("index.html not exist, you may forget to put dist of frontend to public/dist")
}
utils.Log.Fatalf("failed to read index.html: %v", err)
}
+ defer func() {
+ _ = indexFile.Close()
+ }()
+ index, err := io.ReadAll(indexFile)
+ if err != nil {
+ utils.Log.Fatalf("failed to read dist/index.html")
+ }
conf.RawIndexHtml = string(index)
siteConfig := getSiteConfig()
replaceMap := map[string]string{
@@ -62,7 +83,8 @@ func UpdateIndex() {
}
func Static(r *gin.RouterGroup, noRoute func(handlers ...gin.HandlerFunc)) {
- InitIndex()
+ initStatic()
+ initIndex()
folders := []string{"assets", "images", "streamer", "static"}
r.Use(func(c *gin.Context) {
for i := range folders {
@@ -72,8 +94,7 @@ func Static(r *gin.RouterGroup, noRoute func(handlers ...gin.HandlerFunc)) {
}
})
for i, folder := range folders {
- folder = "dist/" + folder
- sub, err := fs.Sub(public.Public, folder)
+ sub, err := fs.Sub(static, folder)
if err != nil {
utils.Log.Fatalf("can't find folder: %s", folder)
}
@@ -85,8 +106,6 @@ func Static(r *gin.RouterGroup, noRoute func(handlers ...gin.HandlerFunc)) {
c.Status(200)
if strings.HasPrefix(c.Request.URL.Path, "/@manage") {
_, _ = c.Writer.WriteString(conf.ManageHtml)
- } else if strings.HasPrefix(c.Request.URL.Path, "/debug/pprof") && flags.Debug {
- pprof.Index(c.Writer, c.Request)
} else {
_, _ = c.Writer.WriteString(conf.IndexHtml)
}
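// Editor's sketch, not part of the patch: the selection initStatic above introduces —
// embedded assets by default, a directory on disk when dist_dir is configured.
// Function and parameter names are illustrative.
package static

import (
    "io/fs"
    "os"
)

func chooseStatic(embedded fs.FS, distDir string) (fs.FS, error) {
    if distDir == "" {
        // keep the embedded frontend build (the "dist" subtree of public.Public)
        return fs.Sub(embedded, "dist")
    }
    // serve a frontend build straight from disk
    return os.DirFS(distDir), nil
}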
diff --git a/server/webdav.go b/server/webdav.go
old mode 100644
new mode 100755
index 8d5fd91a..2b5c9618
--- a/server/webdav.go
+++ b/server/webdav.go
@@ -51,6 +51,7 @@ func WebDAVAuth(c *gin.Context) {
username, password, ok := c.Request.BasicAuth()
if !ok {
bt := c.GetHeader("Authorization")
+ log.Debugf("[webdav auth] token: %s", bt)
if strings.HasPrefix(bt, "Bearer") {
bt = strings.TrimPrefix(bt, "Bearer ")
token := setting.GetStr(conf.Token)
diff --git a/server/webdav/buffered_response_writer.go b/server/webdav/buffered_response_writer.go
old mode 100644
new mode 100755
diff --git a/server/webdav/file.go b/server/webdav/file.go
old mode 100644
new mode 100755
index 2832336f..01e96f7d
--- a/server/webdav/file.go
+++ b/server/webdav/file.go
@@ -10,6 +10,7 @@ import (
"path"
"path/filepath"
+ "github.com/alist-org/alist/v3/internal/conf"
"github.com/alist-org/alist/v3/internal/fs"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/internal/op"
@@ -54,7 +55,8 @@ func moveFiles(ctx context.Context, src, dst string, overwrite bool) (status int
//
// See section 9.8.5 for when various HTTP status codes apply.
func copyFiles(ctx context.Context, src, dst string, overwrite bool) (status int, err error) {
- _, err = fs.Copy(ctx, src, dst)
+ dstDir := path.Dir(dst)
+ _, err = fs.Copy(context.WithValue(ctx, conf.NoTaskKey, struct{}{}), src, dstDir)
if err != nil {
return http.StatusInternalServerError, err
}
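// Editor's note (assumption, not part of the patch): the copyFiles change above hands
// fs.Copy the destination *parent directory* and tags the context with conf.NoTaskKey,
// which, as far as this editor can tell, asks the fs layer to copy inline instead of
// enqueueing a background copy task. Illustrative call shape with hypothetical paths:
//
//	ctx = context.WithValue(ctx, conf.NoTaskKey, struct{}{})
//	_, err = fs.Copy(ctx, "/storage-a/docs/report.pdf", "/storage-b/docs")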
diff --git a/server/webdav/if.go b/server/webdav/if.go
old mode 100644
new mode 100755
diff --git a/server/webdav/internal/xml/README b/server/webdav/internal/xml/README
old mode 100644
new mode 100755
diff --git a/server/webdav/internal/xml/atom_test.go b/server/webdav/internal/xml/atom_test.go
old mode 100644
new mode 100755
diff --git a/server/webdav/internal/xml/example_test.go b/server/webdav/internal/xml/example_test.go
old mode 100644
new mode 100755
diff --git a/server/webdav/internal/xml/marshal.go b/server/webdav/internal/xml/marshal.go
old mode 100644
new mode 100755
diff --git a/server/webdav/internal/xml/marshal_test.go b/server/webdav/internal/xml/marshal_test.go
old mode 100644
new mode 100755
diff --git a/server/webdav/internal/xml/read.go b/server/webdav/internal/xml/read.go
old mode 100644
new mode 100755
diff --git a/server/webdav/internal/xml/read_test.go b/server/webdav/internal/xml/read_test.go
old mode 100644
new mode 100755
diff --git a/server/webdav/internal/xml/typeinfo.go b/server/webdav/internal/xml/typeinfo.go
old mode 100644
new mode 100755
diff --git a/server/webdav/internal/xml/xml.go b/server/webdav/internal/xml/xml.go
old mode 100644
new mode 100755
diff --git a/server/webdav/internal/xml/xml_test.go b/server/webdav/internal/xml/xml_test.go
old mode 100644
new mode 100755
diff --git a/server/webdav/litmus_test_server.go b/server/webdav/litmus_test_server.go
old mode 100644
new mode 100755
diff --git a/server/webdav/lock.go b/server/webdav/lock.go
old mode 100644
new mode 100755
diff --git a/server/webdav/lock_test.go b/server/webdav/lock_test.go
old mode 100644
new mode 100755
diff --git a/server/webdav/prop.go b/server/webdav/prop.go
old mode 100644
new mode 100755
index 73f92a2f..3c3b10d8
--- a/server/webdav/prop.go
+++ b/server/webdav/prop.go
@@ -131,8 +131,8 @@ var liveProps = map[xml.Name]struct {
dir: true,
},
{Space: "DAV:", Local: "creationdate"}: {
- findFn: nil,
- dir: false,
+ findFn: findCreationDate,
+ dir: true,
},
{Space: "DAV:", Local: "getcontentlanguage"}: {
findFn: nil,
@@ -383,6 +383,9 @@ func findContentLength(ctx context.Context, ls LockSystem, name string, fi model
func findLastModified(ctx context.Context, ls LockSystem, name string, fi model.Obj) (string, error) {
return fi.ModTime().UTC().Format(http.TimeFormat), nil
}
+func findCreationDate(ctx context.Context, ls LockSystem, name string, fi model.Obj) (string, error) {
+ return fi.CreateTime().UTC().Format(http.TimeFormat), nil
+}
// ErrNotImplemented should be returned by optional interfaces if they
// want the original implementation to be used.
@@ -457,7 +460,7 @@ type ETager interface {
func findETag(ctx context.Context, ls LockSystem, name string, fi model.Obj) (string, error) {
if do, ok := fi.(ETager); ok {
etag, err := do.ETag(ctx)
- if err != ErrNotImplemented {
+ if !errors.Is(err, ErrNotImplemented) {
return etag, err
}
}
diff --git a/server/webdav/util.go b/server/webdav/util.go
new file mode 100755
index 00000000..15d9e07c
--- /dev/null
+++ b/server/webdav/util.go
@@ -0,0 +1,29 @@
+package webdav
+
+import (
+ log "github.com/sirupsen/logrus"
+ "net/http"
+ "strconv"
+ "time"
+)
+
+func (h *Handler) getModTime(r *http.Request) time.Time {
+ return h.getHeaderTime(r, "X-OC-Mtime")
+}
+
+// owncloud/ nextcloud haven't impl this, but we can add the support since rclone may support this soon
+func (h *Handler) getCreateTime(r *http.Request) time.Time {
+ return h.getHeaderTime(r, "X-OC-Ctime")
+}
+
+func (h *Handler) getHeaderTime(r *http.Request, header string) time.Time {
+ hVal := r.Header.Get(header)
+ if hVal != "" {
+ modTimeUnix, err := strconv.ParseInt(hVal, 10, 64)
+ if err == nil {
+ return time.Unix(modTimeUnix, 0)
+ }
+ log.Warnf("getModTime in Webdav, failed to parse %s, %s", header, err)
+ }
+ return time.Now()
+}
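// Editor's sketch, not part of the patch: what the helpers above consume. The
// OwnCloud-style headers carry Unix seconds, so a WebDAV client that wants to preserve
// timestamps on PUT can send them like this (URL and credentials are hypothetical).
package main

import (
    "fmt"
    "net/http"
    "strings"
    "time"
)

func main() {
    body := strings.NewReader("hello")
    req, _ := http.NewRequest(http.MethodPut, "http://localhost:5244/dav/notes/a.txt", body)
    req.SetBasicAuth("admin", "password")
    req.Header.Set("X-OC-Mtime", fmt.Sprintf("%d", time.Now().Add(-24*time.Hour).Unix()))
    resp, err := http.DefaultClient.Do(req)
    if err == nil {
        defer resp.Body.Close()
        fmt.Println(resp.Status)
    }
}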
diff --git a/server/webdav/webdav.go b/server/webdav/webdav.go
old mode 100644
new mode 100755
index 09c4ea71..509a7f1c
--- a/server/webdav/webdav.go
+++ b/server/webdav/webdav.go
@@ -15,6 +15,8 @@ import (
"strings"
"time"
+ "github.com/alist-org/alist/v3/internal/stream"
+
"github.com/alist-org/alist/v3/internal/errs"
"github.com/alist-org/alist/v3/internal/fs"
"github.com/alist-org/alist/v3/internal/model"
@@ -218,20 +220,24 @@ func (h *Handler) handleGetHeadPost(w http.ResponseWriter, r *http.Request) (sta
user := ctx.Value("user").(*model.User)
reqPath, err = user.JoinPath(reqPath)
if err != nil {
- return 403, err
+ return http.StatusForbidden, err
}
fi, err := fs.Get(ctx, reqPath, &fs.GetArgs{})
if err != nil {
return http.StatusNotFound, err
}
- if fi.IsDir() {
- return http.StatusMethodNotAllowed, nil
- }
etag, err := findETag(ctx, h.LockSystem, reqPath, fi)
if err != nil {
return http.StatusInternalServerError, err
}
w.Header().Set("ETag", etag)
+ if r.Method == http.MethodHead {
+ w.Header().Set("Content-Length", fmt.Sprintf("%d", fi.GetSize()))
+ return http.StatusOK, nil
+ }
+ if fi.IsDir() {
+ return http.StatusMethodNotAllowed, nil
+ }
// Let ServeContent determine the Content-Type header.
storage, _ := fs.GetStorage(reqPath, &fs.GetStoragesArgs{})
downProxyUrl := storage.GetStorage().DownProxyUrl
@@ -321,12 +327,13 @@ func (h *Handler) handlePut(w http.ResponseWriter, r *http.Request) (status int,
obj := model.Object{
Name: path.Base(reqPath),
Size: r.ContentLength,
- Modified: time.Now(),
+ Modified: h.getModTime(r),
+ Ctime: h.getCreateTime(r),
}
- stream := &model.FileStream{
- Obj: &obj,
- ReadCloser: r.Body,
- Mimetype: r.Header.Get("Content-Type"),
+ stream := &stream.FileStream{
+ Obj: &obj,
+ Reader: r.Body,
+ Mimetype: r.Header.Get("Content-Type"),
}
if stream.Mimetype == "" {
stream.Mimetype = utils.GetMimeType(reqPath)
@@ -336,6 +343,8 @@ func (h *Handler) handlePut(w http.ResponseWriter, r *http.Request) (status int,
return http.StatusNotFound, err
}
+ _ = r.Body.Close()
+ _ = stream.Close()
// TODO(rost): Returning 405 Method Not Allowed might not be appropriate.
if err != nil {
return http.StatusMethodNotAllowed, err
@@ -373,6 +382,21 @@ func (h *Handler) handleMkcol(w http.ResponseWriter, r *http.Request) (status in
if r.ContentLength > 0 {
return http.StatusUnsupportedMediaType, nil
}
+
+ // RFC 4918 9.3.1
+ //405 (Method Not Allowed) - MKCOL can only be executed on an unmapped URL
+ if _, err := fs.Get(ctx, reqPath, &fs.GetArgs{}); err == nil {
+ return http.StatusMethodNotAllowed, err
+ }
+ // RFC 4918 9.3.1
+ // 409 (Conflict) The server MUST NOT create those intermediate collections automatically.
+ reqDir := path.Dir(reqPath)
+ if _, err := fs.Get(ctx, reqDir, &fs.GetArgs{}); err != nil {
+ if errs.IsObjectNotFound(err) {
+ return http.StatusConflict, err
+ }
+ return http.StatusMethodNotAllowed, err
+ }
if err := fs.MakeDir(ctx, reqPath); err != nil {
if os.IsNotExist(err) {
return http.StatusConflict, err
@@ -512,12 +536,12 @@ func (h *Handler) handleLock(w http.ResponseWriter, r *http.Request) (retStatus
}
}
reqPath, status, err := h.stripPrefix(r.URL.Path)
- reqPath, err = user.JoinPath(reqPath)
if err != nil {
- return 403, err
+ return status, err
}
+ reqPath, err = user.JoinPath(reqPath)
if err != nil {
- return status, err
+ return 403, err
}
ld = LockDetails{
Root: reqPath,
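// Editor's note, not part of the patch: the MKCOL checks added earlier in this file's
// diff follow the RFC 4918 §9.3.1 decision table, roughly:
//
//	request has a body                 -> 415 Unsupported Media Type
//	target URL already mapped          -> 405 Method Not Allowed
//	parent collection does not exist   -> 409 Conflict
//	otherwise                          -> create the collection via fs.MakeDir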
diff --git a/server/webdav/xml.go b/server/webdav/xml.go
old mode 100644
new mode 100755
diff --git a/server/webdav/xml_test.go b/server/webdav/xml_test.go
old mode 100644
new mode 100755
diff --git a/wrapper/zcc-arm64 b/wrapper/zcc-arm64
old mode 100644
new mode 100755
diff --git a/wrapper/zcxx-arm64 b/wrapper/zcxx-arm64
old mode 100644
new mode 100755