Commit 43c292d8 authored by 周健君

upload/download: vendor elfinder into pkg/elfinder, keep a local copy of files uploaded or downloaded through the web file manager, and record full command output instead of truncating it

parent 99a95109
module github.com/jumpserver/koko
-go 1.12
go 1.13
require (
github.com/Azure/azure-pipeline-go v0.1.9 // indirect
github.com/Azure/azure-storage-blob-go v0.6.0
github.com/BurntSushi/toml v0.3.1 // indirect
github.com/LeeEirc/elfinder v0.0.11
github.com/aliyun/aliyun-oss-go-sdk v1.9.8
github.com/anmitsu/go-shlex v0.0.0-20161002113705-648efa622239 // indirect
github.com/aws/aws-sdk-go v1.19.46
@@ -15,6 +14,7 @@ require (
github.com/elastic/go-elasticsearch/v6 v6.8.5
github.com/flynn/go-shlex v0.0.0-20150515145356-3f9db97f8568 // indirect
github.com/gliderlabs/ssh v0.2.3-0.20190711180243-866d0ddf7991
github.com/go-playground/form v3.1.4+incompatible
github.com/gorilla/mux v1.7.2
github.com/gorilla/websocket v1.4.0
github.com/jarcoal/httpmock v1.0.4
......
This diff is collapsed.
package elfinder
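// Error keys defined by the elfinder client-server API; the client maps each key to a user-facing error message.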
const (
errMsg = "error"
errUnknownMsg = "errUnknown"
errUnknownCmd = "errUnknownCmd"
errJqui = "errJqui"
errNode = "errNode"
errURL = "errURL"
errAccess = "errAccess"
errConnect = "errConnect"
errAbort = "errAbort"
errTimeout = "errTimeout"
errNotFound = "errNotFound"
errResponse = "errResponse"
errConf = "errConf"
errJSON = "errJSON"
errNoVolumes = "errNoVolumes"
errCmdParams = "errCmdParams"
errDataNotJSON = "errDataNotJSON"
errDataEmpty = "errDataEmpty"
errCmdReq = "errCmdReq"
errOpen = "errOpen"
errNotFolder = "errNotFolder"
errNotFile = "errNotFile"
errRead = "errRead"
errWrite = "errWrite"
errPerm = "errPerm"
errLocked = "errLocked"
errExists = "errExists"
errInvName = "errInvName"
errFolderNotFound = "errFolderNotFound"
errFileNotFound = "errFileNotFound"
errTrgFolderNotFound = "errTrgFolderNotFound"
errPopup = "errPopup"
errMkdir = "errMkdir"
errMkfile = "errMkfile"
errRename = "errRename"
errCopyFrom = "errCopyFrom"
errCopyTo = "errCopyTo"
errMkOutLink = "errMkOutLink"
errUpload = "errUpload"
errUploadFile = "errUploadFile"
errUploadNoFiles = "errUploadNoFiles"
errUploadTotalSize = "errUploadTotalSize"
errUploadFileSize = "errUploadFileSize"
errUploadMime = "errUploadMime"
errUploadTransfer = "errUploadTransfer"
errUploadTemp = "errUploadTemp"
errNotReplace = "errNotReplace"
errReplace = "errReplace"
errSave = "errSave"
errCopy = "errCopy"
errMove = "errMove"
errCopyInItself = "errCopyInItself"
errRm = "errRm"
errRmSrc = "errRmSrc"
errExtract = "errExtract"
errArchive = "errArchive"
errArcType = "errArcType"
errNoArchive = "errNoArchive"
errCmdNoSupport = "errCmdNoSupport"
errReplByChild = "errReplByChild"
errArcSymlinks = "errArcSymlinks"
errArcMaxSize = "errArcMaxSize"
errResize = "errResize"
errResizeDegree = "errResizeDegree"
errResizeRotate = "errResizeRotate"
errResizeSize = "errResizeSize"
errResizeNoChange = "errResizeNoChange"
errUsupportType = "errUsupportType"
errNotUTF8Content = "errNotUTF8Content"
errNetMount = "errNetMount"
errNetMountNoDriver = "errNetMountNoDriver"
errNetMountFailed = "errNetMountFailed"
errNetMountHostReq = "errNetMountHostReq"
errSessionExpires = "errSessionExpires"
errCreatingTempDir = "errCreatingTempDir"
errFtpDownloadFile = "errFtpDownloadFile"
errFtpUploadFile = "errFtpUploadFile"
errFtpMkdir = "errFtpMkdir"
errArchiveExec = "errArchiveExec"
errExtractExec = "errExtractExec"
errNetUnMount = "errNetUnMount"
errConvUTF8 = "errConvUTF8"
errFolderUpload = "errFolderUpload"
errSearchTimeout = "errSearchTimeout"
errReauthRequire = "errReauthRequire"
errMaxTargets = "errMaxTargets"
)
package elfinder
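// ELFRequest holds the parameters of an elfinder connector request, decoded from the query string or multipart form via the "form" tags.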
type ELFRequest struct {
Cmd string `form:"cmd"`
Init bool `form:"init"`
Tree bool `form:"tree"`
Name string `form:"name"`
Target string `form:"target"`
Targets []string `form:"targets[]"`
Dirs []string `form:"dirs[]"`
Mode string `form:"mode"`
Bg string `form:"bg"`
Width int `form:"width"`
Height int `form:"height"`
X int `form:"x"`
Y int `form:"y"`
Degree int `form:"degree"`
Quality int `form:"quality"`
Renames []string `form:"renames[]"`
Suffix string `form:"suffix"`
Intersect []string `form:"intersect[]"`
Chunk string `form:"chunk"`
UploadPath []string `form:"upload_path[]"`
Cid int `form:"cid"`
Content string `form:"content"`
Dst string `form:"dst"`
Src string `form:"src"`
Cut bool `form:"cut"`
Type string `form:"type"`
MakeDir bool `form:"makedir"`
Range string `form:"range"`
Download string `form:"download"`
QueryKey string `form:"q"`
Mimes []string `form:"mimes[]"`
}
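// ChunkRange describes one part of a chunked upload: the byte offset and length of the chunk within the assembled file, and the file's total size.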
type ChunkRange struct {
Offset int64
Length int64
TotalSize int64
}
package elfinder
// source code from https://github.com/Supme/goElFinder/blob/master/types.go
var defaultOptions = options{
Separator: "/",
Archivers: archivers{Create: []string{}, Extract: []string{}},
CopyOverwrite: 1}
type ElfResponse struct {
Api float64 `json:"api,omitempty"` // Protocol version number, must be >= 2.1. ATTENTION: return api ONLY for the init request!
Cwd FileDir `json:"cwd,omitempty"` // Current Working Directory: information about the current directory (a FileDir object)
Files []FileDir `json:"files"` // Files and directories in the current directory. If parameter tree == true, the directory tree is added to the given depth. The order of files is not important. Note: the top-level volume objects must be included here as well (i.e. cwd is repeated here, in addition to other volumes)
NetDrivers []string `json:"netDrivers,omitempty"` // Network protocols that can be mounted on the fly (using the netmount command). Currently only ftp is supported.
Options options `json:"options,omitempty"`
UplMaxFile string `json:"uplMaxFile,omitempty"` // Allowed maximum number of files per upload request, e.g. 20
UplMaxSize string `json:"uplMaxSize,omitempty"` // Allowed maximum upload size per request, e.g. "32M"
Tree []FileDir `json:"tree"` // for tree
Dim string `json:"dim,omitempty"` // for images
Added []FileDir `json:"added"` // for upload, mkdir, rename
Warning []string `json:"warning,omitempty"` // for upload
Changed []FileDir `json:"changed,omitempty"` // for mkdir
Hashes map[string]string `json:"hashes,omitempty"` // for mkdir
List []string `json:"list,omitempty"` // for ls
Size int64 `json:"size,omitempty"` // for size
Zipdl map[string]string `json:"zipdl,omitempty"` // zipdl
Name string `json:"_name,omitempty"`
Chunkmerged string `json:"_chunkmerged,omitempty"`
Removed []string `json:"removed,omitempty"` // for remove, rename
Images map[string]string `json:"images,omitempty"` // for tmb
Content string `json:"content,omitempty"` // for get
Url string `json:"url,omitempty"` // for url
Error interface{} `json:"error,omitempty"`
}
type options struct {
Path string `json:"path,omitempty"` // Current folder path
Url string `json:"url,omitempty"` // Current folder URL
TmbUrl string `json:"tmbURL,omitempty"` // Thumbnails folder URL
Separator string `json:"separator,omitempty"` // Path separator for the current volume
Disabled []string `json:"disabled,omitempty"` // List of commands not allowed (disabled) on this volume
Archivers archivers `json:"archivers,omitempty"`
CopyOverwrite int64 `json:"copyOverwrite,omitempty"` // (Number) Whether or not to overwrite files with the same name on the current volume when copying
// ToDo https://github.com/Studio-42/elFinder/wiki/Client-Server-API-2.1#open
}
type archivers struct {
Create []string `json:"create,omitempty"` // List of the mime type of archives which can be created
Extract []string `json:"extract,omitempty"` // List of the mime types that can be extracted / unpacked
Createext map[string]string `json:"createext,omitempty"` // Map of { MimeType: FileExtension }
}
type FileDir struct {
Name string `json:"name,omitempty"` // name of file/dir. Required
Hash string `json:"hash,omitempty"` // hash of the current file/dir path; the first symbol must be a letter, and the part before the underscore is the volume id. Required.
Phash string `json:"phash,omitempty"` // hash of the parent directory. Required except for root dirs.
Mime string `json:"mime,omitempty"` // mime type. Required.
Ts int64 `json:"ts,omitempty"` // file modification time in unix timestamp. Required.
Size int64 `json:"size,omitempty"` // file size in bytes
Dirs byte `json:"dirs,omitempty"` // Only for directories. Marks if directory has child directories inside it. 0 (or not set) - no, 1 - yes. Do not need to calculate amount.
Read byte `json:"read,omitempty"` // is readable
Write byte `json:"write,omitempty"` // is writable
Isroot byte `json:"isroot,omitempty"`
Locked byte `json:"locked,omitempty"` // is file locked. If locked that object cannot be deleted, renamed or moved
Tmb string `json:"tmb,omitempty"` // Only for images. Thumbnail file name; if the file has no thumbnail yet but one can be generated, the value must be "1"
Alias string `json:"alias,omitempty"` // For symlinks only. Symlink target path.
Thash string `json:"thash,omitempty"` // For symlinks only. Symlink target hash.
Dim string `json:"dim,omitempty"` // For images only. File dimensions. Optional.
Isowner bool `json:"isowner,omitempty"` // has ownership. Optional.
Cssclr string `json:"cssclr,omitempty"` // CSS class name for the holder icon. Optional. It can be included in options.
Volumeid string `json:"volumeid,omitempty"` // Volume id. For directories only. It can be included in options.
Netkey string `json:"netkey,omitempty"` // Netmount volume unique key; required for netmount volumes. It can be included in options.
Options options `json:"options,omitempty"` // For volume root only. Same value as cwd.options.
Debug map[string]interface{} `json:"debug,omitempty"` // For volume root only. Connector debug information.
}
package elfinder
import (
"crypto/md5"
"encoding/base64"
"encoding/hex"
"os"
)
func Decode64(s string) (string, error) {
t, err := base64.RawURLEncoding.DecodeString(s)
if err != nil {
return "", err
}
return string(t), nil
}
func Encode64(s string) string {
return base64.RawURLEncoding.EncodeToString([]byte(s))
}
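// CreateHash builds an elfinder path hash of the form "<volume id>_<base64url(path)>".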
func CreateHash(volumeId, path string) string {
return volumeId + "_" + Encode64(path)
}
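// GenerateID derives a stable volume id from a path (the hex-encoded MD5 of the path).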
func GenerateID(path string) string {
ctx := md5.New()
ctx.Write([]byte(path))
return hex.EncodeToString(ctx.Sum(nil))
}
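// ReadWritePem reports the owner read (0400) and write (0200) permission bits as the 1/0 flags expected in FileDir.Read and FileDir.Write.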
func ReadWritePem(pem os.FileMode) (readable, writable byte) {
if pem&(1<<uint(9-1-0)) != 0 {
readable = 1
}
if pem&(1<<uint(9-1-1)) != 0 {
writable = 1
}
return
}
package elfinder
import (
"io"
"io/ioutil"
"os"
"path/filepath"
"strings"
)
var rootPath, _ = os.Getwd()
var DefaultVolume = LocalFileVolume{basePath: rootPath, Id: GenerateID(rootPath)}
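// Volume abstracts a storage backend for the elfinder connector; LocalFileVolume below implements it on the local filesystem.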
type Volume interface {
ID() string
Info(path string) (FileDir, error)
List(path string) []FileDir
Parents(path string, dep int) []FileDir
GetFile(path string) (reader io.ReadCloser, err error)
UploadFile(dir, uploadPath, filename string, reader io.ReadSeeker) (FileDir, error)
UploadChunk(cid int, dirPath, uploadPath, filename string, rangeData ChunkRange, reader io.ReadSeeker) error
MergeChunk(cid, total int, dirPath, uploadPath, filename string) (FileDir, error)
MakeDir(dir, newDirname string) (FileDir, error)
MakeFile(dir, newFilename string) (FileDir, error)
Rename(oldNamePath, newname string) (FileDir, error)
Remove(path string) error
Paste(dir, filename, suffix string, reader io.ReadCloser) (FileDir, error)
RootFileDir() FileDir
Search(path, key string, mimes ...string) ([]FileDir, error)
}
func NewLocalVolume(path string) *LocalFileVolume {
return &LocalFileVolume{
basePath: path,
Id: GenerateID(path),
}
}
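// LocalFileVolume implements Volume on top of the local filesystem rooted at basePath.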
type LocalFileVolume struct {
Id string
basePath string
}
func (f *LocalFileVolume) ID() string {
return f.Id
}
func (f *LocalFileVolume) Info(path string) (FileDir, error) {
var resFDir FileDir
if path == "" || path == "/" {
path = f.basePath
}
dirPath := filepath.Dir(path)
if path != f.basePath {
resFDir.Phash = f.hash(dirPath)
}
pathInfo, err := os.Stat(path)
if err != nil {
return resFDir, err
}
resFDir.Name = pathInfo.Name()
resFDir.Hash = f.hash(path)
resFDir.Ts = pathInfo.ModTime().Unix()
resFDir.Size = pathInfo.Size()
resFDir.Read, resFDir.Write = ReadWritePem(pathInfo.Mode())
if pathInfo.IsDir() {
resFDir.Mime = "directory"
resFDir.Dirs = 1
} else {
resFDir.Mime = "file"
resFDir.Dirs = 0
}
return resFDir, nil
}
func (f *LocalFileVolume) List(path string) []FileDir {
if path == "" || path == "/" {
path = f.basePath
}
files, err := ioutil.ReadDir(path)
if err != nil {
return []FileDir{}
}
fileDir := make([]FileDir, 0, len(files))
for _, item := range files {
fileD, err := f.Info(filepath.Join(path, item.Name()))
if err != nil {
continue
}
fileDir = append(fileDir, fileD)
}
return fileDir
}
func (f *LocalFileVolume) Parents(path string, dep int) []FileDir {
relativepath := strings.TrimPrefix(strings.TrimPrefix(path, f.basePath), "/")
relativePaths := strings.Split(relativepath, "/")
dirs := make([]FileDir, 0, len(relativePaths))
for i := range relativePaths {
realDirPath := filepath.Join(f.basePath, filepath.Join(relativePaths[:i]...))
result, err := f.Info(realDirPath)
if err != nil {
continue
}
dirs = append(dirs, result)
tmpDir := f.List(realDirPath)
for j, item := range tmpDir {
if item.Dirs == 1 {
dirs = append(dirs, tmpDir[j])
}
}
}
return dirs
}
func (f *LocalFileVolume) GetFile(path string) (reader io.ReadCloser, err error) {
freader, err := os.Open(path)
return freader, err
}
func (f *LocalFileVolume) UploadFile(dirPath, uploadPath, filename string, reader io.ReadSeeker) (FileDir, error) {
var realPath string
switch {
case strings.Contains(uploadPath, filename):
realPath = filepath.Join(dirPath, strings.TrimPrefix(uploadPath, "/"))
case uploadPath != "":
realPath = filepath.Join(dirPath, strings.TrimPrefix(uploadPath, "/"), filename)
default:
realPath = filepath.Join(dirPath, filename)
}
fwriter, err := os.OpenFile(realPath, os.O_WRONLY|os.O_CREATE, 0666)
if err != nil {
return FileDir{}, err
}
defer fwriter.Close()
_, err = io.Copy(fwriter, reader)
if err != nil {
return FileDir{}, err
}
return f.Info(realPath)
}
func (f *LocalFileVolume) UploadChunk(cid int, dirPath, uploadPath, filename string, rangeData ChunkRange, reader io.ReadSeeker) error {
var chunkpath string
switch {
case strings.Contains(uploadPath, filename):
chunkpath = filepath.Join(dirPath, strings.TrimPrefix(uploadPath, "/"))
case uploadPath != "":
chunkpath = filepath.Join(dirPath, strings.TrimPrefix(uploadPath, "/"), filename)
default:
chunkpath = filepath.Join(dirPath, filename)
}
fd, err := os.OpenFile(chunkpath, os.O_WRONLY|os.O_CREATE, 0666)
if err != nil {
return err
}
defer fd.Close()
_, err = fd.Seek(rangeData.Offset, 0)
if err != nil {
return err
}
_, err = io.Copy(fd, reader)
return err
}
func (f *LocalFileVolume) MergeChunk(cid, total int, dirPath, uploadPath, filename string) (FileDir, error) {
var realPath string
switch {
case strings.Contains(uploadPath, filename):
realPath = filepath.Join(dirPath, uploadPath)
case uploadPath != "":
realPath = filepath.Join(dirPath, strings.TrimPrefix(uploadPath, "/"), filename)
default:
realPath = filepath.Join(dirPath, filename)
}
return f.Info(realPath)
}
func (f *LocalFileVolume) hash(path string) string {
return CreateHash(f.Id, path)
}
func (f *LocalFileVolume) MakeDir(dir, newDirname string) (FileDir, error) {
realPath := filepath.Join(dir, newDirname)
err := os.Mkdir(realPath, os.ModePerm)
if err != nil {
return FileDir{}, err
}
return f.Info(realPath)
}
func (f *LocalFileVolume) MakeFile(dir, newFilename string) (FileDir, error) {
var res FileDir
realPath := filepath.Join(dir, newFilename)
fd, err := os.Create(realPath)
if err != nil {
return res, err
}
defer fd.Close()
fdInfo, err := fd.Stat()
if err != nil {
return res, err
}
res.Name = fdInfo.Name()
res.Hash = f.hash(realPath)
res.Phash = f.hash(dir)
res.Ts = fdInfo.ModTime().Unix()
res.Size = fdInfo.Size()
res.Mime = "file"
res.Dirs = 0
res.Read, res.Write = ReadWritePem(fdInfo.Mode())
return res, nil
}
func (f *LocalFileVolume) Rename(oldNamePath, newName string) (FileDir, error) {
var res FileDir
dirname := filepath.Dir(oldNamePath)
realNewNamePath := filepath.Join(dirname, newName)
err := os.Rename(oldNamePath, realNewNamePath)
if err != nil {
return res, err
}
return f.Info(realNewNamePath)
}
func (f *LocalFileVolume) Remove(path string) error {
return os.RemoveAll(path)
}
func (f *LocalFileVolume) Paste(dir, filename, suffix string, reader io.ReadCloser) (FileDir, error) {
defer reader.Close()
res := FileDir{}
realpath := filepath.Join(dir, filename)
_, err := f.Info(realpath)
if err == nil {
realpath += suffix
}
dstFd, err := os.Create(realpath)
if err != nil {
return res, err
}
defer dstFd.Close()
_, err = io.Copy(dstFd, reader)
if err != nil {
return res, err
}
return f.Info(realpath)
}
func (f *LocalFileVolume) RootFileDir() FileDir {
var resFDir = FileDir{}
info, err := os.Stat(f.basePath)
if err != nil {
return resFDir
}
resFDir.Name = info.Name()
resFDir.Hash = f.hash(f.basePath)
resFDir.Mime = "directory"
resFDir.Volumeid = f.Id
resFDir.Dirs = 1
resFDir.Read, resFDir.Write = ReadWritePem(info.Mode())
resFDir.Size = info.Size()
resFDir.Locked = 1
return resFDir
}
func (f *LocalFileVolume) Search(path, key string, mimes ...string) (files []FileDir, err error) {
err = filepath.Walk(path, func(dirPath string, info os.FileInfo, err error) error {
if err != nil {
return err
}
if strings.Contains(info.Name(), key) {
resFDir := FileDir{}
resFDir.Name = info.Name()
resFDir.Hash = f.hash(dirPath)
resFDir.Volumeid = f.Id
if info.IsDir() {
resFDir.Mime = "directory"
resFDir.Dirs = 1
} else {
resFDir.Mime = "file"
resFDir.Dirs = 0
}
resFDir.Read, resFDir.Write = ReadWritePem(info.Mode())
resFDir.Size = info.Size()
resFDir.Ts = info.ModTime().Unix()
files = append(files, resFDir)
}
return nil
})
return
}
package elfinder
import (
"os"
"sync"
)
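// zipTmpFiles records temporary archive files created for downloads so they can be removed once no longer needed.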
var (
zipTmpFiles = make(map[string]string)
zipLocker = new(sync.Mutex)
)
func getTmpFilePath(key string) (string, bool) {
zipLocker.Lock()
defer zipLocker.Unlock()
path, ok := zipTmpFiles[key]
return path, ok
}
func setTmpFilePath(key, path string) {
zipLocker.Lock()
defer zipLocker.Unlock()
zipTmpFiles[key] = path
}
func delTmpFilePath(key string) {
zipLocker.Lock()
defer zipLocker.Unlock()
if path, ok := zipTmpFiles[key]; ok {
_ = os.RemoveAll(path)
}
delete(zipTmpFiles, key)
}
@@ -8,7 +8,7 @@ import (
"strconv"
"strings"
-"github.com/LeeEirc/elfinder"
"github.com/jumpserver/koko/pkg/elfinder"
"github.com/gorilla/mux"
"github.com/jumpserver/koko/pkg/common"
......
package httpd
import (
"fmt"
"github.com/jumpserver/koko/pkg/logger"
"io"
"os"
"path/filepath"
"strings"
"sync"
"time"
)
// file path layout: /data/file/<date>/<download or upload>/<filename>
var baseDir = "/data/file"
// chunkUploadMap maps "<uuid>-<cid>" keys to the local file that received chunks are appended to
var chunkUploadMap = make(map[string]string)
var chunkLock = new(sync.Mutex)
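// downloadToLocal and uploadToLocal keep a local copy of every file transferred through the web file manager, using the path layout described above.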
func downloadToLocal(filename string, reader io.Reader) error {
logger.Info("download:", "filename:", filename)
dir := getDir("download")
realPath := filepath.Join(dir, getFilename(filename))
writer, err := os.OpenFile(realPath, os.O_WRONLY|os.O_CREATE, 0666)
if err != nil {
logger.Error("[download] path: ", realPath, " create failed, error:", err)
return err
}
defer writer.Close()
_, err = io.Copy(writer, reader)
if err != nil {
logger.Error("[download] path: ", realPath, " save failed, error:", err)
return err
}
logger.Info("[download] path: ", realPath, " local save success")
return nil
}
func uploadToLocal(filename string, reader io.Reader) error {
logger.Error("upload: hostname:", ";filename:", filename)
dir := getDir("upload")
realPath := filepath.Join(dir, getFilename(filename))
writer, err := os.OpenFile(realPath, os.O_WRONLY|os.O_CREATE, 0666)
if err != nil {
logger.Error("[upload] path: ", realPath, " create failed, error:", err)
return err
}
defer writer.Close()
_, err = io.Copy(writer, reader)
if err != nil {
logger.Error("[upload] path: ", realPath, " save failed, error:", err)
return err
}
logger.Error("[upload] path: ", realPath, " local save success")
return nil
}
func chunkUploadToLocal(uuid string, cid int, filename string, reader io.Reader) error {
key := getChunkKey(uuid, cid)
chunkLock.Lock()
realPath, ok := chunkUploadMap[key]
if !ok {
dir := getDir("upload")
realPath = filepath.Join(dir, getFilename(filename))
chunkUploadMap[key] = realPath
}
chunkLock.Unlock()
writer, err := os.OpenFile(realPath, os.O_WRONLY|os.O_CREATE|os.O_APPEND, 0666)
if err != nil {
logger.Error("[chunkUpload] path: ", realPath, " create failed, error:", err)
return err
}
defer writer.Close()
_, err = io.Copy(writer, reader)
if err != nil {
logger.Error("[chunkUpload] path: ", realPath, " save failed, error:", err)
return err
}
return nil
}
func chunkMergeToLocal(uuid string, cid int) error {
key := getChunkKey(uuid, cid)
chunkLock.Lock()
delete(chunkUploadMap, key)
chunkLock.Unlock()
return nil
}
func makeDir(path string) {
if _, err := os.Stat(path); os.IsNotExist(err) {
if err := os.MkdirAll(path, os.ModePerm); err != nil {
logger.Error("create dir ", path, " failed, error:", err)
}
}
}
func getDir(direction string) string {
dir := filepath.Join(baseDir, time.Now().Format("2006-01-02"), direction)
makeDir(dir)
return dir
}
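// getFilename flattens a remote path into a single file name and appends a timestamp so repeated transfers do not overwrite each other.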
func getFilename(filename string) string {
filename = strings.TrimPrefix(filename, "/")
filename = strings.ReplaceAll(filename, "/", "-")
filename = filename + "." + time.Now().Format("20060102150405")
return filename
}
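// getChunkKey builds the "<uuid>-<cid>" key used to track one chunked upload in chunkUploadMap.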
func getChunkKey(uuid string, cid int) string {
return fmt.Sprintf("%s-%d", uuid, cid)
}
\ No newline at end of file
@@ -9,7 +9,7 @@ import (
"sync"
"time"
-"github.com/LeeEirc/elfinder"
"github.com/jumpserver/koko/pkg/elfinder"
"github.com/pkg/sftp"
"github.com/jumpserver/koko/pkg/logger"
@@ -135,10 +135,19 @@ func (u *UserVolume) Parents(path string, dep int) []elfinder.FileDir {
func (u *UserVolume) GetFile(path string) (reader io.ReadCloser, err error) {
logger.Debug("GetFile path: ", path)
-return u.UserSftp.Open(filepath.Join(u.basePath, TrimPrefix(path)))
fd, err := u.UserSftp.Open(filepath.Join(u.basePath, TrimPrefix(path)))
if err != nil {
return nil, err
}
// save a local copy of the downloaded file in the background, added by zhoujianjun
go func(path string, fd io.Reader) {
downloadToLocal(path, fd)
}(path, fd)
return fd, err
}
-func (u *UserVolume) UploadFile(dirPath, uploadPath, filename string, reader io.Reader) (elfinder.FileDir, error) {
func (u *UserVolume) UploadFile(dirPath, uploadPath, filename string, reader io.ReadSeeker) (elfinder.FileDir, error) {
var path string
switch {
case strings.Contains(uploadPath, filename):
@@ -161,10 +170,15 @@ func (u *UserVolume) UploadFile(dirPath, uploadPath, filename string, reader io.
if err != nil {
return rest, err
}
// save a local copy, added by zhoujianjun
if _, err := reader.Seek(0, io.SeekStart); err == nil {
_ = uploadToLocal(path, reader)
}
return u.Info(path)
}
-func (u *UserVolume) UploadChunk(cid int, dirPath, uploadPath, filename string, rangeData elfinder.ChunkRange, reader io.Reader) error {
func (u *UserVolume) UploadChunk(cid int, dirPath, uploadPath, filename string, rangeData elfinder.ChunkRange, reader io.ReadSeeker) error {
var err error
var path string
u.lock.Lock()
@@ -178,7 +192,6 @@ func (u *UserVolume) UploadChunk(cid int, dirPath, uploadPath, filename string,
path = filepath.Join(dirPath, TrimPrefix(uploadPath), filename)
default:
path = filepath.Join(dirPath, filename)
}
fd, err = u.UserSftp.Create(filepath.Join(u.basePath, path))
if err != nil {
@@ -192,6 +205,7 @@ func (u *UserVolume) UploadChunk(cid int, dirPath, uploadPath, filename string,
u.chunkFilesMap[cid] = fd
u.lock.Unlock()
}
_, err = io.Copy(fd, reader)
if err != nil {
_ = fd.Close()
@@ -199,6 +213,12 @@ func (u *UserVolume) UploadChunk(cid int, dirPath, uploadPath, filename string,
delete(u.chunkFilesMap, cid)
u.lock.Unlock()
}
// save a local copy of this chunk, added by zhoujianjun
if _, err := reader.Seek(0, io.SeekStart); err == nil {
_ = chunkUploadToLocal(u.Uuid, cid, path, reader)
}
return err
}
@@ -220,6 +240,10 @@ func (u *UserVolume) MergeChunk(cid, total int, dirPath, uploadPath, filename st
delete(u.chunkFilesMap, cid)
}
u.lock.Unlock()
// drop the local chunk bookkeeping for this upload, added by zhoujianjun
_ = chunkMergeToLocal(u.Uuid, cid)
return u.Info(path)
}
......
@@ -3,7 +3,7 @@ package httpd
import (
"sync"
-"github.com/LeeEirc/elfinder"
"github.com/jumpserver/koko/pkg/elfinder"
)
type VolumeCloser interface {
......
@@ -4,7 +4,6 @@ import (
"context"
"fmt"
"io"
"strings"
"time"
uuid "github.com/satori/go.uuid"
@@ -74,14 +73,16 @@ func (s *DBSwitchSession) generateCommandResult(command [2]string) *model.Comman
} else {
input = command[0]
}
-i := strings.LastIndexByte(command[1], '\r')
-if i <= 0 {
-output = command[1]
-} else if i > 0 && i < 1024 {
-output = command[1][:i]
-} else {
-output = command[1][:1024]
-}
output = command[1]
// i := strings.LastIndexByte(command[1], '\r')
// if i <= 0 {
// output = command[1]
// } else if i > 0 && i < 1024 {
// output = command[1][:i]
// } else {
// output = command[1][:1024]
// }
return &model.Command{
SessionID: s.ID,
......
@@ -4,7 +4,6 @@ import (
"context"
"fmt"
"io"
"strings"
"time"
uuid "github.com/satori/go.uuid"
@@ -85,14 +84,15 @@ func (s *SwitchSession) generateCommandResult(command [2]string) *model.Command
} else {
input = command[0]
}
-i := strings.LastIndexByte(command[1], '\r')
-if i <= 0 {
-output = command[1]
-} else if i > 0 && i < 1024 {
-output = command[1][:i]
-} else {
-output = command[1][:1024]
-}
output = command[1]
// i := strings.LastIndexByte(command[1], '\r')
// if i <= 0 {
// output = command[1]
// } else if i > 0 && i < 1024 {
// output = command[1][:i]
// } else {
// output = command[1][:1024]
// }
return &model.Command{
SessionID: s.ID,
......