Add pagination to Out of sync item list (fixes #1509)

commit 9afbca3001, parent ec3f17cb9c
@@ -111,7 +111,7 @@ func startGUI(cfg config.GUIConfiguration, assetDir string, m *model.Model) erro
 	getRestMux.HandleFunc("/rest/db/completion", withModel(m, restGetDBCompletion)) // device folder
 	getRestMux.HandleFunc("/rest/db/file", withModel(m, restGetDBFile)) // folder file
 	getRestMux.HandleFunc("/rest/db/ignores", withModel(m, restGetDBIgnores)) // folder
-	getRestMux.HandleFunc("/rest/db/need", withModel(m, restGetDBNeed)) // folder
+	getRestMux.HandleFunc("/rest/db/need", withModel(m, restGetDBNeed)) // folder [perpage] [page]
 	getRestMux.HandleFunc("/rest/db/status", withModel(m, restGetDBStatus)) // folder
 	getRestMux.HandleFunc("/rest/db/browse", withModel(m, restGetDBBrowse)) // folder [prefix] [dirsonly] [levels]
 	getRestMux.HandleFunc("/rest/events", restGetEvents) // since [limit]
@@ -133,7 +133,7 @@ func startGUI(cfg config.GUIConfiguration, assetDir string, m *model.Model) erro
 
 	// The POST handlers
 	postRestMux := http.NewServeMux()
-	postRestMux.HandleFunc("/rest/db/prio", withModel(m, restPostDBPrio)) // folder file
+	postRestMux.HandleFunc("/rest/db/prio", withModel(m, restPostDBPrio)) // folder file [perpage] [page]
 	postRestMux.HandleFunc("/rest/db/ignores", withModel(m, restPostDBIgnores)) // folder
 	postRestMux.HandleFunc("/rest/db/override", withModel(m, restPostDBOverride)) // folder
 	postRestMux.HandleFunc("/rest/db/scan", withModel(m, restPostDBScan)) // folder [sub...]
@@ -379,15 +379,29 @@ func restPostDBOverride(m *model.Model, w http.ResponseWriter, r *http.Request)
 }
 
 func restGetDBNeed(m *model.Model, w http.ResponseWriter, r *http.Request) {
-	var qs = r.URL.Query()
-	var folder = qs.Get("folder")
+	qs := r.URL.Query()
+
+	folder := qs.Get("folder")
+
+	page, err := strconv.Atoi(qs.Get("page"))
+	if err != nil || page < 1 {
+		page = 1
+	}
+	perpage, err := strconv.Atoi(qs.Get("perpage"))
+	if err != nil || perpage < 1 {
+		perpage = 1 << 16
+	}
+
+	progress, queued, rest, total := m.NeedFolderFiles(folder, page, perpage)
 
-	progress, queued, rest := m.NeedFolderFiles(folder, 100)
 	// Convert the struct to a more loose structure, and inject the size.
-	output := map[string][]jsonDBFileInfo{
+	output := map[string]interface{}{
 		"progress": toNeedSlice(progress),
 		"queued":   toNeedSlice(queued),
 		"rest":     toNeedSlice(rest),
+		"total":    total,
+		"page":     page,
+		"perpage":  perpage,
 	}
 
 	w.Header().Set("Content-Type", "application/json; charset=utf-8")
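With the hunks above, /rest/db/need accepts optional page and perpage query parameters and reports total, page and perpage alongside the usual progress, queued and rest arrays. A minimal sketch of calling the new endpoint from a Go client; the host, port, folder ID and page size are illustrative assumptions rather than part of the commit, and a GUI with authentication enabled would additionally need the X-API-Key header:

    package main

    import (
        "fmt"
        "io"
        "net/http"
        "net/url"
    )

    func main() {
        qs := url.Values{}
        qs.Set("folder", "default") // assumed folder ID
        qs.Set("page", "2")         // 1-based page index
        qs.Set("perpage", "25")     // items per page

        resp, err := http.Get("http://localhost:8384/rest/db/need?" + qs.Encode())
        if err != nil {
            panic(err)
        }
        defer resp.Body.Close()

        // The handler answers with JSON holding "progress", "queued", "rest"
        // plus the new "total", "page" and "perpage" fields.
        body, _ := io.ReadAll(resp.Body)
        fmt.Println(string(body))
    }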
@@ -967,10 +967,22 @@
   <hr/>
 
   <table class="table table-striped table-condensed">
-    <tr ng-repeat="f in needed.progress" ng-init="a = needAction(f)">
-      <td class="small-data"><span class="glyphicon glyphicon-{{needIcons[a]}}"></span> {{needActions[a]}}</td>
-      <td title="{{f.name}}">{{f.name | basename}}</td>
-      <td ng-if="a == 'sync' && progress[neededFolder] && progress[neededFolder][f.name]">
+    <tr dir-paginate="f in needed | itemsPerPage: neededPageSize" current-page="neededCurrentPage" total-items="neededTotal">
+      <!-- Icon -->
+      <td class="small-data"><span class="glyphicon glyphicon-{{needIcons[f.action]}}"></span> {{needActions[f.action]}}</td>
+
+      <!-- Name -->
+      <td ng-if="f.type != 'queued'" title="{{f.name}}">{{f.name | basename}}</td>
+      <td ng-if="f.type == 'queued'">
+        <a href="" ng-click="bumpFile(neededFolder, f.name)" title="{{'Move to top of queue' | translate}}">
+          <span class="glyphicon glyphicon-eject"></span>
+        </a>
+        <span title="{{f.name}}"> {{f.name | basename}}</span>
+      </td>
+
+      <!-- Size/Progress -->
+      <td ng-if="f.type == 'progress' && f.action == 'sync' && progress[neededFolder] && progress[neededFolder][f.name]">
         <div class="progress">
           <div class="progress-bar progress-bar-success" style="width: {{progress[neededFolder][f.name].reused}}%"></div>
           <div class="progress-bar" style="width: {{progress[neededFolder][f.name].copiedFromOrigin}}%"></div>
@@ -982,24 +994,20 @@
         </span>
       </div>
       </td>
-      <td class="text-right small-data" ng-if="a != 'sync' || !progress[neededFolder] || !progress[neededFolder][f.name]">
+      <td class="text-right small-data" ng-if="f.type != 'progress' || f.action != 'sync' || !progress[neededFolder] || !progress[neededFolder][f.name]">
        <span ng-if="f.size > 0">{{f.size | binary}}B</span>
       </td>
-    </tr>
-    <tr ng-repeat="f in needed.queued" ng-init="a = needAction(f)">
-      <td class="small-data"><span class="glyphicon glyphicon-{{needIcons[a]}}"></span> {{needActions[a]}}</td>
-      <td><a href="" ng-if="$index != 0" ng-click="bumpFile(neededFolder, f.name)" title="{{'Move to top of queue' | translate}}"><span class="glyphicon glyphicon-eject"></span></a><span ng-if="$index != 0"> </span><span title="{{f.name}}">{{f.name | basename}}</span></td>
-      <td class="text-right small-data">
-        <span ng-if="f.size > 0">{{f.size | binary}}B</span>
-      </td>
-    </tr>
-    <tr ng-repeat="f in needed.rest" ng-init="a = needAction(f)">
-      <td class="small-data"><span class="glyphicon glyphicon-{{needIcons[a]}}"></span> {{needActions[a]}}</td>
-      <td title="{{f.name}}">{{f.name | basename}}</td>
-      <td class="text-right small-data"><span ng-if="f.size > 0">{{f.size | binary}}B</span></td>
     </tr>
   </table>
 
+  <dir-pagination-controls on-page-change="neededPageChanged(newPageNumber)"></dir-pagination-controls>
+  <ul class="pagination pull-right">
+    <li ng-repeat="option in [10, 20, 30, 50, 100]" ng-class="{ active: neededPageSize == option }">
+      <a href="#" ng-click="neededChangePageSize(option)">{{option}}</a>
+    </li>
+  </ul>
+  <div class="clearfix">
 </modal>
 
 <!-- About modal -->
@@ -1087,6 +1095,7 @@
 <script src="vendor/angular/angular.min.js"></script>
 <script src="vendor/angular/angular-translate.min.js"></script>
 <script src="vendor/angular/angular-translate-loader.min.js"></script>
+<script src="vendor/angular/angular-dirPagination.js"></script>
 <script src="vendor/jquery/jquery-2.0.3.min.js"></script>
 <script src="vendor/bootstrap/js/bootstrap.min.js"></script>
 <!-- / vendor scripts -->
@@ -9,6 +9,7 @@
 /*global $: false, angular: false, console: false, validLangs: false */
 
 var syncthing = angular.module('syncthing', [
+    'angularUtils.directives.dirPagination',
     'pascalprecht.translate',
 
     'syncthing.core'
@@ -40,6 +40,10 @@ angular.module('syncthing.core')
         $scope.folderStats = {};
         $scope.progress = {};
         $scope.version = {};
+        $scope.needed = [];
+        $scope.neededTotal = 0;
+        $scope.neededCurrentPage = 1;
+        $scope.neededPageSize = 10;
 
         $(window).bind('beforeunload', function () {
             navigatingAway = true;
@@ -415,14 +419,63 @@ angular.module('syncthing.core')
         }
 
         function refreshNeed(folder) {
-            $http.get(urlbase + "/db/need?folder=" + encodeURIComponent(folder)).success(function (data) {
+            var url = urlbase + "/db/need?folder=" + encodeURIComponent(folder);
+            url += "&page=" + $scope.neededCurrentPage;
+            url += "&perpage=" + $scope.neededPageSize;
+            $http.get(url).success(function (data) {
                 if ($scope.neededFolder == folder) {
                     console.log("refreshNeed", folder, data);
-                    $scope.needed = data;
+                    parseNeeded(data);
                 }
             }).error($scope.emitHTTPError);
         }
 
+        function needAction(file) {
+            var fDelete = 4096;
+            var fDirectory = 16384;
+
+            if ((file.flags & (fDelete + fDirectory)) === fDelete + fDirectory) {
+                return 'rmdir';
+            } else if ((file.flags & fDelete) === fDelete) {
+                return 'rm';
+            } else if ((file.flags & fDirectory) === fDirectory) {
+                return 'touch';
+            } else {
+                return 'sync';
+            }
+        };
+
+        function parseNeeded(data) {
+            var merged = [];
+            data.progress.forEach(function (item) {
+                item.type = "progress";
+                item.action = needAction(item);
+                merged.push(item);
+            });
+            data.queued.forEach(function (item) {
+                item.type = "queued";
+                item.action = needAction(item);
+                merged.push(item);
+            });
+            data.rest.forEach(function (item) {
+                item.type = "rest";
+                item.action = needAction(item);
+                merged.push(item);
+            });
+            $scope.needed = merged;
+            $scope.neededTotal = data.total;
+        }
+
+        $scope.neededPageChanged = function (page) {
+            $scope.neededCurrentPage = page;
+            refreshNeed($scope.neededFolder);
+        };
+
+        $scope.neededChangePageSize = function (perpage) {
+            $scope.neededPageSize = perpage;
+            refreshNeed($scope.neededFolder);
+        }
+
         var refreshDeviceStats = debounce(function () {
             $http.get(urlbase + "/stats/device").success(function (data) {
                 $scope.deviceStats = data;
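The new needAction helper above classifies each needed item from its protocol flag bits: 4096 marks a deletion and 16384 a directory, so the combination maps to rmdir, a bare delete flag to rm, a bare directory flag to touch, and everything else to sync. A standalone Go sketch of the same bit test, with constants mirroring the JS values; the names and the program around them are illustrative, not from the commit:

    package main

    import "fmt"

    const (
        flagDeleted   = 1 << 12 // 4096, the "deleted" bit tested by needAction
        flagDirectory = 1 << 14 // 16384, the "directory" bit
    )

    // needAction mirrors the JS helper: map a needed item's flags to the
    // action label shown in the out-of-sync list.
    func needAction(flags uint32) string {
        switch {
        case flags&(flagDeleted|flagDirectory) == flagDeleted|flagDirectory:
            return "rmdir" // deleted directory
        case flags&flagDeleted != 0:
            return "rm" // deleted file
        case flags&flagDirectory != 0:
            return "touch" // directory to create
        default:
            return "sync" // file to pull
        }
    }

    func main() {
        fmt.Println(needAction(0))                           // sync
        fmt.Println(needAction(flagDeleted))                 // rm
        fmt.Println(needAction(flagDirectory))               // touch
        fmt.Println(needAction(flagDeleted | flagDirectory)) // rmdir
    }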
@@ -1181,24 +1234,11 @@ angular.module('syncthing.core')
             $('#needed').modal().on('hidden.bs.modal', function () {
                 $scope.neededFolder = undefined;
                 $scope.needed = undefined;
+                $scope.neededTotal = 0;
+                $scope.neededCurrentPage = 1;
             });
         };
 
-        $scope.needAction = function (file) {
-            var fDelete = 4096;
-            var fDirectory = 16384;
-
-            if ((file.flags & (fDelete + fDirectory)) === fDelete + fDirectory) {
-                return 'rmdir';
-            } else if ((file.flags & fDelete) === fDelete) {
-                return 'rm';
-            } else if ((file.flags & fDirectory) === fDirectory) {
-                return 'touch';
-            } else {
-                return 'sync';
-            }
-        };
-
         $scope.override = function (folder) {
             $http.post(urlbase + "/db/override?folder=" + encodeURIComponent(folder));
         };
@@ -1220,10 +1260,14 @@ angular.module('syncthing.core')
         };
 
         $scope.bumpFile = function (folder, file) {
-            $http.post(urlbase + "/db/prio?folder=" + encodeURIComponent(folder) + "&file=" + encodeURIComponent(file)).success(function (data) {
+            var url = urlbase + "/db/prio?folder=" + encodeURIComponent(folder) + "&file=" + encodeURIComponent(file);
+            // In order to get the right view of data in the response.
+            url += "&page=" + $scope.neededCurrentPage;
+            url += "&perpage=" + $scope.neededPageSize;
+            $http.post(url).success(function (data) {
                 if ($scope.neededFolder == folder) {
                     console.log("bumpFile", folder, data);
-                    $scope.needed = data;
+                    parseNeeded(data);
                 }
             }).error($scope.emitHTTPError);
         };
@@ -375,53 +375,71 @@ func (m *Model) NeedSize(folder string) (nfiles int, bytes int64) {
 	return
 }
 
-// NeedFiles returns the list of currently needed files in progress, queued,
-// and to be queued on next puller iteration. Also takes a soft cap which is
-// only respected when adding files from the model rather than the runner queue.
-func (m *Model) NeedFolderFiles(folder string, max int) ([]db.FileInfoTruncated, []db.FileInfoTruncated, []db.FileInfoTruncated) {
+// NeedFiles returns a paginated list of currently needed files in progress, queued,
+// and to be queued on next puller iteration, as well as the total number of
+// files currently needed.
+func (m *Model) NeedFolderFiles(folder string, page, perpage int) ([]db.FileInfoTruncated, []db.FileInfoTruncated, []db.FileInfoTruncated, int) {
 	m.fmut.RLock()
 	defer m.fmut.RUnlock()
 
-	if rf, ok := m.folderFiles[folder]; ok {
-		var progress, queued, rest []db.FileInfoTruncated
-		var seen map[string]bool
-
-		runner, ok := m.folderRunners[folder]
-		if ok {
-			progressNames, queuedNames := runner.Jobs()
-
-			progress = make([]db.FileInfoTruncated, len(progressNames))
-			queued = make([]db.FileInfoTruncated, len(queuedNames))
-			seen = make(map[string]bool, len(progressNames)+len(queuedNames))
-
-			for i, name := range progressNames {
-				if f, ok := rf.GetGlobalTruncated(name); ok {
-					progress[i] = f
-					seen[name] = true
-				}
-			}
-
-			for i, name := range queuedNames {
-				if f, ok := rf.GetGlobalTruncated(name); ok {
-					queued[i] = f
-					seen[name] = true
-				}
-			}
-		}
-		left := max - len(progress) - len(queued)
-		if max < 1 || left > 0 {
-			rf.WithNeedTruncated(protocol.LocalDeviceID, func(f db.FileIntf) bool {
-				left--
-				ft := f.(db.FileInfoTruncated)
-				if !seen[ft.Name] {
-					rest = append(rest, ft)
-				}
-				return max < 1 || left > 0
-			})
-		}
-		return progress, queued, rest
-	}
-	return nil, nil, nil
+	total := 0
+
+	rf, ok := m.folderFiles[folder]
+	if !ok {
+		return nil, nil, nil, 0
+	}
+
+	var progress, queued, rest []db.FileInfoTruncated
+	var seen map[string]struct{}
+
+	skip := (page - 1) * perpage
+	get := perpage
+
+	runner, ok := m.folderRunners[folder]
+	if ok {
+		allProgressNames, allQueuedNames := runner.Jobs()
+
+		var progressNames, queuedNames []string
+		progressNames, skip, get = getChunk(allProgressNames, skip, get)
+		queuedNames, skip, get = getChunk(allQueuedNames, skip, get)
+
+		progress = make([]db.FileInfoTruncated, len(progressNames))
+		queued = make([]db.FileInfoTruncated, len(queuedNames))
+		seen = make(map[string]struct{}, len(progressNames)+len(queuedNames))
+
+		for i, name := range progressNames {
+			if f, ok := rf.GetGlobalTruncated(name); ok {
+				progress[i] = f
+				seen[name] = struct{}{}
+			}
+		}
+
+		for i, name := range queuedNames {
+			if f, ok := rf.GetGlobalTruncated(name); ok {
+				queued[i] = f
+				seen[name] = struct{}{}
+			}
+		}
+	}
+
+	rest = make([]db.FileInfoTruncated, 0, perpage)
+	rf.WithNeedTruncated(protocol.LocalDeviceID, func(f db.FileIntf) bool {
+		total++
+		if skip > 0 {
+			skip--
+			return true
+		}
+		if get > 0 {
+			ft := f.(db.FileInfoTruncated)
+			if _, ok := seen[ft.Name]; !ok {
+				rest = append(rest, ft)
+				get--
+			}
+		}
+		return true
+	})
+
+	return progress, queued, rest, total
 }
 
 // Index is called when a new device is connected and we receive their full index.
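NeedFolderFiles turns the 1-based page into a skip/get window and then drains that window from three sources in order: the in-progress names, the queued names, and finally the needed files iterated from the database; total counts every file seen during the database walk regardless of the window. A rough sketch of the offset arithmetic, with illustrative numbers (pageWindow is a made-up helper name; the real code computes the two values inline):

    // pageWindow is an illustrative helper, not part of the commit: it shows
    // how a 1-based page and a page size become the skip/get pair used above.
    func pageWindow(page, perpage int) (skip, get int) {
        return (page - 1) * perpage, perpage
    }

    // Example: page=3, perpage=10 gives skip=20, get=10 — the first 20 needed
    // items (drawn from the in-progress and queued lists before the database
    // walk) are passed over, and the next 10 are returned.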
@@ -1616,3 +1634,17 @@ func symlinkInvalid(isLink bool) bool {
 	}
 	return false
 }
+
+// Skips `skip` elements and retrieves up to `get` elements from a given slice.
+// Returns the resulting slice, plus how many elements are left to skip or
+// copy to satisfy the values which were provided, given the slice is not
+// big enough.
+func getChunk(data []string, skip, get int) ([]string, int, int) {
+	l := len(data)
+	if l <= skip {
+		return []string{}, skip - l, get
+	} else if l < skip+get {
+		return data[skip:l], 0, get - (l - skip)
+	}
+	return data[skip : skip+get], 0, 0
+}
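getChunk is what lets that window span the two in-memory queues before falling through to the database iteration: whatever part of the window a slice cannot satisfy comes back as leftover skip and get values for the next source. A small self-contained sketch of the hand-off; the getChunk body is copied from the hunk above, while the queue contents and page numbers are made-up examples:

    package main

    import "fmt"

    // getChunk as added in this commit: skip `skip` elements, take up to `get`,
    // and report how many remain to skip or take from the next source.
    func getChunk(data []string, skip, get int) ([]string, int, int) {
        l := len(data)
        if l <= skip {
            return []string{}, skip - l, get
        } else if l < skip+get {
            return data[skip:l], 0, get - (l - skip)
        }
        return data[skip : skip+get], 0, 0
    }

    func main() {
        // Illustrative queue contents, not from the commit.
        progressNames := []string{"a", "b", "c"}
        queuedNames := []string{"d", "e"}

        // Page 2 with 4 items per page: skip the first 4 needed items, take 4.
        skip, get := 4, 4

        p, skip, get := getChunk(progressNames, skip, get)
        // p = [], skip = 1, get = 4: all 3 in-progress names were skipped,
        // and one more item still has to be skipped by the next source.
        q, skip, get := getChunk(queuedNames, skip, get)
        // q = [e], skip = 0, get = 3: the remaining 3 items of the page
        // come from the database walk in NeedFolderFiles.

        fmt.Println(p, q, skip, get)
    }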