Add latest changes from gitlab-org/gitlab@master
parent f4107c5ce9
commit 50f0475ee1

24 changed files with 60 additions and 175 deletions
@@ -1308,8 +1308,8 @@ module Ci
     end
 
     def has_expired_test_reports?
-      strong_memoize(:artifacts_expired) do
-        !has_reports?(::Ci::JobArtifact.test_reports.not_expired)
+      strong_memoize(:has_expired_test_reports) do
+        has_reports?(::Ci::JobArtifact.test_reports.expired)
       end
     end
 
@@ -21,7 +21,7 @@ module Ci
       }, _suffix: true
 
       scope :expired_before, -> (timestamp) { where(arel_table[:expire_at].lt(timestamp)) }
-      scope :expired, -> (limit) { expired_before(Time.current).limit(limit) }
+      scope :expired, -> { expired_before(Time.current) }
       scope :project_id_in, ->(ids) { where(project_id: ids) }
     end
 
@@ -2302,12 +2302,7 @@ class User < ApplicationRecord
        .merge(search_members)
        .shortest_traversal_ids_prefixes
 
-      # Use efficient btree index to perform search
-      if Feature.enabled?(:ci_owned_runners_unnest_index, self)
-        Ci::NamespaceMirror.contains_traversal_ids(traversal_ids)
-      else
-        Ci::NamespaceMirror.contains_any_of_namespaces(traversal_ids.map(&:last))
-      end
+      Ci::NamespaceMirror.contains_traversal_ids(traversal_ids)
     end
   end
 
@@ -25,7 +25,7 @@ module Ci
     private
 
     def destroy_artifacts_batch
-      artifacts = ::Ci::PipelineArtifact.unlocked.expired(BATCH_SIZE).to_a
+      artifacts = ::Ci::PipelineArtifact.unlocked.expired.limit(BATCH_SIZE).to_a
       return false if artifacts.empty?
 
       artifacts.each(&:destroy!)
@@ -1,8 +0,0 @@
----
-name: ci_owned_runners_unnest_index
-introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/83843
-rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/357869
-milestone: '14.10'
-type: development
-group: group::sharding
-default_enabled: true
@@ -87,7 +87,6 @@ module Backup
       PoolRepository.includes(:source_project).find_each do |pool|
         progress.puts " - Object pool #{pool.disk_path}..."
 
-        pool.source_project ||= pool.member_projects.first&.root_of_fork_network
         unless pool.source_project
           progress.puts " - Object pool #{pool.disk_path}... " + "[SKIPPED]".color(:cyan)
           next
@@ -163,7 +163,7 @@ RSpec.describe Backup::Repositories do
       expect(pool_repository.object_pool.exists?).to be(true)
     end
 
-    it 'skips pools with no source project, :sidekiq_might_not_need_inline' do
+    it 'skips pools when no source project is found', :sidekiq_might_not_need_inline do
       pool_repository = create(:pool_repository, state: :obsolete)
       pool_repository.update_column(:source_project_id, nil)
 
@@ -4839,9 +4839,9 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
   end
 
   describe '#has_expired_test_reports?' do
-    subject { pipeline_with_test_report.has_expired_test_reports? }
+    subject { pipeline.has_expired_test_reports? }
 
-    let(:pipeline_with_test_report) { create(:ci_pipeline, :with_test_reports) }
+    let(:pipeline) { create(:ci_pipeline, :success, :with_test_reports) }
 
     context 'when artifacts are not expired' do
       it { is_expected.to be_falsey }
@@ -4849,11 +4849,23 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
 
     context 'when artifacts are expired' do
       before do
-        pipeline_with_test_report.job_artifacts.first.update!(expire_at: Date.yesterday)
+        pipeline.job_artifacts.first.update!(expire_at: Date.yesterday)
       end
 
       it { is_expected.to be_truthy }
     end
+
+    context 'when the pipeline is still running' do
+      let(:pipeline) { create(:ci_pipeline, :running) }
+
+      it { is_expected.to be_falsey }
+    end
+
+    context 'when the pipeline is completed without test reports' do
+      let(:pipeline) { create(:ci_pipeline, :success) }
+
+      it { is_expected.to be_falsey }
+    end
   end
 
   it_behaves_like 'it has loose foreign keys' do
@@ -68,8 +68,8 @@ RSpec.describe Ci::Artifactable do
   end
 
   describe '.expired' do
-    it 'returns a limited number of expired artifacts' do
-      expect(Ci::JobArtifact.expired(1).order_id_asc).to eq([recently_expired_artifact])
+    it 'returns all expired artifacts' do
+      expect(Ci::JobArtifact.expired).to contain_exactly(recently_expired_artifact, later_expired_artifact)
     end
   end
 
@@ -4374,14 +4374,6 @@ RSpec.describe User do
 
       it_behaves_like '#ci_owned_runners'
     end
-
-    context 'when FF ci_owned_runners_unnest_index is disabled uses GIN index' do
-      before do
-        stub_feature_flags(ci_owned_runners_unnest_index: false)
-      end
-
-      it_behaves_like '#ci_owned_runners'
-    end
   end
 
   describe '#projects_with_reporter_access_limited_to' do
@@ -63,9 +63,7 @@ func TestDownloadingFromValidArchive(t *testing.T) {
 }
 
 func TestDownloadingFromValidHTTPArchive(t *testing.T) {
-	tempDir, err := ioutil.TempDir("", "uploads")
-	require.NoError(t, err)
-	defer os.RemoveAll(tempDir)
+	tempDir := t.TempDir()
 
 	f, err := os.Create(filepath.Join(tempDir, "archive.zip"))
 	require.NoError(t, err)
@@ -121,9 +119,7 @@ func TestIncompleteApiResponse(t *testing.T) {
 }
 
 func TestDownloadingFromNonExistingHTTPArchive(t *testing.T) {
-	tempDir, err := ioutil.TempDir("", "uploads")
-	require.NoError(t, err)
-	defer os.RemoveAll(tempDir)
+	tempDir := t.TempDir()
 
 	fileServer := httptest.NewServer(http.FileServer(http.Dir(tempDir)))
 	defer fileServer.Close()
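The Go hunks in this commit repeatedly apply one migration: a manual ioutil.TempDir / require.NoError / defer os.RemoveAll sequence becomes a single t.TempDir() call, which creates a unique directory and registers its removal with the test framework, so the directory is cleaned up even when the test fails early. A minimal sketch of the before/after pattern (package, helper, and file names here are illustrative, not taken from the changed files):

package example

import (
	"os"
	"path/filepath"
	"testing"
)

// Before: each test created its own directory and was responsible for cleanup.
func TestWithManualTempDir(t *testing.T) {
	tempDir, err := os.MkdirTemp("", "uploads") // os.MkdirTemp is the current equivalent of ioutil.TempDir
	if err != nil {
		t.Fatal(err)
	}
	defer os.RemoveAll(tempDir)

	writeFixture(t, tempDir)
}

// After: t.TempDir() creates the directory and removes it automatically
// once the test and its subtests finish, even if the test calls t.Fatal.
func TestWithTTempDir(t *testing.T) {
	tempDir := t.TempDir()

	writeFixture(t, tempDir)
}

// writeFixture is a hypothetical stand-in for the real test bodies.
func writeFixture(t *testing.T, dir string) {
	t.Helper()
	if err := os.WriteFile(filepath.Join(dir, "archive.zip"), []byte("fixture"), 0o600); err != nil {
		t.Fatal(err)
	}
}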
@@ -8,7 +8,6 @@ import (
 	"io/ioutil"
 	"net"
 	"net/http/httptest"
-	"os"
 	"path/filepath"
 	"testing"
 	"time"
@@ -75,8 +74,7 @@ func TestUploadPackTimesOut(t *testing.T) {
 func startSmartHTTPServer(t testing.TB, s gitalypb.SmartHTTPServiceServer) string {
 	t.Helper()
 
-	tmp, err := ioutil.TempDir("", "")
-	require.NoError(t, err)
+	tmp := t.TempDir()
 
 	socket := filepath.Join(tmp, "gitaly.sock")
 	ln, err := net.Listen("unix", socket)
@@ -90,7 +88,6 @@ func startSmartHTTPServer(t testing.TB, s gitalypb.SmartHTTPServiceServer) strin
 
 	t.Cleanup(func() {
 		srv.GracefulStop()
-		require.NoError(t, os.RemoveAll(tmp), "error removing temp dir %q", tmp)
 	})
 
 	return fmt.Sprintf("%s://%s", ln.Addr().Network(), ln.Addr().String())
@@ -111,11 +111,9 @@ func newRSFactory(flags int) RSFactory {
 
 func TestHttpWebServer(t *testing.T) {
 	Convey("Scenario: testing WebServer", t, func() {
-		dir, err := ioutil.TempDir("", "webserver")
-		So(err, ShouldBeNil)
-		defer os.RemoveAll(dir)
+		dir := t.TempDir()
 
-		err = ioutil.WriteFile(filepath.Join(dir, "file"), make([]byte, 10000), 0755)
+		err := ioutil.WriteFile(filepath.Join(dir, "file"), make([]byte, 10000), 0755)
 		So(err, ShouldBeNil)
 
 		server := httptest.NewServer(http.FileServer(http.Dir(dir)))
@@ -4,7 +4,6 @@ import (
 	"io/ioutil"
 	"net/http"
 	"net/http/httptest"
-	"os"
 	"path/filepath"
 	"testing"
 
@@ -14,11 +13,7 @@ import (
 )
 
 func TestIfNoDeployPageExist(t *testing.T) {
-	dir, err := ioutil.TempDir("", "deploy")
-	if err != nil {
-		t.Fatal(err)
-	}
-	defer os.RemoveAll(dir)
+	dir := t.TempDir()
 
 	w := httptest.NewRecorder()
 
@@ -33,11 +28,7 @@ func TestIfNoDeployPageExist(t *testing.T) {
 }
 
 func TestIfDeployPageExist(t *testing.T) {
-	dir, err := ioutil.TempDir("", "deploy")
-	if err != nil {
-		t.Fatal(err)
-	}
-	defer os.RemoveAll(dir)
+	dir := t.TempDir()
 
 	deployPage := "DEPLOY"
 	ioutil.WriteFile(filepath.Join(dir, "index.html"), []byte(deployPage), 0600)
@@ -5,7 +5,6 @@ import (
 	"io/ioutil"
 	"net/http"
 	"net/http/httptest"
-	"os"
 	"path/filepath"
 	"testing"
 
@@ -15,11 +14,7 @@ import (
 )
 
 func TestIfErrorPageIsPresented(t *testing.T) {
-	dir, err := ioutil.TempDir("", "error_page")
-	if err != nil {
-		t.Fatal(err)
-	}
-	defer os.RemoveAll(dir)
+	dir := t.TempDir()
 
 	errorPage := "ERROR"
 	ioutil.WriteFile(filepath.Join(dir, "404.html"), []byte(errorPage), 0600)
@@ -42,11 +37,7 @@ func TestIfErrorPageIsPresented(t *testing.T) {
 }
 
 func TestIfErrorPassedIfNoErrorPageIsFound(t *testing.T) {
-	dir, err := ioutil.TempDir("", "error_page")
-	if err != nil {
-		t.Fatal(err)
-	}
-	defer os.RemoveAll(dir)
+	dir := t.TempDir()
 
 	w := httptest.NewRecorder()
 	errorResponse := "ERROR"
@@ -63,11 +54,7 @@ func TestIfErrorPassedIfNoErrorPageIsFound(t *testing.T) {
 }
 
 func TestIfErrorPageIsIgnoredInDevelopment(t *testing.T) {
-	dir, err := ioutil.TempDir("", "error_page")
-	if err != nil {
-		t.Fatal(err)
-	}
-	defer os.RemoveAll(dir)
+	dir := t.TempDir()
 
 	errorPage := "ERROR"
 	ioutil.WriteFile(filepath.Join(dir, "500.html"), []byte(errorPage), 0600)
@@ -86,11 +73,7 @@ func TestIfErrorPageIsIgnoredInDevelopment(t *testing.T) {
 }
 
 func TestIfErrorPageIsIgnoredIfCustomError(t *testing.T) {
-	dir, err := ioutil.TempDir("", "error_page")
-	if err != nil {
-		t.Fatal(err)
-	}
-	defer os.RemoveAll(dir)
+	dir := t.TempDir()
 
 	errorPage := "ERROR"
 	ioutil.WriteFile(filepath.Join(dir, "500.html"), []byte(errorPage), 0600)
@@ -121,11 +104,7 @@ func TestErrorPageInterceptedByContentType(t *testing.T) {
 	}
 
 	for _, tc := range testCases {
-		dir, err := ioutil.TempDir("", "error_page")
-		if err != nil {
-			t.Fatal(err)
-		}
-		defer os.RemoveAll(dir)
+		dir := t.TempDir()
 
 		errorPage := "ERROR"
 		ioutil.WriteFile(filepath.Join(dir, "500.html"), []byte(errorPage), 0600)
@@ -6,7 +6,6 @@ import (
 	"io/ioutil"
 	"net/http"
 	"net/http/httptest"
-	"os"
 	"path/filepath"
 	"testing"
 
@@ -26,11 +25,7 @@ func TestServingNonExistingFile(t *testing.T) {
 }
 
 func TestServingDirectory(t *testing.T) {
-	dir, err := ioutil.TempDir("", "deploy")
-	if err != nil {
-		t.Fatal(err)
-	}
-	defer os.RemoveAll(dir)
+	dir := t.TempDir()
 
 	httpRequest, _ := http.NewRequest("GET", "/file", nil)
 	w := httptest.NewRecorder()
@@ -64,11 +59,7 @@ func TestExecutingHandlerWhenNoFileFound(t *testing.T) {
 }
 
 func TestServingTheActualFile(t *testing.T) {
-	dir, err := ioutil.TempDir("", "deploy")
-	if err != nil {
-		t.Fatal(err)
-	}
-	defer os.RemoveAll(dir)
+	dir := t.TempDir()
 
 	httpRequest, _ := http.NewRequest("GET", "/file", nil)
 
@@ -121,11 +112,7 @@ func TestExcludedPaths(t *testing.T) {
 }
 
 func testServingThePregzippedFile(t *testing.T, enableGzip bool) {
-	dir, err := ioutil.TempDir("", "deploy")
-	if err != nil {
-		t.Fatal(err)
-	}
-	defer os.RemoveAll(dir)
+	dir := t.TempDir()
 
 	httpRequest, _ := http.NewRequest("GET", "/file", nil)
 
@@ -10,7 +10,6 @@ import (
 	"mime/multipart"
 	"net/http"
 	"net/http/httptest"
-	"os"
 	"testing"
 	"time"
 
@@ -56,16 +55,11 @@ func testUploadArtifactsFromTestZip(t *testing.T, ts *httptest.Server) *httptest
 }
 
 func TestUploadHandlerSendingToExternalStorage(t *testing.T) {
-	tempPath, err := ioutil.TempDir("", "uploads")
-	if err != nil {
-		t.Fatal(err)
-	}
-	defer os.RemoveAll(tempPath)
+	tempPath := t.TempDir()
 
 	archiveData, md5 := createTestZipArchive(t)
-	archiveFile, err := ioutil.TempFile("", "artifact.zip")
+	archiveFile, err := ioutil.TempFile(tempPath, "artifact.zip")
 	require.NoError(t, err)
-	defer os.Remove(archiveFile.Name())
 	_, err = archiveFile.Write(archiveData)
 	require.NoError(t, err)
 	archiveFile.Close()
@@ -135,11 +129,7 @@ func TestUploadHandlerSendingToExternalStorage(t *testing.T) {
 }
 
 func TestUploadHandlerSendingToExternalStorageAndStorageServerUnreachable(t *testing.T) {
-	tempPath, err := ioutil.TempDir("", "uploads")
-	if err != nil {
-		t.Fatal(err)
-	}
-	defer os.RemoveAll(tempPath)
+	tempPath := t.TempDir()
 
 	responseProcessor := func(w http.ResponseWriter, r *http.Request) {
 		t.Fatal("it should not be called")
@@ -161,11 +151,7 @@ func TestUploadHandlerSendingToExternalStorageAndStorageServerUnreachable(t *tes
 }
 
 func TestUploadHandlerSendingToExternalStorageAndInvalidURLIsUsed(t *testing.T) {
-	tempPath, err := ioutil.TempDir("", "uploads")
-	if err != nil {
-		t.Fatal(err)
-	}
-	defer os.RemoveAll(tempPath)
+	tempPath := t.TempDir()
 
 	responseProcessor := func(w http.ResponseWriter, r *http.Request) {
 		t.Fatal("it should not be called")
@@ -130,8 +130,7 @@ type testServer struct {
 }
 
 func setupWithTmpPath(t *testing.T, filename string, includeFormat bool, format string, authResponse *api.Response, bodyProcessor func(w http.ResponseWriter, r *http.Request)) *testServer {
-	tempPath, err := ioutil.TempDir("", "uploads")
-	require.NoError(t, err)
+	tempPath := t.TempDir()
 
 	if authResponse == nil {
 		authResponse = &api.Response{TempPath: tempPath}
@@ -147,7 +146,6 @@ func setupWithTmpPath(t *testing.T, filename string, includeFormat bool, format
 
 	cleanup := func() {
 		ts.Close()
-		require.NoError(t, os.RemoveAll(tempPath))
 		require.NoError(t, writer.Close())
 	}
 
@@ -292,8 +290,7 @@ func TestUploadFormProcessing(t *testing.T) {
 }
 
 func TestLsifFileProcessing(t *testing.T) {
-	tempPath, err := ioutil.TempDir("", "uploads")
-	require.NoError(t, err)
+	tempPath := t.TempDir()
 
 	s := setupWithTmpPath(t, "file", true, "zip", &api.Response{TempPath: tempPath, ProcessLsif: true}, nil)
 	defer s.cleanup()
@@ -312,8 +309,7 @@ func TestLsifFileProcessing(t *testing.T) {
 }
 
 func TestInvalidLsifFileProcessing(t *testing.T) {
-	tempPath, err := ioutil.TempDir("", "uploads")
-	require.NoError(t, err)
+	tempPath := t.TempDir()
 
 	s := setupWithTmpPath(t, "file", true, "zip", &api.Response{TempPath: tempPath, ProcessLsif: true}, nil)
 	defer s.cleanup()
@@ -4,7 +4,6 @@ import (
 	"context"
 	"errors"
 	"fmt"
-	"io/ioutil"
 	"os"
 	"path"
 	"strconv"
@@ -43,9 +42,7 @@ func TestUploadWrongSize(t *testing.T) {
 	ctx, cancel := context.WithCancel(context.Background())
 	defer cancel()
 
-	tmpFolder, err := ioutil.TempDir("", "workhorse-test-tmp")
-	require.NoError(t, err)
-	defer os.RemoveAll(tmpFolder)
+	tmpFolder := t.TempDir()
 
 	opts := &destination.UploadOpts{LocalTempPath: tmpFolder}
 	fh, err := destination.Upload(ctx, strings.NewReader(test.ObjectContent), test.ObjectSize+1, "upload", opts)
@@ -59,9 +56,7 @@ func TestUploadWithKnownSizeExceedLimit(t *testing.T) {
 	ctx, cancel := context.WithCancel(context.Background())
 	defer cancel()
 
-	tmpFolder, err := ioutil.TempDir("", "workhorse-test-tmp")
-	require.NoError(t, err)
-	defer os.RemoveAll(tmpFolder)
+	tmpFolder := t.TempDir()
 
 	opts := &destination.UploadOpts{LocalTempPath: tmpFolder, MaximumSize: test.ObjectSize - 1}
 	fh, err := destination.Upload(ctx, strings.NewReader(test.ObjectContent), test.ObjectSize, "upload", opts)
@@ -75,9 +70,7 @@ func TestUploadWithUnknownSizeExceedLimit(t *testing.T) {
 	ctx, cancel := context.WithCancel(context.Background())
 	defer cancel()
 
-	tmpFolder, err := ioutil.TempDir("", "workhorse-test-tmp")
-	require.NoError(t, err)
-	defer os.RemoveAll(tmpFolder)
+	tmpFolder := t.TempDir()
 
 	opts := &destination.UploadOpts{LocalTempPath: tmpFolder, MaximumSize: test.ObjectSize - 1}
 	fh, err := destination.Upload(ctx, strings.NewReader(test.ObjectContent), -1, "upload", opts)
@@ -139,9 +132,7 @@ func TestUpload(t *testing.T) {
 		remoteMultipart
 	)
 
-	tmpFolder, err := ioutil.TempDir("", "workhorse-test-tmp")
-	require.NoError(t, err)
-	defer os.RemoveAll(tmpFolder)
+	tmpFolder := t.TempDir()
 
 	tests := []struct {
 		name string
@@ -301,8 +292,7 @@ func TestUploadWithS3WorkhorseClient(t *testing.T) {
 }
 
 func TestUploadWithAzureWorkhorseClient(t *testing.T) {
-	mux, bucketDir, cleanup := test.SetupGoCloudFileBucket(t, "azblob")
-	defer cleanup()
+	mux, bucketDir := test.SetupGoCloudFileBucket(t, "azblob")
 
 	ctx, cancel := context.WithCancel(context.Background())
 	defer cancel()
@@ -427,10 +417,6 @@ func TestUploadRemoteFileWithLimit(t *testing.T) {
 	var opts destination.UploadOpts
 
 	for _, remoteType := range remoteTypes {
-		tmpFolder, err := ioutil.TempDir("", "workhorse-test-tmp")
-		require.NoError(t, err)
-		defer os.RemoveAll(tmpFolder)
-
 		osStub, ts := test.StartObjectStore()
 		defer ts.Close()
 
@@ -15,8 +15,7 @@ import (
 )
 
 func TestGoCloudObjectUpload(t *testing.T) {
-	mux, _, cleanup := test.SetupGoCloudFileBucket(t, "azuretest")
-	defer cleanup()
+	mux, _ := test.SetupGoCloudFileBucket(t, "azuretest")
 
 	ctx, cancel := context.WithCancel(context.Background())
 	deadline := time.Now().Add(testTimeout)
@@ -4,8 +4,6 @@ import (
 	"context"
 	"fmt"
 	"io"
-	"io/ioutil"
-	"os"
 	"path/filepath"
 	"strings"
 	"sync"
@@ -47,9 +45,7 @@ func TestS3ObjectUpload(t *testing.T) {
 	defer ts.Close()
 
 	deadline := time.Now().Add(testTimeout)
-	tmpDir, err := ioutil.TempDir("", "workhorse-test-")
-	require.NoError(t, err)
-	defer os.Remove(tmpDir)
+	tmpDir := t.TempDir()
 
 	objectName := filepath.Join(tmpDir, "s3-test-data")
 	ctx, cancel := context.WithCancel(context.Background())
@@ -87,9 +83,7 @@ func TestConcurrentS3ObjectUpload(t *testing.T) {
 	defer artifactsServer.Close()
 
 	deadline := time.Now().Add(testTimeout)
-	tmpDir, err := ioutil.TempDir("", "workhorse-test-")
-	require.NoError(t, err)
-	defer os.Remove(tmpDir)
+	tmpDir := t.TempDir()
 
 	var wg sync.WaitGroup
 
@@ -136,9 +130,7 @@ func TestS3ObjectUploadCancel(t *testing.T) {
 	ctx, cancel := context.WithCancel(context.Background())
 
 	deadline := time.Now().Add(testTimeout)
-	tmpDir, err := ioutil.TempDir("", "workhorse-test-")
-	require.NoError(t, err)
-	defer os.Remove(tmpDir)
+	tmpDir := t.TempDir()
 
 	objectName := filepath.Join(tmpDir, "s3-test-data")
 
@@ -160,9 +152,7 @@ func TestS3ObjectUploadLimitReached(t *testing.T) {
 	defer ts.Close()
 
 	deadline := time.Now().Add(testTimeout)
-	tmpDir, err := ioutil.TempDir("", "workhorse-test-")
-	require.NoError(t, err)
-	defer os.Remove(tmpDir)
+	tmpDir := t.TempDir()
 
 	objectName := filepath.Join(tmpDir, "s3-test-data")
 	object, err := objectstore.NewS3Object(objectName, creds, config)
@@ -2,9 +2,7 @@ package test
 
 import (
 	"context"
-	"io/ioutil"
 	"net/url"
-	"os"
 	"testing"
 
 	"github.com/stretchr/testify/require"
@@ -20,18 +18,14 @@ func (o *dirOpener) OpenBucketURL(ctx context.Context, u *url.URL) (*blob.Bucket
 	return fileblob.OpenBucket(o.tmpDir, nil)
 }
 
-func SetupGoCloudFileBucket(t *testing.T, scheme string) (m *blob.URLMux, bucketDir string, cleanup func()) {
-	tmpDir, err := ioutil.TempDir("", "")
-	require.NoError(t, err)
+func SetupGoCloudFileBucket(t *testing.T, scheme string) (m *blob.URLMux, bucketDir string) {
+	tmpDir := t.TempDir()
 
 	mux := new(blob.URLMux)
 	fake := &dirOpener{tmpDir: tmpDir}
 	mux.RegisterBucket(scheme, fake)
-	cleanup = func() {
-		os.RemoveAll(tmpDir)
-	}
 
-	return mux, tmpDir, cleanup
+	return mux, tmpDir
 }
 
 func GoCloudObjectExists(t *testing.T, bucketDir string, objectName string) {
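Dropping the cleanup return value from SetupGoCloudFileBucket changes the helper's contract: callers stop pairing the call with defer cleanup(), because the directory backing the bucket now belongs to t.TempDir() and is removed when the test ends. A rough sketch of the same shape with a self-contained, hypothetical helper (names and the seeded object are illustrative, not from the changed files):

package example

import (
	"os"
	"path/filepath"
	"testing"
)

// setupFileBucket stands in for a setup helper that used to return a cleanup
// func. With t.TempDir() the directory's removal is registered automatically,
// so only the values callers actually need are returned.
func setupFileBucket(t *testing.T) (bucketDir string) {
	t.Helper()
	dir := t.TempDir()

	// Seed the "bucket" with one object so callers have something to read.
	if err := os.WriteFile(filepath.Join(dir, "object"), []byte("data"), 0o600); err != nil {
		t.Fatal(err)
	}

	return dir
}

func TestBucketHelper(t *testing.T) {
	bucketDir := setupFileBucket(t) // no `defer cleanup()` at the call site anymore

	if _, err := os.Stat(filepath.Join(bucketDir, "object")); err != nil {
		t.Fatalf("expected seeded object: %v", err)
	}
}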
@@ -124,13 +124,10 @@ func S3ObjectDoesNotExist(t *testing.T, sess *session.Session, config config.S3C
 }
 
 func downloadObject(t *testing.T, sess *session.Session, config config.S3Config, objectName string, handler func(tmpfile *os.File, numBytes int64, err error)) {
-	tmpDir, err := ioutil.TempDir("", "workhorse-test-")
-	require.NoError(t, err)
-	defer os.Remove(tmpDir)
+	tmpDir := t.TempDir()
 
 	tmpfile, err := ioutil.TempFile(tmpDir, "s3-output")
 	require.NoError(t, err)
-	defer os.Remove(tmpfile.Name())
 
 	downloadSvc := s3manager.NewDownloader(sess)
 	numBytes, err := downloadSvc.Download(tmpfile, &s3.GetObjectInput{
@@ -283,8 +283,7 @@ func TestUseWorkhorseClientEnabled(t *testing.T) {
 }
 
 func TestGoCloudConfig(t *testing.T) {
-	mux, _, cleanup := test.SetupGoCloudFileBucket(t, "azblob")
-	defer cleanup()
+	mux, _ := test.SetupGoCloudFileBucket(t, "azblob")
 
 	tests := []struct {
 		name string