# frozen_string_literal: true

require 'spec_helper'
require 'carrierwave/storage/fog'

# Minimal concrete uploader used to exercise ObjectStorage::Concern with
# `Upload`-record persistence (see `prepend` below).
class Implementation < GitlabUploader
  include ObjectStorage::Concern
  include ::RecordsUploads::Concern
  prepend ::ObjectStorage::Extension::RecordsUploads

  storage_options Gitlab.config.uploads

  private

  # user/:id
  def dynamic_segment
    File.join(model.class.underscore, model.id.to_s)
  end
end

RSpec.describe ObjectStorage do
  let(:uploader_class) { Implementation }
  let(:object) { build_stubbed(:user) }
  let(:uploader) { uploader_class.new(object, :file) }

  describe '#object_store=' do
    before do
      allow(uploader_class).to receive(:object_store_enabled?).and_return(true)
    end

    it "reload the local storage" do
      uploader.object_store = described_class::Store::LOCAL
      expect(uploader.file_storage?).to be_truthy
    end

    it "reload the REMOTE storage" do
      uploader.object_store = described_class::Store::REMOTE
      expect(uploader.file_storage?).to be_falsey
    end

    context 'object_store is Store::LOCAL' do
      before do
        uploader.object_store = described_class::Store::LOCAL
      end

      describe '#store_dir' do
        it 'is the composition of (base_dir, dynamic_segment)' do
          expect(uploader.store_dir).to start_with("uploads/-/system/user/")
        end
      end
    end

    context 'object_store is Store::REMOTE' do
      before do
        uploader.object_store = described_class::Store::REMOTE
      end

      describe '#store_dir' do
        it 'is the composition of (dynamic_segment)' do
          expect(uploader.store_dir).to start_with("user/")
        end
      end
    end
  end

  describe '#object_store' do
    subject { uploader.object_store }

    it "delegates to <mount>_store on model" do
      expect(object).to receive(:file_store)

      subject
    end

    context 'when store is null' do
      before do
        expect(object).to receive(:file_store).and_return(nil)
      end

      it "uses Store::LOCAL" do
        is_expected.to eq(described_class::Store::LOCAL)
      end
    end

    context 'when value is set' do
      before do
        expect(object).to receive(:file_store).and_return(described_class::Store::REMOTE)
      end

      it "returns the given value" do
        is_expected.to eq(described_class::Store::REMOTE)
      end
    end
  end

  describe '#file_cache_storage?' do
    context 'when file storage is used' do
      before do
        expect(uploader_class).to receive(:cache_storage) { CarrierWave::Storage::File }
      end

      it { expect(uploader).to be_file_cache_storage }
    end

    context 'when is remote storage' do
      before do
        expect(uploader_class).to receive(:cache_storage) { CarrierWave::Storage::Fog }
      end

      it { expect(uploader).not_to be_file_cache_storage }
    end
  end

  # this means the model shall include
  #   include RecordsUpload::Concern
  #   prepend ObjectStorage::Extension::RecordsUploads
  # the object_store persistence is delegated to the `Upload` model.
  #
  context 'when persist_object_store? is false' do
    let(:object) { create(:project, :with_avatar) }
    let(:uploader) { object.avatar }

    it { expect(object).to be_a(Avatarable) }
    it { expect(uploader.persist_object_store?).to be_falsey }

    describe 'delegates the object_store logic to the `Upload` model' do
      it 'sets @upload to the found `upload`' do
        # NOTE(review): comparing two calls of the same reader only verifies
        # consistency across calls, not the looked-up record itself — consider
        # asserting against the persisted Upload record directly.
        expect(uploader.upload).to eq(uploader.upload)
      end

      it 'sets @object_store to the `Upload` value' do
        expect(uploader.object_store).to eq(uploader.upload.store)
      end
    end

    describe '#migrate!' do
      let(:new_store) { ObjectStorage::Store::REMOTE }

      before do
        stub_uploads_object_storage(uploader: AvatarUploader)
      end

      subject { uploader.migrate!(new_store) }

      it 'persist @object_store to the recorded upload' do
        subject

        expect(uploader.upload.store).to eq(new_store)
      end

      describe 'fails' do
        it 'is handled gracefully' do
          store = uploader.object_store
          expect_next_instance_of(Upload) do |instance|
            expect(instance).to receive(:save!).and_raise("An error")
          end

          expect { subject }.to raise_error("An error")
          expect(uploader.exists?).to be_truthy
          expect(uploader.upload.store).to eq(store)
        end
      end
    end
  end

  # this means the model holds an <mounted_as>_store attribute directly
  # and do not delegate the object_store persistence to the `Upload` model.
  #
  context 'persist_object_store? is true' do
    context 'when using JobArtifactsUploader' do
      let(:store) { described_class::Store::LOCAL }
      let(:object) { create(:ci_job_artifact, :archive, file_store: store) }
      let(:uploader) { object.file }

      context 'checking described_class' do
        it "uploader include described_class::Concern" do
          expect(uploader).to be_a(described_class::Concern)
        end
      end

      describe '#use_file' do
        context 'when file is stored locally' do
          it "calls a regular path" do
            expect { |b| uploader.use_file(&b) }.not_to yield_with_args(%r[tmp/cache])
          end
        end

        context 'when file is stored remotely' do
          let(:store) { described_class::Store::REMOTE }

          before do
            stub_artifacts_object_storage
          end

          it "calls a cache path" do
            expect { |b| uploader.use_file(&b) }.to yield_with_args(%r[tmp/cache])
          end

          it "cleans up the cached file" do
            cached_path = ''

            uploader.use_file do |path|
              cached_path = path

              expect(File.exist?(cached_path)).to be_truthy
            end

            expect(File.exist?(cached_path)).to be_falsey
          end
        end
      end

      describe '#use_open_file' do
        context 'when file is stored locally' do
          it "returns the file" do
            expect { |b| uploader.use_open_file(&b) }.to yield_with_args(an_instance_of(ObjectStorage::Concern::OpenFile))
          end
        end

        context 'when file is stored remotely' do
          let(:store) { described_class::Store::REMOTE }

          before do
            stub_artifacts_object_storage

            # We need to check the Host header not including the port because AWS does not accept
            stub_request(:get, %r{s3.amazonaws.com/#{uploader.path}})
              .with { |request| !request.headers['Host'].to_s.include?(':443') }
              .to_return(status: 200, body: '')
          end

          it "returns the file" do
            expect { |b| uploader.use_open_file(&b) }.to yield_with_args(an_instance_of(ObjectStorage::Concern::OpenFile))
          end
        end
      end

      describe '#migrate!' do
        subject { uploader.migrate!(new_store) }

        shared_examples "updates the underlying <mounted>_store" do
          it do
            subject

            expect(object.file_store).to eq(new_store)
          end
        end

        context 'when using the same storage' do
          let(:new_store) { store }

          it "to not migrate the storage" do
            # The message expectation must be set *before* `subject` runs;
            # setting it afterwards makes the assertion vacuous.
            expect(uploader).not_to receive(:store!)

            subject

            expect(uploader.object_store).to eq(store)
          end
        end

        context 'when migrating to local storage' do
          let(:store) { described_class::Store::REMOTE }
          let(:new_store) { described_class::Store::LOCAL }

          before do
            stub_artifacts_object_storage
          end

          include_examples "updates the underlying <mounted>_store"

          it "local file does not exist" do
            expect(File.exist?(uploader.path)).to eq(false)
          end

          it "remote file exist" do
            expect(uploader.file.exists?).to be_truthy
          end

          it "does migrate the file" do
            subject

            expect(uploader.object_store).to eq(new_store)
            expect(File.exist?(uploader.path)).to eq(true)
          end
        end

        context 'when migrating to remote storage' do
          let(:new_store) { described_class::Store::REMOTE }
          let!(:current_path) { uploader.path }

          it "file does exist" do
            expect(File.exist?(current_path)).to eq(true)
          end

          context 'when storage is disabled' do
            before do
              stub_artifacts_object_storage(enabled: false)
            end

            it "to raise an error" do
              expect { subject }.to raise_error(/Object Storage is not enabled for JobArtifactUploader/)
            end
          end

          context 'when credentials are set' do
            before do
              stub_artifacts_object_storage
            end

            include_examples "updates the underlying <mounted>_store"

            it "does migrate the file" do
              subject

              expect(uploader.object_store).to eq(new_store)
            end

            it "does delete original file" do
              subject

              expect(File.exist?(current_path)).to eq(false)
            end

            context 'when subject save fails' do
              before do
                expect(uploader).to receive(:persist_object_store!).and_raise(RuntimeError, "exception")
              end

              it "original file is not removed" do
                expect { subject }.to raise_error(/exception/)

                expect(File.exist?(current_path)).to eq(true)
              end
            end
          end
        end
      end
    end
  end

  describe '#fog_directory' do
    let(:remote_directory) { 'directory' }

    before do
      allow(uploader_class).to receive(:options) do
        double(object_store: double(remote_directory: remote_directory))
      end
    end

    subject { uploader.fog_directory }

    it { is_expected.to eq(remote_directory) }
  end

  context 'when file is in use' do
    def when_file_is_in_use
      uploader.use_file do
        yield
      end
    end

    it 'cannot migrate' do
      when_file_is_in_use do
        expect(uploader).not_to receive(:unsafe_migrate!)

        expect { uploader.migrate!(described_class::Store::REMOTE) }.to raise_error(ObjectStorage::ExclusiveLeaseTaken)
      end
    end

    it 'cannot use_file' do
      when_file_is_in_use do
        expect(uploader).not_to receive(:unsafe_use_file)

        expect { uploader.use_file }.to raise_error(ObjectStorage::ExclusiveLeaseTaken)
      end
    end

    it 'can still migrate other files of the same model' do
      uploader2 = uploader_class.new(object, :file)
      uploader2.upload = create(:upload)
      uploader.upload = create(:upload)

      when_file_is_in_use do
        expect(uploader2).to receive(:unsafe_migrate!)

        uploader2.migrate!(described_class::Store::REMOTE)
      end
    end
  end

  describe '#fog_credentials' do
    let(:connection) { Settingslogic.new("provider" => "AWS") }

    before do
      allow(uploader_class).to receive(:options) do
        double(object_store: double(connection: connection))
      end
    end

    subject { uploader.fog_credentials }

    it { is_expected.to eq(provider: 'AWS') }
  end

  describe '#fog_public' do
    subject { uploader.fog_public }

    it { is_expected.to eq(nil) }
  end

  describe '#fog_attributes' do
    subject { uploader.fog_attributes }

    it { is_expected.to eq({}) }

    context 'with encryption configured' do
      let(:raw_options) do
        {
          "enabled" => true,
          "connection" => { "provider" => 'AWS' },
          "storage_options" => { "server_side_encryption" => "AES256" }
        }
      end

      let(:options) { Settingslogic.new(raw_options) }

      before do
        allow(uploader_class).to receive(:options) do
          double(object_store: options)
        end
      end

      it { is_expected.to eq({ "x-amz-server-side-encryption" => "AES256" }) }
    end
  end

  describe '.workhorse_authorize' do
    let(:has_length) { true }
    let(:maximum_size) { nil }

    subject { uploader_class.workhorse_authorize(has_length: has_length, maximum_size: maximum_size) }

    shared_examples 'returns the maximum size given' do
      it "returns temporary path" do
        expect(subject[:MaximumSize]).to eq(maximum_size)
      end
    end

    shared_examples 'uses local storage' do
      it_behaves_like 'returns the maximum size given' do
        it "returns temporary path" do
          is_expected.to have_key(:TempPath)

          expect(subject[:TempPath]).to start_with(uploader_class.root)
          expect(subject[:TempPath]).to include(described_class::TMP_UPLOAD_PATH)
        end
      end
    end

    shared_examples 'uses remote storage' do
      it_behaves_like 'returns the maximum size given' do
        it "returns remote store" do
          is_expected.to have_key(:RemoteObject)

          expect(subject[:RemoteObject]).to have_key(:ID)
          expect(subject[:RemoteObject]).to include(Timeout: a_kind_of(Integer))
          expect(subject[:RemoteObject][:Timeout]).to be(ObjectStorage::DirectUpload::TIMEOUT)
          expect(subject[:RemoteObject]).to have_key(:GetURL)
          expect(subject[:RemoteObject]).to have_key(:DeleteURL)
          expect(subject[:RemoteObject]).to have_key(:StoreURL)
          expect(subject[:RemoteObject][:GetURL]).to include(described_class::TMP_UPLOAD_PATH)
          expect(subject[:RemoteObject][:DeleteURL]).to include(described_class::TMP_UPLOAD_PATH)
          expect(subject[:RemoteObject][:StoreURL]).to include(described_class::TMP_UPLOAD_PATH)
        end
      end
    end

    shared_examples 'uses remote storage with multipart uploads' do
      it_behaves_like 'uses remote storage' do
        it "returns multipart upload" do
          is_expected.to have_key(:RemoteObject)

          expect(subject[:RemoteObject]).to have_key(:MultipartUpload)
          expect(subject[:RemoteObject][:MultipartUpload]).to have_key(:PartSize)
          expect(subject[:RemoteObject][:MultipartUpload]).to have_key(:PartURLs)
          expect(subject[:RemoteObject][:MultipartUpload]).to have_key(:CompleteURL)
          expect(subject[:RemoteObject][:MultipartUpload]).to have_key(:AbortURL)
          expect(subject[:RemoteObject][:MultipartUpload][:PartURLs]).to all(include(described_class::TMP_UPLOAD_PATH))
          expect(subject[:RemoteObject][:MultipartUpload][:CompleteURL]).to include(described_class::TMP_UPLOAD_PATH)
          expect(subject[:RemoteObject][:MultipartUpload][:AbortURL]).to include(described_class::TMP_UPLOAD_PATH)
        end
      end
    end

    shared_examples 'uses remote storage without multipart uploads' do
      it_behaves_like 'uses remote storage' do
        it "does not return multipart upload" do
          is_expected.to have_key(:RemoteObject)
          expect(subject[:RemoteObject]).not_to have_key(:MultipartUpload)
        end
      end
    end

    context 'when object storage is disabled' do
      before do
        allow(Gitlab.config.uploads.object_store).to receive(:enabled) { false }
      end

      it_behaves_like 'uses local storage'
    end

    context 'when object storage is enabled' do
      before do
        allow(Gitlab.config.uploads.object_store).to receive(:enabled) { true }
      end

      context 'when direct upload is enabled' do
        before do
          allow(Gitlab.config.uploads.object_store).to receive(:direct_upload) { true }
        end

        context 'uses AWS' do
          let(:storage_url) { "https://uploads.s3.eu-central-1.amazonaws.com/" }
          let(:credentials) do
            {
              provider: "AWS",
              aws_access_key_id: "AWS_ACCESS_KEY_ID",
              aws_secret_access_key: "AWS_SECRET_ACCESS_KEY",
              region: "eu-central-1"
            }
          end

          before do
            expect_next_instance_of(ObjectStorage::Config) do |instance|
              allow(instance).to receive(:credentials).and_return(credentials)
            end
          end

          context 'for known length' do
            it_behaves_like 'uses remote storage without multipart uploads' do
              it 'returns links for S3' do
                expect(subject[:RemoteObject][:GetURL]).to start_with(storage_url)
                expect(subject[:RemoteObject][:DeleteURL]).to start_with(storage_url)
                expect(subject[:RemoteObject][:StoreURL]).to start_with(storage_url)
              end
            end
          end

          context 'for unknown length' do
            let(:has_length) { false }
            let(:maximum_size) { 1.gigabyte }

            before do
              stub_object_storage_multipart_init(storage_url)
            end

            it_behaves_like 'uses remote storage with multipart uploads' do
              it 'returns links for S3' do
                expect(subject[:RemoteObject][:GetURL]).to start_with(storage_url)
                expect(subject[:RemoteObject][:DeleteURL]).to start_with(storage_url)
                expect(subject[:RemoteObject][:StoreURL]).to start_with(storage_url)
                expect(subject[:RemoteObject][:MultipartUpload][:PartURLs]).to all(start_with(storage_url))
                expect(subject[:RemoteObject][:MultipartUpload][:CompleteURL]).to start_with(storage_url)
                expect(subject[:RemoteObject][:MultipartUpload][:AbortURL]).to start_with(storage_url)
              end
            end
          end
        end

        context 'uses Google' do
          let(:storage_url) { "https://storage.googleapis.com/uploads/" }
          let(:credentials) do
            {
              provider: "Google",
              google_storage_access_key_id: 'ACCESS_KEY_ID',
              google_storage_secret_access_key: 'SECRET_ACCESS_KEY'
            }
          end

          before do
            expect_next_instance_of(ObjectStorage::Config) do |instance|
              allow(instance).to receive(:credentials).and_return(credentials)
            end
          end

          context 'for known length' do
            it_behaves_like 'uses remote storage without multipart uploads' do
              it 'returns links for Google Cloud' do
                expect(subject[:RemoteObject][:GetURL]).to start_with(storage_url)
                expect(subject[:RemoteObject][:DeleteURL]).to start_with(storage_url)
                expect(subject[:RemoteObject][:StoreURL]).to start_with(storage_url)
              end
            end
          end

          context 'for unknown length' do
            let(:has_length) { false }
            let(:maximum_size) { 1.gigabyte }

            it_behaves_like 'uses remote storage without multipart uploads' do
              it 'returns links for Google Cloud' do
                expect(subject[:RemoteObject][:GetURL]).to start_with(storage_url)
                expect(subject[:RemoteObject][:DeleteURL]).to start_with(storage_url)
                expect(subject[:RemoteObject][:StoreURL]).to start_with(storage_url)
              end
            end
          end
        end

        context 'uses GDK/minio' do
          let(:storage_url) { "http://minio:9000/uploads/" }
          let(:credentials) do
            { provider: "AWS",
              aws_access_key_id: "AWS_ACCESS_KEY_ID",
              aws_secret_access_key: "AWS_SECRET_ACCESS_KEY",
              endpoint: 'http://minio:9000',
              path_style: true,
              region: "gdk" }
          end

          before do
            expect_next_instance_of(ObjectStorage::Config) do |instance|
              allow(instance).to receive(:credentials).and_return(credentials)
            end
          end

          context 'for known length' do
            it_behaves_like 'uses remote storage without multipart uploads' do
              it 'returns links for S3' do
                expect(subject[:RemoteObject][:GetURL]).to start_with(storage_url)
                expect(subject[:RemoteObject][:DeleteURL]).to start_with(storage_url)
                expect(subject[:RemoteObject][:StoreURL]).to start_with(storage_url)
              end
            end
          end

          context 'for unknown length' do
            let(:has_length) { false }
            let(:maximum_size) { 1.gigabyte }

            before do
              stub_object_storage_multipart_init(storage_url)
            end

            it_behaves_like 'uses remote storage with multipart uploads' do
              it 'returns links for S3' do
                expect(subject[:RemoteObject][:GetURL]).to start_with(storage_url)
                expect(subject[:RemoteObject][:DeleteURL]).to start_with(storage_url)
                expect(subject[:RemoteObject][:StoreURL]).to start_with(storage_url)
                expect(subject[:RemoteObject][:MultipartUpload][:PartURLs]).to all(start_with(storage_url))
                expect(subject[:RemoteObject][:MultipartUpload][:CompleteURL]).to start_with(storage_url)
                expect(subject[:RemoteObject][:MultipartUpload][:AbortURL]).to start_with(storage_url)
              end
            end
          end
        end
      end

      context 'when direct upload is disabled' do
        before do
          allow(Gitlab.config.uploads.object_store).to receive(:direct_upload) { false }
        end

        it_behaves_like 'uses local storage'
      end
    end
  end

  describe '#cache!' do
    subject do
      uploader.cache!(uploaded_file)
    end

    context 'when local file is used' do
      let(:temp_file) { Tempfile.new("test") }

      before do
        FileUtils.touch(temp_file)
      end

      after do
        FileUtils.rm_f(temp_file)
      end

      context 'when valid file is used' do
        context 'when valid file is specified' do
          let(:uploaded_file) { temp_file }

          it 'properly caches the file' do
            subject

            expect(uploader).to be_exists
            expect(uploader.path).to start_with(uploader_class.root)
            expect(uploader.filename).to eq(File.basename(uploaded_file.path))
          end

          context 'when object storage and direct upload is specified' do
            before do
              stub_uploads_object_storage(uploader_class, enabled: true, direct_upload: true)
            end

            context 'when file is stored' do
              subject do
                uploader.store!(uploaded_file)
              end

              it 'file to be remotely stored in permament location' do
                subject

                expect(uploader).to be_exists
                expect(uploader).not_to be_cached
                expect(uploader).not_to be_file_storage
                expect(uploader.path).not_to be_nil
                expect(uploader.path).not_to include('tmp/upload')
                expect(uploader.path).not_to include('tmp/cache')
                expect(uploader.object_store).to eq(described_class::Store::REMOTE)
              end
            end
          end

          context 'when object storage and direct upload is not used' do
            before do
              stub_uploads_object_storage(uploader_class, enabled: true, direct_upload: false)
            end

            context 'when file is stored' do
              subject do
                uploader.store!(uploaded_file)
              end

              it 'file to be remotely stored in permament location' do
                subject

                expect(uploader).to be_exists
                expect(uploader).not_to be_cached
                expect(uploader).to be_file_storage
                expect(uploader.path).not_to be_nil
                expect(uploader.path).not_to include('tmp/upload')
                expect(uploader.path).not_to include('tmp/cache')
                expect(uploader.object_store).to eq(described_class::Store::LOCAL)
              end
            end
          end
        end
      end
    end

    context 'when remote file is used' do
      let(:temp_file) { Tempfile.new("test") }

      let!(:fog_connection) do
        stub_uploads_object_storage(uploader_class)
      end

      before do
        FileUtils.touch(temp_file)
      end

      after do
        FileUtils.rm_f(temp_file)
      end

      context 'when valid file is used' do
        context 'when invalid file is specified' do
          let(:uploaded_file) do
            UploadedFile.new(temp_file.path, remote_id: "../test/123123")
          end

          it 'raises an error' do
            expect { subject }.to raise_error(uploader_class::RemoteStoreError, /Bad file path/)
          end
        end

        context 'when non existing file is specified' do
          let(:uploaded_file) do
            UploadedFile.new(temp_file.path, remote_id: "test/123123")
          end

          it 'raises an error' do
            expect { subject }.to raise_error(uploader_class::RemoteStoreError, /Missing file/)
          end
        end

        context 'when empty remote_id is specified' do
          let(:uploaded_file) do
            UploadedFile.new(temp_file.path, remote_id: '')
          end

          it 'uses local storage' do
            subject

            expect(uploader).to be_file_storage
            expect(uploader.object_store).to eq(described_class::Store::LOCAL)
          end
        end

        context 'when valid file is specified' do
          let(:uploaded_file) do
            UploadedFile.new(temp_file.path, filename: "my_file.txt", remote_id: "test/123123")
          end

          let!(:fog_file) do
            fog_connection.directories.new(key: 'uploads').files.create( # rubocop:disable Rails/SaveBang
              key: 'tmp/uploads/test/123123',
              body: 'content'
            )
          end

          it 'file to be cached and remote stored' do
            expect { subject }.not_to raise_error

            expect(uploader).to be_exists
            expect(uploader).to be_cached
            expect(uploader).not_to be_file_storage
            expect(uploader.path).not_to be_nil
            expect(uploader.path).not_to include('tmp/cache')
            # Was a duplicated 'tmp/cache' assertion; sibling examples check
            # both the cache and the upload tmp directories.
            expect(uploader.path).not_to include('tmp/upload')
            expect(uploader.object_store).to eq(described_class::Store::REMOTE)
          end

          context 'when file is stored' do
            subject do
              uploader.store!(uploaded_file)
            end

            it 'file to be remotely stored in permament location' do
              subject

              expect(uploader).to be_exists
              expect(uploader).not_to be_cached
              expect(uploader).not_to be_file_storage
              expect(uploader.path).not_to be_nil
              expect(uploader.path).not_to include('tmp/upload')
              expect(uploader.path).not_to include('tmp/cache')
              expect(uploader.url).to include('/my_file.txt')
              expect(uploader.object_store).to eq(described_class::Store::REMOTE)
            end
          end
        end
      end
    end
  end

  describe '#retrieve_from_store!' do
    [:group, :project, :user].each do |model|
      context "for #{model}s" do
        let(:models) { create_list(model, 3, :with_avatar).map(&:reload) }
        let(:avatars) { models.map(&:avatar) }

        it 'batches fetching uploads from the database' do
          # Ensure that these are all created and fully loaded before we start
          # running queries for avatars
          models

          expect { avatars }.not_to exceed_query_limit(1)
        end

        it 'does not attempt to replace methods' do
          models.each do |model|
            expect(model.avatar.upload).to receive(:method_missing).and_call_original

            model.avatar.upload.path
          end
        end

        it 'fetches a unique upload for each model' do
          expect(avatars.map(&:url).uniq).to eq(avatars.map(&:url))
          expect(avatars.map(&:upload).uniq).to eq(avatars.map(&:upload))
        end
      end
    end
  end

  describe 'OpenFile' do
    subject { ObjectStorage::Concern::OpenFile.new(file) }

    let(:file) { double(read: true, size: true, path: true) }

    it 'delegates read and size methods' do
      expect(subject.read).to eq(true)
      expect(subject.size).to eq(true)
    end

    it 'does not delegate path method' do
      expect { subject.path }.to raise_error(NoMethodError)
    end
  end
end