SQL Server → S3 backup

SQL Server (2022 and later) supports S3-compatible backups via CREATE CREDENTIAL and BACKUP DATABASE ... TO URL. We use this with MinIO for project backups.

Prerequisites:

  • A project bucket and least-privilege user exist in MinIO.
  • The SQL Server host can reach the S3 endpoint.
  • Credentials are stored in the password manager (never in Git).

1) Create the credential

Create one credential per bucket URL prefix; the credential name must exactly match the s3://... URL prefix you back up to:

declare @bucket_url sysname = N's3://s3.coragem.app/<bucket-name>';
declare @access_key nvarchar(128) = N'<access_key>';
declare @secret_key nvarchar(256) = N'<secret_key>';

declare @sql nvarchar(max);

if exists (select 1 from sys.credentials where name = @bucket_url)
begin
  set @sql = N'drop credential [' + replace(@bucket_url, ']', ']]') + N']';
  exec sp_executesql @sql;
end;

set @sql = N'
  create credential [' + replace(@bucket_url, ']', ']]') + N']
  with identity = ''S3 Access Key'',
  secret = ''' + replace(@access_key, '''', '''''') + N':' + replace(@secret_key, '''', '''''') + N''';';
exec sp_executesql @sql;
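
To confirm the credential exists (or list all S3 credentials on the instance), query sys.credentials:

select name, credential_identity, create_date
from sys.credentials
where name like N's3://%';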

2) Backup one database

Run a one-off backup of a single database; the file name embeds server, database, and a UTC timestamp:

declare @bucket_url nvarchar(255) = N's3://s3.coragem.app/<bucket-name>';
declare @db sysname = N'<db_name>';
declare @server sysname = replace(cast(serverproperty('machinename') as nvarchar(128)), N' ', N'_');
declare @utc nvarchar(32) = format(getutcdate(), 'yyyyMMdd-HHmmss');
declare @file nvarchar(400) = upper(@server) + N'-' + upper(@db) + N'-' + @utc + N'-utc.bak';

declare @sql nvarchar(max) =
  N'backup database [' + replace(@db, ']', ']]') + N'] to url = ''' + replace(@bucket_url + N'/' + @file, N'''', N'''''') + N'''
    with init, format' +
    case when serverproperty('EngineEdition') = 4 then N'' else N', compression' end +
    N', maxtransfersize = 20971520, blocksize = 65536, stats = 10;';

exec sp_executesql @sql;
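
To check that the file is readable from the bucket, you can run RESTORE VERIFYONLY against the same URL (this reuses the credential from step 1; <file-name> stands in for the file the script just wrote):

restore verifyonly
from url = N's3://s3.coragem.app/<bucket-name>/<file-name>.bak';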

3) Backup all user databases (or one database)


Use this when you want one script to back up either:

  • all user databases, or
  • a single named database

This script estimates each database's size from sys.master_files and then stripes the backup into N parts (multiple URL = ... devices in one BACKUP DATABASE statement) when the database is larger than your target part size; the shape of the generated statement is shown below.
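
For reference, this is what the generated statement looks like for a database that needs two stripes (server, database, bucket, and timestamp are illustrative placeholders; non-Express shown, so COMPRESSION is included):

backup database [BigDb]
to
  URL = 's3://s3.coragem.app/<bucket-name>/SRV01-BIGDB-20240101-120000-1-of-2-utc.bak',
  URL = 's3://s3.coragem.app/<bucket-name>/SRV01-BIGDB-20240101-120000-2-of-2-utc.bak'
with init, format, compression, maxtransfersize = 20971520, blocksize = 65536, stats = 10;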

-- Backup Script: Backup specified database(s) to an S3 bucket
-- Supports backing up all user databases or a single specified database.
-- Stripes backups into parts of a maximum estimated size (e.g., 100GB) and includes "part X of Y" only when needed.

-- Parameters
declare @all_databases bit = 1; -- 1 = backup all user databases, 0 = a single database
declare @database_to_backup sysname = N'<db_name>'; -- used when @all_databases = 0
declare @bucket_url nvarchar(255) = N's3://s3.coragem.app/<bucket-name>'; -- bucket URL prefix (must match a credential name)
declare @max_part_size_gb int = 100; -- max estimated size per part (GB)

-- Variables
declare @dbname sysname;
declare @sql nvarchar(max);
declare @devices nvarchar(max);
declare @server sysname = replace(cast(serverproperty('machinename') as nvarchar(128)), N' ', N'_');
declare @utcdate nvarchar(16) = format(getutcdate(), 'yyyyMMdd');
declare @utctime nvarchar(16) = format(getutcdate(), 'HHmmss');
declare @estimated_db_size_mb bigint;
declare @total_parts int;
declare @i int;

-- Temporary table to log backup results
if object_id('tempdb..#backuplog') is not null drop table #backuplog;
create table #backuplog (
  srv_name       sysname,
  db_name        sysname,
  bak_location   nvarchar(500),
  backup_time    datetime2,
  backup_result  bit
);

declare db_cursor cursor for
select name
from sys.databases
where state_desc = 'ONLINE'
  and name not in ('master', 'tempdb', 'model', 'msdb')
  and (@all_databases = 1 or name = @database_to_backup);

open db_cursor;
fetch next from db_cursor into @dbname;

while @@fetch_status = 0
begin
  -- Estimate DB size from allocated file size (MB). This is a heuristic for choosing stripe count.
  select @estimated_db_size_mb = cast(ceiling(sum(size * 8.0 / 1024.0)) as bigint)
  from sys.master_files
  where database_id = db_id(@dbname);

  set @total_parts = cast(
    ceiling(@estimated_db_size_mb / (@max_part_size_gb * 1024.0)) as int
  );
  if @total_parts < 1 set @total_parts = 1;

  -- Build the device list: URL='...part...'[, URL='...part...']
  set @devices = N'';
  set @i = 1;

  while @i <= @total_parts
  begin
    declare @suffix nvarchar(64) =
      case when @total_parts > 1
           then N'-' + cast(@i as nvarchar(10)) + N'-of-' + cast(@total_parts as nvarchar(10))
           else N''
      end;

    declare @file nvarchar(400) =
      upper(@server) + N'-' + upper(@dbname) + N'-' + @utcdate + N'-' + @utctime + @suffix + N'-utc.bak';

    set @devices = @devices
      + case when @i > 1 then N',' + char(10) + N'  ' else N'  ' end
      + N'URL = '''
      + replace(@bucket_url + N'/' + @file, N'''', N'''''')
      + N'''';

    set @i += 1;
  end;

  set @sql =
    N'backup database [' + replace(@dbname, ']', ']]') + N']' + char(10) +
    N'to' + char(10) + @devices + char(10) +
    N'with init, format' +
      case when serverproperty('EngineEdition') = 4 then N'' else N', compression' end +
      N', maxtransfersize = 20971520, blocksize = 65536, stats = 10;';

  begin try
    exec sp_executesql @sql;

    -- Log each stripe file for convenience.
    set @i = 1;
    while @i <= @total_parts
    begin
      declare @suffix2 nvarchar(64) =
        case when @total_parts > 1
             then N'-' + cast(@i as nvarchar(10)) + N'-of-' + cast(@total_parts as nvarchar(10))
             else N''
        end;

      declare @file2 nvarchar(400) =
        upper(@server) + N'-' + upper(@dbname) + N'-' + @utcdate + N'-' + @utctime + @suffix2 + N'-utc.bak';

      insert into #backuplog
      values (@server, @dbname, @bucket_url + N'/' + @file2, getutcdate(), 1);

      set @i += 1;
    end
  end try
  begin catch
    insert into #backuplog
    values (@server, @dbname, @bucket_url, getutcdate(), 0);
  end catch;

  fetch next from db_cursor into @dbname;
end

close db_cursor;
deallocate db_cursor;

select * from #backuplog order by backup_time desc, db_name, bak_location;
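
Independent of #backuplog, msdb keeps a history of every backup. This convenience query lists recent URL backups (device_type = 9 marks URL devices):

select top (20)
  bs.database_name,
  bs.backup_finish_date,
  bs.compressed_backup_size,
  bmf.physical_device_name
from msdb.dbo.backupset bs
join msdb.dbo.backupmediafamily bmf
  on bmf.media_set_id = bs.media_set_id
where bmf.device_type = 9 -- 9 = URL
order by bs.backup_finish_date desc;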

For restore/recovery workflows, see Restore SQL Server from S3 (support).

Troubleshooting:

  • Access is denied / auth errors: the credential is missing or wrong for the exact s3://... URL prefix you used.
  • Connectivity: the SQL Server host cannot reach the S3 endpoint (DNS/VPN/routing). The error-log query below helps narrow down which case you hit.
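
When a backup fails, the SQL Server error log usually carries more detail than the client-side error. A quick way to pull matching entries (the 'BackupToUrl' search term is an assumption and may need adjusting for your build):

-- 0 = current log, 1 = SQL Server error log, then up to two search strings.
exec xp_readerrorlog 0, 1, N'BackupToUrl';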