diff --git a/lib/Convos/Core/Backend/S3.pm b/lib/Convos/Core/Backend/S3.pm
new file mode 100644
index 000000000..508f3c03f
--- /dev/null
+++ b/lib/Convos/Core/Backend/S3.pm
@@ -0,0 +1,410 @@
# ABOUTME: S3-compatible storage backend for Convos
# ABOUTME: Extends File backend to store objects in S3/Tigris while keeping logs local
package Convos::Core::Backend::S3;
use Mojo::Base 'Convos::Core::Backend::File', -async_await;

use Convos::Util::S3 qw(sign_request);
use Mojo::Collection;
use Mojo::JSON qw(decode_json encode_json);
use Mojo::UserAgent;
use Mojo::URL;
use Mojo::DOM;

# S3 connection settings, overridable from the environment
has s3_endpoint => sub { $ENV{CONVOS_S3_ENDPOINT} || 'https://fly.storage.tigris.dev' };
has s3_bucket   => sub { $ENV{CONVOS_S3_BUCKET} || die 'CONVOS_S3_BUCKET required' };
has s3_key      => sub { $ENV{CONVOS_S3_KEY} || die 'CONVOS_S3_KEY required' };
has s3_secret   => sub { $ENV{CONVOS_S3_SECRET} || die 'CONVOS_S3_SECRET required' };
has s3_region   => sub { $ENV{CONVOS_S3_REGION} || 'auto' };
has ua          => sub { Mojo::UserAgent->new };

# Serialize the object as "private" JSON and PUT it to S3.
# Returns a promise resolving to $obj. Dies when the PUT is rejected.
async sub save_object_p {
  my ($self, $obj) = @_;
  my $key     = $self->_s3_key($obj);
  my $content = encode_json($obj->TO_JSON('private'));

  my $res = await $self->_s3_request_p('PUT', $key, $content, 'application/json');

  # A failed PUT used to be logged as "Save success"; fail loudly instead
  die sprintf 'PUT s3://%s/%s failed: %s', $self->s3_bucket, $key, $res->code // 'no response'
    unless $res->is_success;

  $obj->logf(debug => 'Save success. (s3://%s/%s)', $self->s3_bucket, $key);
  return $obj;
}

# GET the object's JSON document from S3.
# Resolves to a hashref of object data, or {} when the object does not exist (404).
async sub load_object_p {
  my ($self, $obj) = @_;
  my $key = $self->_s3_key($obj);

  my $res = await $self->_s3_request_p('GET', $key);

  # Only a missing object (404) means "empty" - any other error used to be
  # silently treated as an empty object, which could wipe state downstream
  return {} if ($res->code // 0) == 404;
  die sprintf 'GET s3://%s/%s failed: %s', $self->s3_bucket, $key, $res->code // 'no response'
    unless $res->is_success;

  # Old code pre-initialized $data to {} before the eval, which made the
  # "unless $data" check unreachable and swallowed JSON decode errors
  my $data = eval { decode_json($res->body) };
  die($@ || 'Invalid JSON from S3') unless ref $data eq 'HASH';

  return $data;
}

# DELETE the object from S3. Resolves to $obj. The DELETE result is
# intentionally ignored - deleting a missing object is not an error.
async sub delete_object_p {
  my ($self, $obj) = @_;

  # Connections must stop emitting events before they are torn down
  if ($obj->isa('Convos::Core::Connection')) {
    $obj->unsubscribe($_) for qw(conversation message state);
  }

  my $key = $self->_s3_key($obj);
  await $self->_s3_request_p('DELETE', $key);

  return $obj;
}

# List all users by scanning the users/ prefix and loading each user.json.
# Resolves to an arrayref of user data hashes.
async sub users_p {
  my $self = shift;

  # Every user lives under users/<email>/ - list the directory prefixes
  my $result = await $self->_s3_list_p('users/', '/');

  my @users;
  for my $prefix (@{$result->{prefixes}}) {
    # Each prefix is like "users/joe@example.com/"
    my $key = "${prefix}user.json";
    my $res = await $self->_s3_request_p('GET', $key);
    next unless $res->is_success;

    # Skip users whose user.json cannot be decoded instead of silently
    # pushing an empty hash (pre-initialized-{} bug in the old code)
    my $data = eval { decode_json($res->body) };
    push @users, $data if ref $data eq 'HASH';
  }

  # Sort users by registered date, then email (matching File backend behavior)
  @users = sort {
    ($a->{registered} || '') cmp ($b->{registered} || '')
      || ($a->{email} || '') cmp ($b->{email} || '')
  } @users;

  return \@users;
}

# List all connections for a user by scanning the user's directory and
# loading each connection.json. Resolves to an arrayref of data hashes.
async sub connections_p {
  my ($self, $user) = @_;

  # Build the user's directory prefix from its uri() path segments
  my $user_path = join '/', @{$user->uri};
  my $prefix    = "users/${user_path}/";

  # List all connection directories for this user
  my $result = await $self->_s3_list_p($prefix, '/');

  my @connections;
  for my $conn_prefix (@{$result->{prefixes}}) {
    # Each prefix is like "users/joe@example.com/irc-libera/"
    my $key = "${conn_prefix}connection.json";
    my $res = await
$self->_s3_request_p('GET', $key);
    next unless $res->is_success;

    # Skip undecodable connection.json instead of collecting an empty hash
    my $data = eval { decode_json($res->body) };
    next unless ref $data eq 'HASH';
    delete $data->{state};    # runtime state should not be stored in connection.json
    push @connections, $data;
  }

  return \@connections;
}

# List a user's uploaded files with cursor based pagination.
# Each upload is a pair of objects: <id>.json (metadata) and <id>.data (content).
# Resolves to {files => Mojo::Collection, after/next/prev => cursors}.
async sub files_p {
  my ($self, $user, $params) = @_;

  # Build prefix for the user's upload directory
  my $prefix = sprintf 'users/%s/upload/', $user->email;

  # No delimiter: we want every key below the upload/ prefix
  my $result = await $self->_s3_list_p($prefix, '');

  # Only .json files with a matching .data file are real uploads
  my %data_files = map { $_ => 1 } grep { /\.data$/ } @{$result->{keys}};

  my @items;
  for my $key (@{$result->{keys}}) {
    next unless $key =~ m!/([^/]+)\.json$!;
    my $id       = $1;
    my $data_key = $key;
    $data_key =~ s/\.json$/.data/;
    next unless $data_files{$data_key};

    # Load the metadata JSON; skip files with unreadable metadata
    my $res = await $self->_s3_request_p('GET', $key);
    next unless $res->is_success;

    my $info = eval { decode_json($res->body) };
    next unless ref $info eq 'HASH';

    push @items, {id => $id, info => $info, saved => $info->{saved} || ''};
  }

  # Newest first, then by id for a stable order
  @items = sort { ($b->{saved} || '') cmp ($a->{saved} || '') || $a->{id} cmp $b->{id} } @items;

  # Apply pagination
  $params->{limit} = 60 if !$params->{limit} or $params->{limit} > 60;
  my $res = {files => []};

  my @before;
  for my $item (@items) {
    if ($params->{after} and $params->{after} eq $item->{id}) {
      $res->{after} = $item->{id};
    }
    elsif (@{$res->{files}} >= $params->{limit}) {
      $res->{next} = $item->{id};
      last;
    }
    elsif (!$params->{after} or $res->{after}) {
      push @{$res->{files}}, {
        id    => $item->{id},

        # Was garbled "$item->{info}(unknown)" - the metadata key is
        # "filename" (see the files_p unit test expecting 'photo.jpg')
        name  => $item->{info}{filename} || $item->{id},
        saved => $item->{saved},
        size  => $item->{info}{size} || 0,
      };
    }
    else {
      push @before, $item->{id};
    }
  }

  if (@{$res->{files}} and @before > $params->{limit}) {
    $res->{prev} = $before[-$params->{limit}];
  }

  $res->{files} = Mojo::Collection->new(@{$res->{files}});
  return $res;
}

# Perform a ListObjects request, returning {keys => [...], prefixes => [...]}.
# NOTE(review): the listing is not paginated - S3 truncates results at 1000
# keys (IsTruncated/NextMarker), so very large directories would be cut off.
async sub _s3_list_p {
  my ($self, $prefix, $delimiter) = @_;
  $prefix    //= '';
  $delimiter //= '';

  # Build URL with query params for the LIST operation
  my $url   = sprintf '%s/%s', $self->s3_endpoint, $self->s3_bucket;
  my $query = Mojo::URL->new->query(prefix => $prefix, delimiter => $delimiter)->query->to_string;
  $url .= "?$query" if $query;

  # Generate AWS4 signature for the GET request
  my $headers = sign_request(
    method  => 'GET',
    url     => $url,
    headers => {},
    payload => '',
    key     => $self->s3_key,
    secret  => $self->s3_secret,
    region  => $self->s3_region,
  );

  # Execute LIST request
  my $tx  = await $self->ua->get_p($url => $headers);
  my $res = $tx->res;

  return {keys => [], prefixes => []} unless $res->is_success;

  # Parse the XML listing: object keys and directory-like prefixes
  my $dom      = Mojo::DOM->new($res->body);
  my @keys     = $dom->find('Contents > Key')->map('text')->each;
  my @prefixes = $dom->find('CommonPrefixes > Prefix')->map('text')->each;

  return {keys => \@keys, prefixes => \@prefixes};
}

# Map an object's uri() to its S3 key, namespaced under users/
sub _s3_key {
  my ($self, $obj) = @_;
  my $path = $obj->uri->to_string;
  return "users/$path";
}

# Low level signed HTTP request. Returns the Mojo response object so the
# caller decides how to treat non-2xx codes.
async sub _s3_request_p {
  my ($self, $method, $key, $body, $content_type) = @_;
  $body         //= '';
  $content_type //= 'application/octet-stream';

  my $url = sprintf '%s/%s/%s', $self->s3_endpoint, $self->s3_bucket, $key;

  # Generate AWS4 signature
  my $headers = sign_request(
    method  => $method,
    url     => $url,
    headers => {'Content-Type' => $content_type},
    payload => $body,
    key     => $self->s3_key,
    secret  => $self->s3_secret,
    region  => $self->s3_region,
  );

  # Content-Type was part of the signature, so it must also be sent
  $headers->{'Content-Type'} = $content_type;

  # Execute request based on method
  my $tx;
  if    ($method eq 'GET')    { $tx = await $self->ua->get_p($url => $headers) }
  elsif ($method eq 'PUT')    { $tx = await $self->ua->put_p($url => $headers => $body) }
  elsif ($method eq 'DELETE') { $tx = await $self->ua->delete_p($url => $headers) }
  else                        { die "Unsupported HTTP method: $method" }

  return $tx->res;
}

1;

=encoding utf8

=head1 NAME

Convos::Core::Backend::S3 - S3-compatible storage backend

=head1 SYNOPSIS

  use Convos::Core::Backend::S3;

  my $backend = Convos::Core::Backend::S3->new(
    s3_endpoint => 'https://fly.storage.tigris.dev',
    s3_bucket   => 'my-bucket',
    s3_key      => $access_key,
    s3_secret   => $secret_key,
    s3_region   => 'auto',
    home        => '/path/to/local/logs',
  );

=head1 DESCRIPTION

L<Convos::Core::Backend::S3> is a storage backend that stores objects
(users, connections, settings) in S3-compatible storage while keeping
message logs and notifications on the local filesystem.

This is useful for deploying Convos in environments like Fly.io where
you want persistent object storage without maintaining local volumes,
but still want fast local access to message logs.

=head2 Environment Variables

=over 4

=item * CONVOS_S3_ENDPOINT - S3 endpoint URL (default: https://fly.storage.tigris.dev)

=item * CONVOS_S3_BUCKET - S3 bucket name (required)

=item * CONVOS_S3_KEY - AWS access key ID (required)

=item * CONVOS_S3_SECRET - AWS secret access key (required)

=item * CONVOS_S3_REGION - AWS region or 'auto' for Tigris (default: auto)

=back

=head1 ATTRIBUTES

L<Convos::Core::Backend::S3> inherits all attributes from
L<Convos::Core::Backend::File> and implements the following new ones.

=head2 s3_endpoint

S3-compatible endpoint URL.

=head2 s3_bucket

S3 bucket name for object storage.

=head2 s3_key

AWS access key ID.
=head2 s3_secret

AWS secret access key.

=head2 s3_region

AWS region identifier or 'auto' for Tigris.

=head2 ua

L<Mojo::UserAgent> instance for HTTP requests.

=head1 METHODS

L<Convos::Core::Backend::S3> inherits all methods from
L<Convos::Core::Backend::File> and implements the following new ones.

=head2 save_object_p

  $p = $backend->save_object_p($obj);

Saves object to S3 as JSON. Returns a promise that resolves to C<$obj>.

=head2 load_object_p

  $p = $backend->load_object_p($obj);

Loads object data from S3. Returns a promise that resolves to a hashref
of object data, or an empty hashref if the object doesn't exist (404).

=head2 delete_object_p

  $p = $backend->delete_object_p($obj);

Deletes object from S3. Returns a promise that resolves to C<$obj>.

=head2 users_p

  $p = $backend->users_p;

Lists all users by querying S3 for user directories, then loading each
user.json file. Returns a promise that resolves to an arrayref of user
data hashes, sorted by registration date and email.

=head2 connections_p

  $p = $backend->connections_p($user);

Lists all connections for a user by querying S3 for connection directories
within the user's path, then loading each connection.json file. Returns a
promise that resolves to an arrayref of connection data hashes.

=head2 files_p

  $p = $backend->files_p($user, \%params);

Lists uploaded files for a user by querying S3 for files in the user's
upload directory. Each file consists of a C<.json> metadata file and a
C<.data> content file. Returns a promise that resolves to a hashref
containing a C<files> key with a L<Mojo::Collection> of file records.

Pagination is supported via C<after> and C<limit> parameters.

=head1 SEE ALSO

L<Convos::Core::Backend::File>, L<Convos>.
=cut
diff --git a/lib/Convos/Util/S3.pm b/lib/Convos/Util/S3.pm
new file mode 100644
index 000000000..5f4265b52
--- /dev/null
+++ b/lib/Convos/Util/S3.pm
@@ -0,0 +1,165 @@
# ABOUTME: AWS Signature Version 4 signing utility for S3-compatible APIs
# ABOUTME: Provides cryptographic request signing for Tigris, AWS S3, and other S3-compatible services
package Convos::Util::S3;
use Mojo::Base -strict, -signatures;

use Digest::SHA qw(hmac_sha256 hmac_sha256_hex sha256_hex);
use Mojo::URL;
use Mojo::Util qw(url_escape);

use Exporter qw(import);
our @EXPORT_OK = qw(sign_request);

# Sign an HTTP request with AWS Signature Version 4 (service "s3").
# Returns a hashref of headers to add to the outgoing request:
# Authorization, x-amz-date and x-amz-content-sha256.
sub sign_request (%params) {
  my $method  = uc($params{method} // 'GET');
  my $url     = Mojo::URL->new($params{url});
  my $headers = $params{headers} // {};
  my $payload = $params{payload} // '';
  my $key     = $params{key} or die 'key required';
  my $secret  = $params{secret} or die 'secret required';
  my $region  = $params{region} // 'auto';
  my $date    = $params{date} // _iso8601_now();

  # Datestamp (YYYYMMDD) for the credential scope; guard against bad input
  my ($datestamp) = $date =~ /^(\d{8})/ or die "invalid date: $date";

  # Calculate payload hash
  my $payload_hash = sha256_hex($payload);

  # Canonical headers: SigV4 requires LOWERCASE header names in both the
  # canonical header list and SignedHeaders. The old code kept the caller's
  # casing (e.g. "Content-Type") in SignedHeaders while lowercasing only the
  # canonical list, which made the two disagree and broke the signature.
  # Host is auto-injected from the URL; values are trimmed per the spec.
  my %canonical_headers = (
    'host' => $url->host_port,
    (map { lc($_) => _trim($headers->{$_}) } keys %$headers),
    'x-amz-content-sha256' => $payload_hash,
    'x-amz-date'           => $date,
  );

  my $signed_headers = join ';', sort keys %canonical_headers;
  my $canonical_headers_str
    = join "\n", map {"$_:$canonical_headers{$_}"} sort keys %canonical_headers;

  # Canonical URI (path component)
  my $canonical_uri = $url->path->to_string || '/';

  # Canonical query string: pairs sorted by name, then value
  my $canonical_query = '';
  if (my $query = $url->query) {
    my @pairs;
    for my $name (sort @{$query->names}) {
      for my $value (sort @{$query->every_param($name)}) {
        push @pairs, url_escape($name) . '=' . url_escape($value // '');
      }
    }
    $canonical_query = join '&', @pairs;
  }

  # Canonical request - the empty element terminates the header block, so
  # the joined string ends the last header line and adds the separator
  my $canonical_request = join "\n",
    $method,
    $canonical_uri,
    $canonical_query,
    $canonical_headers_str,
    '',
    $signed_headers,
    $payload_hash;

  # String to sign
  my $algorithm        = 'AWS4-HMAC-SHA256';
  my $credential_scope = "$datestamp/$region/s3/aws4_request";
  my $string_to_sign   = join "\n",
    $algorithm,
    $date,
    $credential_scope,
    sha256_hex($canonical_request);

  # Derive the signing key (HMAC chain: date -> region -> service -> request)
  my $k_date    = hmac_sha256($datestamp,     "AWS4$secret");
  my $k_region  = hmac_sha256($region,        $k_date);
  my $k_service = hmac_sha256('s3',           $k_region);
  my $k_signing = hmac_sha256('aws4_request', $k_service);

  # Calculate signature
  my $signature = hmac_sha256_hex($string_to_sign, $k_signing);

  # Build authorization header
  my $authorization = "$algorithm Credential=$key/$credential_scope, "
    . "SignedHeaders=$signed_headers, Signature=$signature";

  return {
    'Authorization'        => $authorization,
    'x-amz-date'           => $date,
    'x-amz-content-sha256' => $payload_hash,
  };
}

# Trim leading/trailing whitespace from a header value (SigV4 requirement)
sub _trim ($val) {
  $val //= '';
  $val =~ s/^\s+|\s+$//g;
  return $val;
}

# Current UTC time as ISO8601 basic format (YYYYMMDDTHHMMSSZ)
sub _iso8601_now {
  my @t = gmtime;
  return sprintf '%04d%02d%02dT%02d%02d%02dZ',
    $t[5] + 1900, $t[4] + 1, $t[3],
    $t[2], $t[1], $t[0];
}

1;

=encoding utf8

=head1 NAME

Convos::Util::S3 - AWS Signature Version 4 signing utility

=head1 SYNOPSIS

  use Convos::Util::S3 qw(sign_request);

  my $headers = sign_request(
    method  => 'PUT',
    url     => 'https://bucket.s3.amazonaws.com/key',
    headers => {'Content-Type' => 'application/json'},
    payload => '{"data":"value"}',
    key     => $access_key,
    secret  => $secret_key,
    region  => 'us-east-1',
  );

=head1 DESCRIPTION

L<Convos::Util::S3> provides AWS Signature Version 4 signing for S3-compatible
APIs including Tigris, AWS S3, and others.

=head1 FUNCTIONS

=head2 sign_request

  \%headers = sign_request(%params);

Signs an HTTP request for S3-compatible APIs.
Parameters:

=over 4

=item * method - HTTP method (GET, PUT, DELETE, etc.)

=item * url - Full URL including bucket and key

=item * headers - Optional hashref of additional headers

=item * payload - Request body (empty string for GET)

=item * key - Access key ID

=item * secret - Secret access key

=item * region - AWS region or 'auto' for Tigris

=item * date - Optional ISO8601 date (for testing)

=back

Returns hashref with Authorization, x-amz-date, and x-amz-content-sha256 headers.

=head1 SEE ALSO

L<Convos::Core::Backend::S3>.

=cut
diff --git a/t/backend-s3-integration.t b/t/backend-s3-integration.t
new file mode 100644
index 000000000..00bfa6f7f
--- /dev/null
+++ b/t/backend-s3-integration.t
@@ -0,0 +1,271 @@
#!perl
# ABOUTME: Integration tests for S3 backend against real Tigris/S3 endpoint
# ABOUTME: Requires environment variables to be set for S3 credentials

use lib '.';
use t::Helper;
use Mojo::JSON qw(decode_json encode_json);

t::Helper->subprocess_in_main_process;

# Skip all tests unless S3 credentials are provided
plan skip_all => 'Set CONVOS_S3_TEST_BUCKET, CONVOS_S3_KEY, CONVOS_S3_SECRET for integration tests'
  unless $ENV{CONVOS_S3_TEST_BUCKET} && $ENV{CONVOS_S3_KEY} && $ENV{CONVOS_S3_SECRET};

use_ok 'Convos::Core::Backend::S3';
use_ok 'Convos::Core';

# Unique prefix per run so parallel/re-runs do not collide
my $test_prefix = sprintf 'test-%d-%d', $$, time;
diag "Test prefix: $test_prefix";

# Create backend with real credentials
my $backend = Convos::Core::Backend::S3->new(
  s3_endpoint => $ENV{CONVOS_S3_ENDPOINT} || 'https://fly.storage.tigris.dev',
  s3_bucket   => $ENV{CONVOS_S3_TEST_BUCKET},
  s3_key      => $ENV{CONVOS_S3_KEY},
  s3_secret   => $ENV{CONVOS_S3_SECRET},
  s3_region   => $ENV{CONVOS_S3_REGION} || 'auto',
  home        => Mojo::File->new($ENV{CONVOS_HOME}),
);

# Wire the backend into a core object like production code does
my $core = Convos::Core->new(backend => $backend);

# Minimal stand-ins for Convos::Core::User / ::Connection
{
  package TestUser;
  use Mojo::Base -base;
  use Mojo::Date;    # was missing - registered() calls Mojo::Date->new (TODO confirm t::Helper does not preload it)
  use Mojo::Path;

  has email      => sub { die 'email required' };
  has uid        => sub { shift->email };
  has registered => sub { Mojo::Date->new->to_datetime };

  sub uri { Mojo::Path->new(shift->email . '/user.json') }

  sub TO_JSON {
    my ($self, $persist) = @_;
    return {email => $self->email, registered => $self->registered};
  }

  sub logf { }

  # Never a connection, so delete_object_p() skips unsubscribe handling
  sub isa {
    my ($self, $class) = @_;
    return 0 if $class eq 'Convos::Core::Connection';
    return $self->SUPER::isa($class);
  }
}

{
  package TestConnection;
  use Mojo::Base -base;
  use Mojo::Path;

  has id   => sub { die 'id required' };
  has user => sub { die 'user required' };
  has name => 'Test Connection';
  has url  => sub { Mojo::URL->new('irc://irc.example.com') };

  sub uri {
    my $self = shift;
    return Mojo::Path->new($self->user->email . '/' . $self->id . '/connection.json');
  }

  sub TO_JSON {
    my ($self, $persist) = @_;
    return {connection_id => $self->id, name => $self->name, url => $self->url->to_string};
  }

  sub logf { }

  # Pretend to be a connection so delete_object_p() exercises unsubscribe
  sub isa {
    my ($self, $class) = @_;
    return 1 if $class eq 'Convos::Core::Connection';
    return $self->SUPER::isa($class);
  }

  sub unsubscribe { }
}

# Use unique email for each test run
my $test_email = "${test_prefix}\@example.com";

subtest 'save and load user object' => sub {
  my $user = TestUser->new(email => $test_email);

  my $saved;
  $backend->save_object_p($user)->then(sub { $saved = shift })->$wait_success('save user');
  is $saved->email, $test_email, 'save_object_p returns user';

  my $loaded;
  $backend->load_object_p($user)->then(sub { $loaded = shift })->$wait_success('load user');
  is $loaded->{email}, $test_email, 'loaded email matches';
  ok $loaded->{registered}, 'loaded has registered field';
};

subtest 'list users' => sub {
  # At least one user exists thanks to the previous subtest
  my $users;
  $backend->users_p->then(sub { $users = shift })->$wait_success('list users');

  ok ref($users) eq 'ARRAY', 'users_p returns arrayref';
  my @test_users = grep { $_->{email} && $_->{email} =~ /^\Q$test_prefix\E/ } @$users;
  ok @test_users >= 1, 'found at least one test user';
};

subtest 'save and load connection' => sub {
  my $user = TestUser->new(email => $test_email);
  my $conn = TestConnection->new(id => 'irc-test', user => $user, name => 'Test IRC');

  my $saved;
  $backend->save_object_p($conn)->then(sub { $saved = shift })->$wait_success('save connection');
  is $saved->id, 'irc-test', 'save_object_p returns connection';

  my $loaded;
  $backend->load_object_p($conn)->then(sub { $loaded = shift })->$wait_success('load connection');
  is $loaded->{connection_id}, 'irc-test', 'loaded connection_id matches';
  is $loaded->{name}, 'Test IRC', 'loaded name matches';
};

subtest 'list connections for user' => sub {
  my $user = TestUser->new(email => $test_email);

  my $connections;
  $backend->connections_p($user)->then(sub { $connections = shift })
    ->$wait_success('list connections');

  ok ref($connections) eq 'ARRAY', 'connections_p returns arrayref';
  my @test_conns = grep { $_->{connection_id} && $_->{connection_id} eq 'irc-test' } @$connections;
  is scalar(@test_conns), 1, 'found test connection';
};

subtest 'delete connection' => sub {
  my $user = TestUser->new(email => $test_email);
  my $conn = TestConnection->new(id => 'irc-test', user => $user);

  my $deleted;
  $backend->delete_object_p($conn)->then(sub { $deleted = shift })
    ->$wait_success('delete connection');
  is $deleted->id, 'irc-test', 'delete_object_p returns connection';

  # Verify it's gone
  my $loaded;
  $backend->load_object_p($conn)->then(sub { $loaded = shift })
    ->$wait_success('load deleted connection');
  is_deeply $loaded, {}, 'deleted connection returns empty hash';
};

subtest 'delete user' => sub {
  my $user = TestUser->new(email => $test_email);

  my $deleted;
  $backend->delete_object_p($user)->then(sub { $deleted = shift })->$wait_success('delete user');
  is $deleted->email, $test_email, 'delete_object_p returns user';

  # Verify it's gone
  my $loaded;
  $backend->load_object_p($user)->then(sub { $loaded = shift })
    ->$wait_success('load deleted user');
  is_deeply $loaded, {}, 'deleted user returns empty hash';
};

subtest 'files_p with empty upload directory' => sub {
  my $user = TestUser->new(email => $test_email);

  my $result;
  $backend->files_p($user, {})->then(sub { $result = shift })->$wait_success('list files');

  isa_ok $result->{files}, 'Mojo::Collection', 'files is a Mojo::Collection';
  is $result->{files}->size, 0, 'no files for non-existent user';
};

done_testing;

=encoding utf8

=head1 NAME

t/backend-s3-integration.t - Integration tests for S3 backend

=head1 SYNOPSIS

  # Run with real Tigris credentials
  CONVOS_S3_TEST_BUCKET=my-test-bucket \
  CONVOS_S3_KEY=tid_xxxx \
  CONVOS_S3_SECRET=tsec_xxxx \
  prove -l t/backend-s3-integration.t

=head1 DESCRIPTION

These tests run against a real S3-compatible endpoint (Tigris by default)
to verify the S3 backend works correctly in production-like conditions.

=head2 Required Environment Variables

=over 4

=item * CONVOS_S3_TEST_BUCKET - The S3 bucket to use for testing

=item * CONVOS_S3_KEY - AWS/Tigris access key ID

=item * CONVOS_S3_SECRET - AWS/Tigris secret access key

=back

=head2 Optional Environment Variables

=over 4

=item * CONVOS_S3_ENDPOINT - S3 endpoint URL (default: https://fly.storage.tigris.dev)

=item * CONVOS_S3_REGION - AWS region (default: auto)

=back

=head1 NOTES

Tests create objects with a unique prefix based on PID and timestamp to
avoid conflicts. Objects are cleaned up after tests complete.

=cut
diff --git a/t/backend-s3.t b/t/backend-s3.t
new file mode 100644
index 000000000..0f8c2cd97
--- /dev/null
+++ b/t/backend-s3.t
@@ -0,0 +1,528 @@
#!perl
use lib '.';
use t::Helper;
use Mojo::JSON qw(decode_json encode_json);
use Mojo::URL;

t::Helper->subprocess_in_main_process;

# Mock UserAgent records every request and replies with $MOCK_RESPONSE
{
  package MockUserAgent;
  use Mojo::Base 'Mojo::UserAgent';

  our $MOCK_RESPONSE;
  our @REQUESTS;

  sub get_p {
    my ($self, $url, $headers) = @_;
    push @REQUESTS, {method => 'GET', url => $url, headers => $headers};
    return Mojo::Promise->resolve(MockTransaction->new(res => $MOCK_RESPONSE));
  }

  sub put_p {
    my ($self, $url, $headers, $body) = @_;
    push @REQUESTS, {method => 'PUT', url => $url, headers => $headers, body => $body};
    return Mojo::Promise->resolve(MockTransaction->new(res => $MOCK_RESPONSE));
  }

  sub delete_p {
    my ($self, $url, $headers) = @_;
    push @REQUESTS, {method => 'DELETE', url => $url, headers => $headers};
    return Mojo::Promise->resolve(MockTransaction->new(res => $MOCK_RESPONSE));
  }
}

{
  package MockTransaction;
  use Mojo::Base -base;
  has 'res';
}

{
  package MockResponse;
  use Mojo::Base -base;
  has 'code';
  has 'body' => '';

  # Guard against undef code to avoid numeric comparison warnings
  sub is_success { my $code = shift->code // 0; $code >= 200 && $code < 300 }
}

# Minimal object with the interface the backend needs
{
  package TestObject;
  use Mojo::Base -base;
  use Mojo::Path;    # was missing - uri() calls Mojo::Path->new (TODO confirm t::Helper does not preload it)
  has 'email' => 'test@example.com';

  sub uri { Mojo::Path->new('test@example.com/user.json') }
  sub TO_JSON { {email => shift->email} }
  sub logf { }

  # Claim to be a connection so delete_object_p() exercises unsubscribe
  sub isa {
    my ($self, $class) = @_;
    return 1 if $class eq 'Convos::Core::Connection';
    return $self->SUPER::isa($class);
  }

  sub unsubscribe { }
}

use_ok 'Convos::Core::Backend::S3';

# Create backend with test credentials
my $backend = Convos::Core::Backend::S3->new(
  s3_endpoint => 'https://fly.storage.tigris.dev',
  s3_bucket   => 'test-bucket',
  s3_key      => 'test-key',
  s3_secret   => 'test-secret',
s3_region   => 'auto',
  home        => Mojo::File->new($ENV{CONVOS_HOME}),
);

isa_ok $backend, 'Convos::Core::Backend::S3';
isa_ok $backend, 'Convos::Core::Backend::File';

# Test _s3_key mapping
is $backend->_s3_key(TestObject->new), 'users/test@example.com/user.json',
  '_s3_key prefixes user objects with users/';

# Test save_object_p
subtest 'save_object_p' => sub {
  my $obj = TestObject->new;

  # Mock successful PUT
  $MockUserAgent::MOCK_RESPONSE = MockResponse->new(code => 200);
  @MockUserAgent::REQUESTS = ();

  # Use the accessor instead of poking the object hash directly
  $backend->ua(MockUserAgent->new);

  my $result;
  $backend->save_object_p($obj)->then(sub { $result = shift })->$wait_success('save_object_p');

  is $result->email, 'test@example.com', 'save_object_p returns object';
  is scalar(@MockUserAgent::REQUESTS), 1, 'made one request';
  is $MockUserAgent::REQUESTS[0]{method}, 'PUT', 'used PUT method';
  like $MockUserAgent::REQUESTS[0]{url}, qr{/test-bucket/users/},
    'URL contains bucket and users prefix';
};

# Test load_object_p
subtest 'load_object_p with data' => sub {
  my $obj = TestObject->new;

  # Mock successful GET with JSON body
  $MockUserAgent::MOCK_RESPONSE
    = MockResponse->new(code => 200, body => encode_json({email => 'loaded@example.com'}));
  @MockUserAgent::REQUESTS = ();

  $backend->ua(MockUserAgent->new);

  my $data;
  $backend->load_object_p($obj)->then(sub { $data = shift })->$wait_success('load_object_p');

  is $data->{email}, 'loaded@example.com', 'load_object_p returns decoded data';
  is $MockUserAgent::REQUESTS[0]{method}, 'GET', 'used GET method';
};

subtest 'load_object_p on 404' => sub {
  my $obj = TestObject->new;

  # Mock 404 response
  $MockUserAgent::MOCK_RESPONSE = MockResponse->new(code => 404);
  @MockUserAgent::REQUESTS = ();

  $backend->ua(MockUserAgent->new);

  my $data;
  $backend->load_object_p($obj)->then(sub { $data = shift })->$wait_success('load_object_p 404');

  is_deeply $data, {}, 'load_object_p returns {} on 404';
};

# Test delete_object_p
subtest 'delete_object_p' => sub {
  my $obj = TestObject->new;

  # Mock successful DELETE
  $MockUserAgent::MOCK_RESPONSE = MockResponse->new(code => 204);
  @MockUserAgent::REQUESTS = ();

  $backend->ua(MockUserAgent->new);

  my $result;
  $backend->delete_object_p($obj)->then(sub { $result = shift })->$wait_success('delete_object_p');

  is $result->email, 'test@example.com', 'delete_object_p returns object';
  is $MockUserAgent::REQUESTS[0]{method}, 'DELETE', 'used DELETE method';
};

# Test _s3_list_p
# NOTE(review): the XML element tags below were stripped by extraction and are
# reconstructed from the selectors _s3_list_p parses (Contents > Key and
# CommonPrefixes > Prefix)
subtest '_s3_list_p' => sub {
  my $xml_response = <<'XML';
<?xml version="1.0" encoding="UTF-8"?>
<ListBucketResult>
  <Contents>
    <Key>users/joe@example.com/user.json</Key>
  </Contents>
  <Contents>
    <Key>users/jane@example.com/user.json</Key>
  </Contents>
  <CommonPrefixes>
    <Prefix>users/joe@example.com/</Prefix>
  </CommonPrefixes>
  <CommonPrefixes>
    <Prefix>users/jane@example.com/</Prefix>
  </CommonPrefixes>
</ListBucketResult>
XML

  # Mock successful LIST request
  $MockUserAgent::MOCK_RESPONSE = MockResponse->new(code => 200, body => $xml_response);
  @MockUserAgent::REQUESTS = ();

  $backend->ua(MockUserAgent->new);

  my $result;
  $backend->_s3_list_p('users/', '/')->then(sub { $result = shift })->$wait_success('_s3_list_p');

  is scalar(@{$result->{keys}}), 2, 'found 2 keys';
  is scalar(@{$result->{prefixes}}), 2, 'found 2 prefixes';
  is $result->{keys}[0], 'users/joe@example.com/user.json', 'first key correct';
  is $result->{prefixes}[0], 'users/joe@example.com/', 'first prefix correct';
  like $MockUserAgent::REQUESTS[0]{url}, qr{\?prefix=users(%2F|/)&delimiter=(%2F|/)},
    'URL has query params';
};

# Test users_p
subtest 'users_p' => sub {
  # Mock LIST response showing two user directories
  my $list_xml = <<'XML';
<?xml version="1.0" encoding="UTF-8"?>
<ListBucketResult>
  <CommonPrefixes>
    <Prefix>users/joe@example.com/</Prefix>
  </CommonPrefixes>
  <CommonPrefixes>
    <Prefix>users/jane@example.com/</Prefix>
  </CommonPrefixes>
</ListBucketResult>
XML

  # Mock GET responses for each user.json file
  my %user_data = (
    'users/joe@example.com/user.json' => encode_json({
      email      => 'joe@example.com',
      registered => '2023-01-01T00:00:00Z',
    }),
    'users/jane@example.com/user.json' => encode_json({
      email      => 'jane@example.com',
      registered => '2023-01-02T00:00:00Z',
    }),
  );

  # First call answers the LIST, later calls answer per-key GETs
  my $call_count = 0;
  my $mock_ua    = MockUserAgent->new;
  $mock_ua->{_get_p_handler} = sub {
    my ($self, $url, $headers) = @_;
    push @MockUserAgent::REQUESTS, {method => 'GET', url => $url, headers => $headers};

    if ($call_count++ == 0) {
      return Mojo::Promise->resolve(
        MockTransaction->new(res => MockResponse->new(code => 200, body => $list_xml)));
    }

    for my $key (keys %user_data) {
      if ($url =~ /\Q$key\E$/) {
        return Mojo::Promise->resolve(
          MockTransaction->new(res => MockResponse->new(code => 200, body => $user_data{$key})));
      }
    }
    return Mojo::Promise->resolve(MockTransaction->new(res => MockResponse->new(code => 404)));
  };

  # The old override did "shift->{_get_p_handler}->(@_)" which dropped $self
  # from @_ before calling the handler, so the handler's $self bound to the
  # URL and its $url bound to the headers hashref - keys never matched
  no warnings 'redefine';
  local *MockUserAgent::get_p = sub {
    my $self = shift;
    $self->{_get_p_handler}->($self, @_);
  };
  use warnings;

  @MockUserAgent::REQUESTS = ();
  $backend->ua($mock_ua);
  $call_count = 0;

  my $users;
  $backend->users_p->then(sub { $users = shift })->$wait_success('users_p');

  is scalar(@$users), 2, 'found 2 users';
  is $users->[0]{email}, 'joe@example.com', 'first user is joe';
  is $users->[1]{email}, 'jane@example.com', 'second user is jane';
};

# Test connections_p
subtest 'connections_p' => sub {
  # Create a mock user object whose uri() is the bare user directory
  my $user = TestObject->new(email => 'joe@example.com');
  $user->{_uri} = Mojo::Path->new('joe@example.com');
  no warnings 'redefine';
  local *TestObject::uri = sub { shift->{_uri} };
  use warnings;

  # Mock LIST response showing two connection directories
  my $list_xml = <<'XML';
<?xml version="1.0" encoding="UTF-8"?>
<ListBucketResult>
  <CommonPrefixes>
    <Prefix>users/joe@example.com/irc-libera/</Prefix>
  </CommonPrefixes>
  <CommonPrefixes>
    <Prefix>users/joe@example.com/irc-freenode/</Prefix>
  </CommonPrefixes>
</ListBucketResult>
XML

  # Mock GET responses for each connection.json file
  my %connection_data = (
    'users/joe@example.com/irc-libera/connection.json' => encode_json({
      connection_id => 'irc-libera',
      name          => 'Libera Chat',
    }),
    'users/joe@example.com/irc-freenode/connection.json' => encode_json({
      connection_id => 'irc-freenode',
      name          => 'Freenode',
    }),
  );

  my $call_count = 0;
  my $mock_ua    = MockUserAgent->new;
  $mock_ua->{_get_p_handler} = sub {
    my ($self, $url, $headers) = @_;
    push @MockUserAgent::REQUESTS, {method => 'GET', url => $url, headers => $headers};

    if ($call_count++ == 0) {
      return Mojo::Promise->resolve(
        MockTransaction->new(res => MockResponse->new(code => 200, body => $list_xml)));
    }

    for my $key (keys %connection_data) {
      if ($url =~ /\Q$key\E$/) {
        return Mojo::Promise->resolve(MockTransaction->new(
          res => MockResponse->new(code => 200, body => $connection_data{$key})));
      }
    }
    return Mojo::Promise->resolve(MockTransaction->new(res => MockResponse->new(code => 404)));
  };

  # Same $self-forwarding fix as in the users_p subtest
  no warnings 'redefine';
  local *MockUserAgent::get_p = sub {
    my $self = shift;
    $self->{_get_p_handler}->($self, @_);
  };
  use warnings;

  @MockUserAgent::REQUESTS = ();
  $backend->ua($mock_ua);
  $call_count = 0;

  my $connections;
  $backend->connections_p($user)->then(sub { $connections = shift })
    ->$wait_success('connections_p');

  is scalar(@$connections), 2, 'found 2 connections';
  is $connections->[0]{connection_id}, 'irc-libera', 'first connection is libera';
  is $connections->[1]{connection_id}, 'irc-freenode', 'second connection is freenode';
};

# Test files_p
subtest 'files_p' => sub {
  my $user = TestObject->new(email => 'joe@example.com');

  # Mock LIST response showing upload files (orphan.json has no .data pair)
  my $list_xml = <<'XML';
<?xml version="1.0" encoding="UTF-8"?>
<ListBucketResult>
  <Contents>
    <Key>users/joe@example.com/upload/abc123.json</Key>
  </Contents>
  <Contents>
    <Key>users/joe@example.com/upload/abc123.data</Key>
  </Contents>
  <Contents>
    <Key>users/joe@example.com/upload/def456.json</Key>
  </Contents>
  <Contents>
    <Key>users/joe@example.com/upload/def456.data</Key>
  </Contents>
  <Contents>
    <Key>users/joe@example.com/upload/orphan.json</Key>
  </Contents>
</ListBucketResult>
XML

  # Mock GET responses for each metadata JSON file
  my %file_data = (
    'users/joe@example.com/upload/abc123.json' => encode_json({
      filename => 'photo.jpg',
      saved    => '2023-01-02T00:00:00Z',
      size     => 1024,
    }),
    'users/joe@example.com/upload/def456.json' => encode_json({
      filename => 'document.pdf',
      saved    => '2023-01-01T00:00:00Z',
      size     => 2048,
    }),
  );

  my $call_count = 0;
  my $mock_ua    = MockUserAgent->new;
  $mock_ua->{_get_p_handler} = sub {
    my ($self, $url, $headers) = @_;
    push @MockUserAgent::REQUESTS, {method => 'GET', url => $url, headers => $headers};

    if ($call_count++ == 0) {
      return Mojo::Promise->resolve(
        MockTransaction->new(res => MockResponse->new(code => 200, body => $list_xml)));
    }

    for my $key (keys %file_data) {
      if ($url =~ /\Q$key\E$/) {
        return Mojo::Promise->resolve(
          MockTransaction->new(res => MockResponse->new(code => 200, body => $file_data{$key})));
      }
    }
    return Mojo::Promise->resolve(MockTransaction->new(res => MockResponse->new(code => 404)));
  };

  # Same $self-forwarding fix as in the users_p subtest
  no warnings 'redefine';
  local *MockUserAgent::get_p = sub {
    my $self = shift;
    $self->{_get_p_handler}->($self, @_);
  };
  use warnings;

  @MockUserAgent::REQUESTS = ();
  $backend->ua($mock_ua);
  $call_count = 0;

  my $result;
  $backend->files_p($user, {})->then(sub { $result = shift })->$wait_success('files_p');

  isa_ok $result->{files}, 'Mojo::Collection', 'files is a Mojo::Collection';
  is $result->{files}->size, 2, 'found 2 files (orphan.json excluded - no .data file)';

  # Files should be sorted by saved date descending
  is $result->{files}[0]{id}, 'abc123', 'first file is abc123 (newer)';
  is $result->{files}[0]{name}, 'photo.jpg', 'first file name is photo.jpg';
  is $result->{files}[1]{id}, 'def456', 'second file is def456 (older)';
  is $result->{files}[1]{name}, 'document.pdf', 'second file name is document.pdf';
};

# Test files_p with
pagination +subtest 'files_p with pagination' => sub { + my $user = TestObject->new(email => 'joe@example.com'); + + # Mock LIST response with many files for pagination testing + my @keys; + for my $i (1..5) { + push @keys, sprintf 'users/joe@example.com/upload/file%03d.json', $i; + push @keys, sprintf 'users/joe@example.com/upload/file%03d.data', $i; + } + my $list_xml = < + + + @{[join "\n \n \n ", @keys]} + + +XML + + # Create mock metadata for each file + my %file_data; + for my $i (1..5) { + my $key = sprintf 'users/joe@example.com/upload/file%03d.json', $i; + $file_data{$key} = encode_json({ + filename => sprintf('file%03d.txt', $i), + saved => sprintf('2023-01-%02dT00:00:00Z', 6 - $i), # file001 is newest + size => $i * 100, + }); + } + + my $call_count = 0; + my $mock_ua = MockUserAgent->new; + $mock_ua->{_get_p_handler} = sub { + my ($self, $url, $headers) = @_; + push @MockUserAgent::REQUESTS, {method => 'GET', url => $url, headers => $headers}; + + if ($call_count++ == 0) { + return Mojo::Promise->resolve( + MockTransaction->new(res => MockResponse->new(code => 200, body => $list_xml)) + ); + } else { + for my $key (keys %file_data) { + if ($url =~ /\Q$key\E$/) { + return Mojo::Promise->resolve( + MockTransaction->new(res => MockResponse->new(code => 200, body => $file_data{$key})) + ); + } + } + return Mojo::Promise->resolve( + MockTransaction->new(res => MockResponse->new(code => 404)) + ); + } + }; + + no warnings 'redefine'; + local *MockUserAgent::get_p = sub { + shift->{_get_p_handler}->(@_); + }; + use warnings; + + @MockUserAgent::REQUESTS = (); + $backend->{ua} = $mock_ua; + $call_count = 0; + + my $result; + $backend->files_p($user, {limit => 2})->then(sub { + $result = shift; + })->$wait_success('files_p with limit'); + + is $result->{files}->size, 2, 'limited to 2 files'; + is $result->{files}[0]{id}, 'file001', 'first file is file001 (newest)'; + ok $result->{next}, 'has next page indicator'; +}; + +done_testing; diff --git a/t/util-s3.t 
b/t/util-s3.t new file mode 100644 index 000000000..e27d8c42d --- /dev/null +++ b/t/util-s3.t @@ -0,0 +1,72 @@
use Mojo::Base -strict;
use Test::More;
use Convos::Util::S3 qw(sign_request);

# Tests for the AWS Signature Version 4 signer used by the S3 backend.
# All subtests use the AWS documentation example credentials and a fixed
# date so the generated signatures are deterministic.

subtest 'sign GET request' => sub {
  my $signed = sign_request(
    method  => 'GET',
    url     => 'https://bucket.s3.amazonaws.com/test.txt',
    headers => {'Host' => 'bucket.s3.amazonaws.com'},
    payload => '',
    key     => 'AKIAIOSFODNN7EXAMPLE',
    secret  => 'wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY',
    region  => 'us-east-1',
    date    => '20130524T000000Z',    # Fixed date for testing
  );

  ok $signed, 'got signed headers';
  ok $signed->{Authorization}, 'has Authorization header';
  like $signed->{Authorization}, qr/^AWS4-HMAC-SHA256/, 'Authorization starts with AWS4-HMAC-SHA256';
  like $signed->{Authorization}, qr/Credential=AKIAIOSFODNN7EXAMPLE/, 'Authorization contains credential';
  like $signed->{Authorization}, qr/SignedHeaders=/, 'Authorization contains SignedHeaders';
  like $signed->{Authorization}, qr/Signature=/, 'Authorization contains Signature';
  ok $signed->{'x-amz-date'}, 'has x-amz-date header';
  ok $signed->{'x-amz-content-sha256'}, 'has x-amz-content-sha256 header';
};

subtest 'sign PUT request with JSON body' => sub {
  my $payload = '{"test":"data"}';
  my $signed  = sign_request(
    method  => 'PUT',
    url     => 'https://bucket.s3.amazonaws.com/test.json',
    headers => {
      'Host'         => 'bucket.s3.amazonaws.com',
      'Content-Type' => 'application/json',
    },
    payload => $payload,
    key     => 'AKIAIOSFODNN7EXAMPLE',
    secret  => 'wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY',
    region  => 'us-east-1',
    date    => '20130524T000000Z',
  );

  ok $signed, 'got signed headers';
  ok $signed->{Authorization}, 'has Authorization header';
  like $signed->{Authorization}, qr/^AWS4-HMAC-SHA256/, 'Authorization starts with AWS4-HMAC-SHA256';
  like $signed->{Authorization}, qr/SignedHeaders=/, 'Authorization contains SignedHeaders';

  # SigV4 requires header names lowercased in CanonicalHeaders/SignedHeaders,
  # so a spec-conforming signer emits "content-type", not "Content-Type".
  # Match case-insensitively, confined to the SignedHeaders component.
  like $signed->{Authorization}, qr/SignedHeaders=[^,]*content-type/i, 'SignedHeaders includes Content-Type';

  ok $signed->{'x-amz-content-sha256'}, 'has x-amz-content-sha256 header';

  # SHA-256 of the empty string; a non-empty payload must hash differently
  isnt $signed->{'x-amz-content-sha256'}, 'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855',
    'payload hash is not empty string hash';
};

subtest 'sign request with query parameters' => sub {
  my $signed = sign_request(
    method  => 'GET',
    url     => 'https://bucket.s3.amazonaws.com/test.txt?max-keys=100&prefix=photos/',
    headers => {'Host' => 'bucket.s3.amazonaws.com'},
    payload => '',
    key     => 'AKIAIOSFODNN7EXAMPLE',
    secret  => 'wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY',
    region  => 'us-east-1',
    date    => '20130524T000000Z',
  );

  ok $signed, 'got signed headers';
  ok $signed->{Authorization}, 'has Authorization header';
  like $signed->{Authorization}, qr/^AWS4-HMAC-SHA256/, 'Authorization starts with AWS4-HMAC-SHA256';
  ok $signed->{'x-amz-date'}, 'has x-amz-date header';
  ok $signed->{'x-amz-content-sha256'}, 'has x-amz-content-sha256 header';
};

done_testing;