From 8525cd402ab7880760eb367139bde70d7526c316 Mon Sep 17 00:00:00 2001 From: Zoey de Souza Pessanha Date: Sat, 13 Jan 2024 15:18:46 -0300 Subject: [PATCH] feat: por from supabase-potion --- .earthlyignore | 28 + .envrc | 40 ++ .formatter.exs | 5 + .github/pull_request_template.md | 11 + .github/workflows/ci.yml | 39 ++ .github/workflows/release.yml | 30 + .gitignore | 40 ++ Earthfile | 33 + LICENSE | 13 + README.md | 1 + config/config.exs | 7 + flake.lock | 25 + flake.nix | 39 ++ lib/supabase.ex | 170 +++++ lib/supabase/application.ex | 26 + lib/supabase/client.ex | 200 ++++++ lib/supabase/client/auth.ex | 84 +++ lib/supabase/client/conn.ex | 54 ++ lib/supabase/client/db.ex | 29 + lib/supabase/client/global.ex | 27 + lib/supabase/client_registry.ex | 44 ++ lib/supabase/client_supervisor.ex | 54 ++ lib/supabase/fetcher.ex | 340 ++++++++++ lib/supabase/fetcher_behaviour.ex | 22 + lib/supabase/go_true.ex | 60 ++ lib/supabase/go_true/admin.ex | 126 ++++ lib/supabase/go_true/admin_behaviour.ex | 22 + lib/supabase/go_true/admin_handler.ex | 96 +++ lib/supabase/go_true/auth.ex | 97 +++ lib/supabase/go_true/pkce.ex | 18 + lib/supabase/go_true/plug.ex | 61 ++ lib/supabase/go_true/plug/verify_header.ex | 24 + .../go_true/schemas/admin_user_params.ex | 41 ++ lib/supabase/go_true/schemas/generate_link.ex | 119 ++++ .../go_true/schemas/invite_user_params.ex | 35 + .../go_true/schemas/pagination_params.ex | 24 + .../go_true/schemas/sign_in_request.ex | 67 ++ .../go_true/schemas/sign_in_with_id_token.ex | 60 ++ .../go_true/schemas/sign_in_with_oauth.ex | 85 +++ .../go_true/schemas/sign_in_with_password.ex | 46 ++ .../go_true/schemas/sign_up_request.ex | 50 ++ .../go_true/schemas/sign_up_with_password.ex | 98 +++ lib/supabase/go_true/session.ex | 46 ++ lib/supabase/go_true/user.ex | 98 +++ lib/supabase/go_true/user/factor.ex | 32 + lib/supabase/go_true/user/identity.ex | 45 ++ lib/supabase/go_true/user_handler.ex | 127 ++++ lib/supabase/go_true_behaviour.ex | 24 + lib/supabase/missing_supabase_config.ex | 60 ++ lib/supabase/storage.ex | 636 ++++++++++++++++++ lib/supabase/storage/action_error.ex | 5 + lib/supabase/storage/application.ex | 34 + lib/supabase/storage/bucket.ex | 127 ++++ lib/supabase/storage/cache.ex | 97 +++ lib/supabase/storage/cache_reloader.ex | 51 ++ lib/supabase/storage/endpoints.ex | 51 ++ .../storage/handlers/bucket_handler.ex | 133 ++++ .../storage/handlers/object_handler.ex | 226 +++++++ lib/supabase/storage/object.ex | 75 +++ lib/supabase/storage/object_options.ex | 57 ++ lib/supabase/storage/search_options.ex | 60 ++ lib/supabase/storage_behaviour.ex | 43 ++ lib/supabase/types/atom.ex | 30 + mix.exs | 61 ++ mix.lock | 27 + test/supabase_test.exs | 74 ++ test/test_helper.exs | 1 + 67 files changed, 4710 insertions(+) create mode 100644 .earthlyignore create mode 100644 .envrc create mode 100644 .formatter.exs create mode 100644 .github/pull_request_template.md create mode 100644 .github/workflows/ci.yml create mode 100644 .github/workflows/release.yml create mode 100644 .gitignore create mode 100644 Earthfile create mode 100644 LICENSE create mode 100644 README.md create mode 100644 config/config.exs create mode 100644 flake.lock create mode 100644 flake.nix create mode 100644 lib/supabase.ex create mode 100644 lib/supabase/application.ex create mode 100644 lib/supabase/client.ex create mode 100644 lib/supabase/client/auth.ex create mode 100644 lib/supabase/client/conn.ex create mode 100644 lib/supabase/client/db.ex create mode 100644 lib/supabase/client/global.ex create mode 
100644 lib/supabase/client_registry.ex create mode 100644 lib/supabase/client_supervisor.ex create mode 100644 lib/supabase/fetcher.ex create mode 100644 lib/supabase/fetcher_behaviour.ex create mode 100644 lib/supabase/go_true.ex create mode 100644 lib/supabase/go_true/admin.ex create mode 100644 lib/supabase/go_true/admin_behaviour.ex create mode 100644 lib/supabase/go_true/admin_handler.ex create mode 100644 lib/supabase/go_true/auth.ex create mode 100644 lib/supabase/go_true/pkce.ex create mode 100644 lib/supabase/go_true/plug.ex create mode 100644 lib/supabase/go_true/plug/verify_header.ex create mode 100644 lib/supabase/go_true/schemas/admin_user_params.ex create mode 100644 lib/supabase/go_true/schemas/generate_link.ex create mode 100644 lib/supabase/go_true/schemas/invite_user_params.ex create mode 100644 lib/supabase/go_true/schemas/pagination_params.ex create mode 100644 lib/supabase/go_true/schemas/sign_in_request.ex create mode 100644 lib/supabase/go_true/schemas/sign_in_with_id_token.ex create mode 100644 lib/supabase/go_true/schemas/sign_in_with_oauth.ex create mode 100644 lib/supabase/go_true/schemas/sign_in_with_password.ex create mode 100644 lib/supabase/go_true/schemas/sign_up_request.ex create mode 100644 lib/supabase/go_true/schemas/sign_up_with_password.ex create mode 100644 lib/supabase/go_true/session.ex create mode 100644 lib/supabase/go_true/user.ex create mode 100644 lib/supabase/go_true/user/factor.ex create mode 100644 lib/supabase/go_true/user/identity.ex create mode 100644 lib/supabase/go_true/user_handler.ex create mode 100644 lib/supabase/go_true_behaviour.ex create mode 100644 lib/supabase/missing_supabase_config.ex create mode 100644 lib/supabase/storage.ex create mode 100644 lib/supabase/storage/action_error.ex create mode 100644 lib/supabase/storage/application.ex create mode 100644 lib/supabase/storage/bucket.ex create mode 100644 lib/supabase/storage/cache.ex create mode 100644 lib/supabase/storage/cache_reloader.ex create mode 100644 lib/supabase/storage/endpoints.ex create mode 100644 lib/supabase/storage/handlers/bucket_handler.ex create mode 100644 lib/supabase/storage/handlers/object_handler.ex create mode 100644 lib/supabase/storage/object.ex create mode 100644 lib/supabase/storage/object_options.ex create mode 100644 lib/supabase/storage/search_options.ex create mode 100644 lib/supabase/storage_behaviour.ex create mode 100644 lib/supabase/types/atom.ex create mode 100644 mix.exs create mode 100644 mix.lock create mode 100644 test/supabase_test.exs create mode 100644 test/test_helper.exs diff --git a/.earthlyignore b/.earthlyignore new file mode 100644 index 0000000..8351e13 --- /dev/null +++ b/.earthlyignore @@ -0,0 +1,28 @@ +# Ignore git, but keep git HEAD and refs to access current commit hash if needed +.git +!.git/HEAD +!.git/refs +.github + +.elixir_ls +.vscode + +# Mix artifacts +/_build/ +/deps/ +*.ez + +# Generated on crash by the VM +erl_crash.dump + +# Static artifacts - These should be fetched and built inside the Docker image +/apps/plataforma_digital/assets/node_modules/ +/apps/plataforma_digital/priv/static/assets/ +/apps/plataforma_digital/priv/static/cache_manifest.json + +# Nix artifacts +/.postgres/ +/.direnv/ +/.nix-*/ +result + diff --git a/.envrc b/.envrc new file mode 100644 index 0000000..dea5ba3 --- /dev/null +++ b/.envrc @@ -0,0 +1,40 @@ +export GPG_TTY="$(tty)" + +# this allows mix to work on the local directory +export MIX_HOME=$PWD/.nix-mix +export HEX_HOME=$PWD/.nix-mix +export PATH=$MIX_HOME/bin:$HEX_HOME/bin:$PATH 
+export ERL_AFLAGS="-kernel shell_history enabled" + +export LANG=en_US.UTF-8 + +use flake + +# Setup postgresql +if test -d "/Applications/Postgres.app"; then + export DATABASE_USER="$(whoami)" + export DATABASE_PASSWORD="" +else + # postgres related + export DATABASE_USER="supabase_potion" + export DATABASE_PASSWORD="supabase_potion" + export PG_DATABASE="supabase_potion_dev" + # keep all your db data in a folder inside the project + export PGHOST="$PWD/.postgres" + export PGDATA="$PGHOST/data" + export PGLOG="$PGHOST/server.log" + + if [[ ! -d "$PGDATA" ]]; then + # initial setup of database server + initdb --auth=trust --no-locale --encoding=UTF8 -U "$DATABASE_USER" >/dev/null + + # point to correct unix sockets + echo "unix_socket_directories = '$PGHOST'" >> "$PGDATA/postgresql.conf" + # creates local database user + echo "CREATE USER $DATABASE_USER SUPERUSER;" | postgres --single -E postgres + # creates local database + echo "CREATE DATABASE $PG_DATABASE;" | postgres --single -E postgres + fi +fi + +source .env diff --git a/.formatter.exs b/.formatter.exs new file mode 100644 index 0000000..90a0853 --- /dev/null +++ b/.formatter.exs @@ -0,0 +1,5 @@ +# Used by "mix format" +[ + inputs: ["mix.exs", "config/*.exs"], + subdirectories: ["apps/*"] +] diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md new file mode 100644 index 0000000..bf98cc1 --- /dev/null +++ b/.github/pull_request_template.md @@ -0,0 +1,11 @@ +## Problem + + + +## Solution + + + +## Rationale + + diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 0000000..a87a3ef --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,39 @@ +name: ci + +on: + push: + branches: [ main ] + pull_request: + branches: [ main ] + +jobs: + build: + runs-on: ubuntu-latest + env: + GHCR_USERNAME: ${{ github.actor }} + GHCR_TOKEN: ${{ secrets.GHCR_TOKEN }} + FORCE_COLOR: 1 + steps: + - uses: actions/checkout@v3 + - name: Put back the git branch into git (Earthly uses it for tagging) + run: | + branch="" + if [ -n "$GITHUB_HEAD_REF" ]; then + branch="$GITHUB_HEAD_REF" + else + branch="${GITHUB_REF##*/}" + fi + git checkout -b "$branch" || true + - name: Docker Login + run: docker login https://ghcr.io --username "$GHCR_USERNAME" --password "$GHCR_TOKEN" + - name: Download latest earthly + run: "sudo /bin/sh -c 'wget https://github.com/earthly/earthly/releases/latest/download/earthly-linux-amd64 -O /usr/local/bin/earthly && chmod +x /usr/local/bin/earthly'" + + - name: Earthly version + run: earthly --version + + - name: Run CI + run: earthly -P +ci + + - name: Run Tests + run: earthly -P +test diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml new file mode 100644 index 0000000..4aec11c --- /dev/null +++ b/.github/workflows/release.yml @@ -0,0 +1,30 @@ +name: release + +on: + push: + tags: + - '*' + +env: + MIX_ENV: prod + +jobs: + publish: + runs-on: ubuntu-latest + strategy: + matrix: + elixir: [1.15.7] + otp: [26.1.2] + steps: + - uses: actions/checkout@v3 + - name: Set up Elixir + uses: erlef/setup-beam@v1 + with: + elixir-version: ${{ matrix.elixir }} + otp-version: ${{ matrix.otp }} + - name: Publish to Hex + uses: synchronal/hex-publish-action@v3 + with: + name: supabase_potion + key: ${{ secrets.HEX_PM_KEY }} + tag-release: true diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..ea0e15f --- /dev/null +++ b/.gitignore @@ -0,0 +1,40 @@ +# The directory Mix will write compiled artifacts to. 
+/_build/ + +# If you run "mix test --cover", coverage assets end up here. +/cover/ + +# The directory Mix downloads your dependencies sources to. +/deps/ + +# Where third-party dependencies like ExDoc output generated docs. +/doc/ + +# Ignore .fetch files in case you like to edit your project deps locally. +/.fetch + +# If the VM crashes, it generates a dump, let's ignore it too. +erl_crash.dump + +# Also ignore archive artifacts (built via "mix archive.build"). +*.ez + +# Temporary files, for example, from tests. +/tmp/ + +# Nix files +/.nix-mix/ +/.postgres/ +result + +# Secrets files +.env + +# LSP files +/.lexical/ + +# Nix files +result + +/.elixir_ls/ +/.elixir-tools/ diff --git a/Earthfile b/Earthfile new file mode 100644 index 0000000..c695654 --- /dev/null +++ b/Earthfile @@ -0,0 +1,33 @@ +VERSION 0.7 + +ARG MIX_ENV=test + +deps: + ARG ELIXIR=1.15.7 + ARG OTP=26.1.2 + FROM hexpm/elixir:${ELIXIR}-erlang-${OTP}-alpine-3.17.5 + RUN apk add --no-cache build-base + WORKDIR /src + RUN mix local.rebar --force + RUN mix local.hex --force + COPY mix.exs mix.lock ./ + COPY --dir lib . # check .earthlyignore + RUN mix deps.get + RUN mix deps.compile --force + RUN mix compile + SAVE ARTIFACT /src/_build AS LOCAL _build + SAVE ARTIFACT /src/deps AS LOCAL deps + +ci: + FROM +deps + COPY .formatter.exs . + RUN mix clean + RUN mix compile --warnings-as-errors + RUN mix format --check-formatted + RUN mix credo --strict + +test: + FROM +deps + COPY mix.exs mix.lock ./ + COPY --dir lib ./ + RUN mix test diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..d448fa4 --- /dev/null +++ b/LICENSE @@ -0,0 +1,13 @@ + DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE + Version 2, December 2004 + + Copyright (C) 2023 Zoey Pessanha + + Everyone is permitted to copy and distribute verbatim or modified + copies of this license document, and changing it is allowed as long + as the name is changed. + + DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE + TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION + + 0. You just DO WHAT THE FUCK YOU WANT TO. 
diff --git a/README.md b/README.md new file mode 100644 index 0000000..126afa4 --- /dev/null +++ b/README.md @@ -0,0 +1 @@ +# Supabase Auth (GoTrue) diff --git a/config/config.exs b/config/config.exs new file mode 100644 index 0000000..eb32525 --- /dev/null +++ b/config/config.exs @@ -0,0 +1,7 @@ +import Config + +if config_env() == :dev do + config :supabase_potion, + supabase_base_url: System.get_env("SUPABASE_URL"), + supabase_api_key: System.get_env("SUPABASE_KEY") +end diff --git a/flake.lock b/flake.lock new file mode 100644 index 0000000..4321d7f --- /dev/null +++ b/flake.lock @@ -0,0 +1,25 @@ +{ + "nodes": { + "nixpkgs": { + "locked": { + "lastModified": 1704842529, + "narHash": "sha256-OTeQA+F8d/Evad33JMfuXC89VMetQbsU4qcaePchGr4=", + "owner": "NixOS", + "repo": "nixpkgs", + "rev": "eabe8d3eface69f5bb16c18f8662a702f50c20d5", + "type": "github" + }, + "original": { + "id": "nixpkgs", + "type": "indirect" + } + }, + "root": { + "inputs": { + "nixpkgs": "nixpkgs" + } + } + }, + "root": "root", + "version": 7 +} diff --git a/flake.nix b/flake.nix new file mode 100644 index 0000000..019feeb --- /dev/null +++ b/flake.nix @@ -0,0 +1,39 @@ +{ + description = "A complete Supabase SDK for Elixir alchemists"; + + outputs = {nixpkgs, ...}: let + for-all-systems = function: + nixpkgs.lib.genAttrs [ + "x86_64-linux" + "aarch64-darwin" + ] (system: + function rec { + pkgs = nixpkgs.legacyPackages.${system}; + inherit (pkgs.beam.interpreters) erlang_26; + inherit (pkgs.beam) packagesWith; + beam-pkgs = packagesWith erlang_26; + deps = import ./nix/deps.nix { + inherit (pkgs) lib; + beamPackages = beam-pkgs; + }; + }); + in { + devShells = for-all-systems ({ + pkgs, + beam-pkgs, + ... + }: rec { + default = supabase-potion; + supabase-potion = pkgs.mkShell { + name = "supabase-potion"; + shellHook = "mkdir -p $PWD/.nix-mix"; + packages = with pkgs; + [beam-pkgs.elixir_1_15 earthly] + ++ lib.optional stdenv.isDarwin [ + darwin.apple_sdk.frameworks.CoreServices + darwin.apple_sdk.frameworks.CoreFoundation + ]; + }; + }); + }; +} diff --git a/lib/supabase.ex b/lib/supabase.ex new file mode 100644 index 0000000..6f1f3c5 --- /dev/null +++ b/lib/supabase.ex @@ -0,0 +1,170 @@ +defmodule Supabase do + @moduledoc """ + The main entrypoint for the Supabase SDK library. + + ## Installation + + The package can be installed by adding `supabase_potion` to your list of dependencies in `mix.exs`: + + def deps do + [ + {:supabase_potion, "~> 0.1"} + ] + end + + ## Usage + + After installing `:supabase_potion`, you can easily and dynamically manage different `Supabase.Client`s! + + ### Config + + The library offers a bunch of config options that can be used to control management of clients and other options. + + - `manage_clients` - whether to manage clients automatically, defaults to `true` + + You can set up the library on your `config.exs`: + + config :supabase, manage_clients: false + + ### Clients + + A `Supabase.Client` is an Agent that holds general information about Supabase, which can be used to interact with any of the child integrations, for example: `Supabase.Storage` or `Supabase.UI`. + + A `Supabase.Client` also holds a `Supabase.Client.Conn` with the connection information that is used to perform operations on different buckets, for example. + + `Supabase.Client` is defined as: + + - `:name` - the name of the client, started by `start_link/1` + - `:conn` - connection information, the only required option as it is vital to the `Supabase.Client`. 
+ - `:base_url` - The base URL of the Supabase API; it is usually in the form `https://.supabase.io`. + - `:api_key` - The API key used to authenticate requests to the Supabase API. + - `:access_token` - Token with specific permissions to access the Supabase API; it is usually the same as the API key. + - `:db` - default database options + - `:schema` - default schema to use, defaults to `"public"` + - `:global` - global options config + - `:headers` - additional headers to use on each request + - `:auth` - authentication options + - `:auto_refresh_token` - automatically refresh the token when it expires, defaults to `true` + - `:debug` - enable debug mode, defaults to `false` + - `:detect_session_in_url` - detect session in URL, defaults to `true` + - `:flow_type` - authentication flow type, defaults to `:implicit` + - `:persist_session` - persist session, defaults to `true` + - `:storage` - storage type + - `:storage_key` - storage key + + + ## Starting a Client + + You can then start a Client by calling `Supabase.Client.start_link/1`: + + iex> Supabase.Client.start_link(name: :my_client, client_info: %{db: %{schema: "public"}}) + {:ok, #PID<0.123.0>} + + Note that starting a Client this way is not recommended, since you will need to manage the `Supabase.Client` manually. Instead, you can use `Supabase.init_client!/1`, passing the Client options: + + iex> Supabase.init_client!(%{conn: %{base_url: "", api_key: ""}}) + #PID<0.123.0> + + ## Acknowledgements + + This package represents the complete SDK for Supabase. That means + that it includes all of the functionality of the Supabase client integrations, such as: + + - `Supabase.Fetcher` + - `Supabase.Storage` + - `supabase-postgrest` - TODO + - `supabase-realtime` - TODO + - `supabase-auth` - TODO + - `supabase-ui` - TODO + + ### Supabase Storage + + Supabase Storage is a service for developers to store large objects like images, videos, and other files. It is a hosted object storage service, like AWS S3, but with a simple API and strong consistency. + + ### Supabase PostgREST + + PostgREST is a web server that turns your PostgreSQL database directly into a RESTful API. The structural constraints and permissions in the database determine the API endpoints and operations. + + ### Supabase Realtime + + Supabase Realtime provides a realtime websocket API powered by PostgreSQL notifications. It allows you to listen to changes in your database, and instantly receive updates as soon as they happen. + + ### Supabase Auth + + Supabase Auth is a feature-complete user authentication system. It provides email & password sign in, email verification, password recovery, session management, and more, out of the box. + + ### Supabase UI + + Supabase UI is a set of UI components that help you quickly build Supabase-powered applications. It is built on top of Tailwind CSS and Headless UI, and is fully customizable. The package provides `Phoenix.LiveView` components! + + ### Supabase Fetcher + + Supabase Fetcher is a customized HTTP client for Supabase, mainly used internally by Supabase Potion. If you want complete control over how requests are made to any Supabase API, you can use it directly. 
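As a quick reference, here is a minimal, hedged sketch of the flow described above, assuming the `Supabase.init_client/1` and `Supabase.Client.retrieve_client/1` functions defined in this package; the project URL and API key are placeholders, not real credentials:

    # bootstrap a named client under the SDK-managed supervisor
    {:ok, _pid} =
      Supabase.init_client(%{
        name: :my_client,
        conn: %{
          base_url: "https://myproject.supabase.co",
          api_key: "my-anon-key"
        },
        db: %{schema: "public"}
      })

    # later, fetch the running client by name and hand it to a
    # service module such as `Supabase.Storage`
    {:ok, client} = Supabase.Client.retrieve_client(:my_client)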
+ """ + + alias Supabase.Client + alias Supabase.ClientRegistry + alias Supabase.ClientSupervisor + + alias Supabase.MissingSupabaseConfig + + @typep changeset :: Ecto.Changeset.t() + + @spec init_client(params) :: {:ok, pid} | {:error, changeset} + when params: Client.params() + def init_client(%{} = opts) do + conn = Map.get(opts, :conn, %{}) + opts = maybe_merge_config_from_application(conn, opts) + + with {:ok, opts} <- Client.parse(opts) do + name = ClientRegistry.named(opts.name) + client_opts = [name: name, client_info: opts] + ClientSupervisor.start_child({Client, client_opts}) + end + rescue + _ -> Client.parse(opts) + end + + def init_client!(%{} = opts) do + conn = Map.get(opts, :conn, %{}) + opts = maybe_merge_config_from_application(conn, opts) + + case init_client(opts) do + {:ok, pid} -> pid + {:error, changeset} -> raise Ecto.InvalidChangesetError, changeset: changeset, action: :init + end + end + + defp maybe_merge_config_from_application(%{base_url: _, api_key: _}, opts), do: opts + + defp maybe_merge_config_from_application(%{}, opts) do + base_url = + Application.get_env(:supabase_potion, :supabase_base_url) || + raise MissingSupabaseConfig, :url + + api_key = + Application.get_env(:supabase_potion, :supabase_api_key) || + raise MissingSupabaseConfig, :key + + Map.put(opts, :conn, %{base_url: base_url, api_key: api_key}) + end + + defmacro __using__(which) when is_atom(which) do + apply(__MODULE__, which, []) + end + + def schema do + quote do + use Ecto.Schema + import Ecto.Changeset + alias __MODULE__ + + @opaque changeset :: Ecto.Changeset.t() + + @callback changeset(__MODULE__.t(), map) :: changeset + @callback parse(map) :: {:ok, __MODULE__.t()} | {:error, changeset} + + @optional_callbacks changeset: 2, parse: 1 + end + end +end diff --git a/lib/supabase/application.ex b/lib/supabase/application.ex new file mode 100644 index 0000000..240aad2 --- /dev/null +++ b/lib/supabase/application.ex @@ -0,0 +1,26 @@ +defmodule Supabase.Application do + @moduledoc false + + use Application + + @finch_opts [name: Supabase.Finch, pools: %{:default => [size: 10]}] + + @impl true + def start(_start_type, _args) do + children = [ + {Finch, @finch_opts}, + if(manage_clients?(), do: Supabase.ClientSupervisor), + if(manage_clients?(), do: Supabase.ClientRegistry) + ] + + opts = [strategy: :one_for_one, name: Supabase.Supervisor] + + children + |> Enum.reject(&is_nil/1) + |> Supervisor.start_link(opts) + end + + defp manage_clients? do + Application.get_env(:supabase, :manage_clients, true) + end +end diff --git a/lib/supabase/client.ex b/lib/supabase/client.ex new file mode 100644 index 0000000..8591302 --- /dev/null +++ b/lib/supabase/client.ex @@ -0,0 +1,200 @@ +defmodule Supabase.Client do + @moduledoc """ + A client for interacting with Supabase. This module is responsible for + managing the connection pool and the connection options. + + ## Usage + + Usually you don't need to use this module directly, instead you should + use the `Supabase` module, available on `:supabase_potion` application. + + However, if you want to manage clients manually, you can leverage this + module to start and stop clients dynamically. 
To start a single + client manually, you need to add it to your supervision tree: + + defmodule MyApp.Application do + use Application + + def start(_type, _args) do + children = [ + {Supabase.Client, name: :supabase, client_info: %Supabase.Client{}} + ] + + opts = [strategy: :one_for_one, name: MyApp.Supervisor] + Supervisor.start_link(children, opts) + end + end + + Notice that starting a Client in this way, Client options will not be + validated, so you need to make sure that the options are correct. Otherwise + application will crash. + + ## Examples + + iex> Supabase.Client.start_link(name: :supabase, client_info: client_info) + {:ok, #PID<0.123.0>} + + iex> Supabase.Client.retrieve_client(:supabase) + %Supabase.Client{ + name: :supabase, + conn: %{ + base_url: "https://.supabase.io", + api_key: "", + access_token: "" + }, + db: %Supabase.Client.Db{ + schema: "public" + }, + global: %Supabase.Client.Global{ + headers: %{} + }, + auth: %Supabase.Client.Auth{ + auto_refresh_token: true, + debug: false, + detect_session_in_url: true, + flow_type: :implicit, + persist_session: true, + storage: nil, + storage_key: "sb--auth-token" + } + } + + iex> Supabase.Client.retrieve_connection(:supabase) + %Supabase.Client.Conn{ + base_url: "https://.supabase.io", + api_key: "", + access_token: "" + } + """ + + use Agent + use Ecto.Schema + + import Ecto.Changeset + + alias Supabase.Client.Auth + alias Supabase.Client.Conn + alias Supabase.Client.Db + alias Supabase.Client.Global + + alias Supabase.ClientRegistry + + defguard is_client(v) when is_atom(v) or is_pid(v) + + @type client :: atom | pid + + @type t :: %__MODULE__{ + name: atom, + conn: Conn.t(), + db: Db.t(), + global: Global.t(), + auth: Auth.t() + } + + @type params :: %{ + name: atom, + conn: Conn.params(), + db: Db.params(), + global: Global.params(), + auth: Auth.params() + } + + @primary_key false + embedded_schema do + field(:name, Supabase.Types.Atom) + + embeds_one(:conn, Conn) + embeds_one(:db, Db) + embeds_one(:global, Global) + embeds_one(:auth, Auth) + end + + @spec parse(params) :: {:ok, Supabase.Client.t()} | {:error, Ecto.Changeset.t()} + def parse(attrs) do + %__MODULE__{} + |> cast(attrs, [:name]) + |> cast_embed(:conn, required: true) + |> cast_embed(:db, required: false) + |> cast_embed(:global, required: false) + |> cast_embed(:auth, required: false) + |> validate_required([:name]) + |> maybe_put_assocs() + |> apply_action(:parse) + end + + @spec parse!(params) :: Supabase.Client.t() + def parse!(attrs) do + case parse(attrs) do + {:ok, changeset} -> + changeset + + {:error, changeset} -> + raise Ecto.InvalidChangesetError, changeset: changeset, action: :parse + end + end + + defp maybe_put_assocs(%{valid?: false} = changeset), do: changeset + + defp maybe_put_assocs(changeset) do + auth = get_change(changeset, :auth) + db = get_change(changeset, :db) + global = get_change(changeset, :global) + + changeset + |> maybe_put_assoc(:auth, auth, %Auth{}) + |> maybe_put_assoc(:db, db, %Db{}) + |> maybe_put_assoc(:global, global, %Global{}) + end + + defp maybe_put_assoc(changeset, key, nil, default), + do: put_change(changeset, key, default) + + defp maybe_put_assoc(changeset, _key, _assoc, _default), do: changeset + + def start_link(config) do + name = Keyword.get(config, :name) + client_info = Keyword.get(config, :client_info) + + Agent.start_link(fn -> maybe_parse(client_info) end, name: name || __MODULE__) + end + + defp maybe_parse(%__MODULE__{} = client), do: client + defp maybe_parse(params), do: parse!(params) + + 
@spec retrieve_client(name) :: {:ok, Supabase.Client.t()} | {:error, :client_not_started} + when name: atom | pid + def retrieve_client(source) do + if is_atom(source) do + pid = ClientRegistry.lookup(source) + {:ok, Agent.get(pid, & &1)} + else + {:ok, Agent.get(source, & &1)} + end + rescue + _ -> {:error, :client_not_started} + end + + @spec retrieve_connection(name) :: {:ok, Conn.t()} | {:error, :client_not_started} + when name: atom | pid + def retrieve_connection(source) do + with {:ok, client} <- retrieve_client(source) do + client.conn + end + end + + def retrieve_base_url(%__MODULE__{conn: conn}) do + conn.base_url + end + + def retrieve_url(%__MODULE__{} = client, uri) do + client + |> retrieve_base_url() + |> URI.merge(uri) + end + + def retrieve_auth_url(%__MODULE__{auth: auth} = client, uri \\ "/") do + client + |> retrieve_url(auth.uri) + |> URI.append_path(uri) + end +end diff --git a/lib/supabase/client/auth.ex b/lib/supabase/client/auth.ex new file mode 100644 index 0000000..9258393 --- /dev/null +++ b/lib/supabase/client/auth.ex @@ -0,0 +1,84 @@ +defmodule Supabase.Client.Auth do + @moduledoc """ + Auth configuration schema. This schema is used to configure the auth + options. This schema is embedded in the `Supabase.Client` schema. + + ## Fields + + - `:auto_refresh_token` - Automatically refresh the token when it expires. Defaults to `true`. + - `:debug` - Enable debug mode. Defaults to `false`. + - `:detect_session_in_url` - Detect session in URL. Defaults to `true`. + - `:flow_type` - Authentication flow type. Defaults to `"implicit"`. + - `:persist_session` - Persist session. Defaults to `true`. + - `:storage` - Storage type. + - `:storage_key` - Storage key. Default to `"sb-$host-auth-token"` where $host is the hostname of your Supabase URL. 
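To make the defaults above concrete, here is a hedged sketch of building this struct directly through the `changeset/3` function defined below; the project URL is only illustrative:

    {:ok, auth} =
      %Supabase.Client.Auth{}
      |> Supabase.Client.Auth.changeset(%{flow_type: :pkce}, "https://myproject.supabase.co")
      |> Ecto.Changeset.apply_action(:parse)

    auth.flow_type   #=> :pkce
    auth.storage_key #=> "sb-myproject-auth-token"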
+ + For more information about the auth options, see the documentation for + the [client](https://supabase.com/docs/reference/javascript/initializing) and + [auth guides](https://supabase.com/docs/guides/auth) + """ + + use Ecto.Schema + import Ecto.Changeset + + @type t :: %__MODULE__{ + uri: String.t(), + auto_refresh_token: boolean(), + debug: boolean(), + detect_session_in_url: boolean(), + flow_type: String.t(), + persist_session: boolean(), + storage: String.t(), + storage_key: String.t() + } + + @type params :: %{ + auto_refresh_token: boolean(), + debug: boolean(), + detect_session_in_url: boolean(), + flow_type: String.t(), + persist_session: boolean(), + storage: String.t(), + storage_key: String.t() + } + + @storage_key_template "sb-$host-auth-token" + + @primary_key false + embedded_schema do + field(:uri, :string, default: "/auth/v1") + field(:auto_refresh_token, :boolean, default: true) + field(:debug, :boolean, default: false) + field(:detect_session_in_url, :boolean, default: true) + field(:flow_type, Ecto.Enum, values: ~w[implicit pkce magicLink]a, default: :implicit) + field(:persist_session, :boolean, default: true) + field(:storage, :string) + field(:storage_key, :string) + end + + def changeset(schema, params, supabase_url) do + schema + |> cast( + params, + ~w[auto_refresh_token debug detect_session_in_url persist_session flow_type storage]a + ) + |> validate_required( + ~w[auto_refresh_token debug detect_session_in_url persist_session flow_type]a + ) + |> put_storage_key(supabase_url) + end + + defp put_storage_key(%{valid?: false} = changeset, _), do: changeset + + defp put_storage_key(changeset, url) do + host = + url + |> URI.new!() + |> Map.get(:host) + |> String.split(".") + |> List.first() + + storage_key = String.replace(@storage_key_template, "$host", host) + put_change(changeset, :storage_key, storage_key) + end +end diff --git a/lib/supabase/client/conn.ex b/lib/supabase/client/conn.ex new file mode 100644 index 0000000..0d1afb2 --- /dev/null +++ b/lib/supabase/client/conn.ex @@ -0,0 +1,54 @@ +defmodule Supabase.Client.Conn do + @moduledoc """ + Conn configuration for Supabase Client. This schema is used to configure + the connection options. This schema is embedded in the `Supabase.Client`. + + ## Fields + + - `:base_url` - The Supabase Project URL to use. This option is required. + - `:api_key` - The Supabase ProjectAPI Key to use. This option is required. + - `:access_token` - The access token to use. Default to the API key. + + For more information about the connection options, see the documentation for + the [client](https://supabase.com/docs/reference/javascript/initializing). + """ + + use Supabase, :schema + + @type t :: %__MODULE__{ + api_key: String.t(), + access_token: String.t(), + base_url: String.t() + } + + @type params :: %{ + api_key: String.t(), + access_token: String.t(), + base_url: String.t() + } + + @primary_key false + embedded_schema do + field(:api_key, :string) + field(:access_token, :string) + field(:base_url, :string) + end + + def changeset(schema \\ %__MODULE__{}, params) do + schema + |> cast(params, ~w[api_key access_token base_url]a) + |> maybe_put_access_token() + |> validate_required(~w[api_key base_url]a) + end + + defp maybe_put_access_token(changeset) do + api_key = get_change(changeset, :api_key) + token = get_change(changeset, :access_token) + + cond do + not changeset.valid? 
-> changeset + token -> changeset + true -> put_change(changeset, :access_token, api_key) + end + end +end diff --git a/lib/supabase/client/db.ex b/lib/supabase/client/db.ex new file mode 100644 index 0000000..7e9f172 --- /dev/null +++ b/lib/supabase/client/db.ex @@ -0,0 +1,29 @@ +defmodule Supabase.Client.Db do + @moduledoc """ + DB configuration schema. This schema is used to configure the database + options. This schema is embedded in the `Supabase.Client` schema. + + ## Fields + + - `:schema` - The default schema to use. Defaults to `"public"`. + + For more information about the database options, see the documentation for + the [client](https://supabase.com/docs/reference/javascript/initializing) and + [database guides](https://supabase.com/docs/guides/database). + """ + + use Ecto.Schema + import Ecto.Changeset + + @type t :: %__MODULE__{schema: String.t()} + @type params :: %{schema: String.t()} + + @primary_key false + embedded_schema do + field(:schema, :string, default: "public") + end + + def changeset(schema, params) do + cast(schema, params, [:schema]) + end +end diff --git a/lib/supabase/client/global.ex b/lib/supabase/client/global.ex new file mode 100644 index 0000000..22b616b --- /dev/null +++ b/lib/supabase/client/global.ex @@ -0,0 +1,27 @@ +defmodule Supabase.Client.Global do + @moduledoc """ + Global configuration schema. This schema is used to configure the global + options. This schema is embedded in the `Supabase.Client` schema. + + ## Fields + + - `:headers` - The default headers to use in any Supabase request. Defaults to `%{}`. + """ + + use Ecto.Schema + import Ecto.Changeset + + @type t :: %__MODULE__{headers: map} + @type params :: %{headers: map} + + @primary_key false + embedded_schema do + field(:headers, {:map, :string}, default: %{}) + end + + def changeset(schema, params) do + schema + |> cast(params, [:headers]) + |> validate_required([:headers]) + end +end diff --git a/lib/supabase/client_registry.ex b/lib/supabase/client_registry.ex new file mode 100644 index 0000000..1a9c027 --- /dev/null +++ b/lib/supabase/client_registry.ex @@ -0,0 +1,44 @@ +defmodule Supabase.ClientRegistry do + @moduledoc """ + Registry for the Supabase multiple Clients. This registry is used to + register and lookup the Supabase Clients defined by the user. + + This Registry is used by the `Supabase.ClientSupervisor` to register and + any `Supabase.Client` that is defined. That way, the `Supabase.ClientSupervisor` + can lookup the `Supabase.Client` by name and start it if it is not running. + + ## Usage + + This Registry is used internally by the `Supabase.Application` and should + start automatically when the application starts. + """ + + def start_link(_) do + Registry.start_link(keys: :unique, name: __MODULE__) + end + + def child_spec(opts) do + %{ + id: __MODULE__, + start: {__MODULE__, :start_link, [opts]}, + type: :worker, + restart: :permanent, + shutdown: 500 + } + end + + def named(key) when is_atom(key) do + {:via, Registry, {__MODULE__, key}} + end + + def register(key) when is_atom(key) do + Registry.register(__MODULE__, key, nil) + end + + def lookup(key) when is_atom(key) do + case Registry.lookup(__MODULE__, key) do + [{pid, _}] -> pid + [] -> nil + end + end +end diff --git a/lib/supabase/client_supervisor.ex b/lib/supabase/client_supervisor.ex new file mode 100644 index 0000000..6f96099 --- /dev/null +++ b/lib/supabase/client_supervisor.ex @@ -0,0 +1,54 @@ +defmodule Supabase.ClientSupervisor do + @moduledoc """ + A supervisor for all Clients. 
In most cases this should be started + automatically by the application supervisor and be used mainly by + the `Supabase` module, available on `:supabase_potion` application. + + Although if you want to manage Clients manually, you can leverage + this module to start and stop Clients dynamically. To start the supervisor + manually, you need to add it to your supervision tree: + + defmodule MyApp.Application do + use Application + + def start(_type, _args) do + children = [ + {Supabase.ClientSupervisor, []} + ] + + opts = [strategy: :one_for_one, name: MyApp.Supervisor] + Supervisor.start_link(children, opts) + end + end + + And then use the Supervisor to start custom clients: + + iex> Supabase.ClientSupervisor.start_child({Supabase.Client, opts}) + {:ok, #PID<0.123.0>} + + Notice that the Supabase Elixir SDK already starts a `Supabase.ClientSupervisor` + internally, so you don't need to start it manually. However, if you want to + manage clients manually, you can leverage this module to start and stop + clients dynamically. + + To manage manually the clients, you need to disable the internal management + into your application: + + config :supabase, :manage_clients, false + """ + + use DynamicSupervisor + + @impl true + def init(_init_arg) do + DynamicSupervisor.init(strategy: :one_for_one) + end + + def start_link(init) do + DynamicSupervisor.start_link(__MODULE__, init, name: __MODULE__) + end + + def start_child(child_spec) do + DynamicSupervisor.start_child(__MODULE__, child_spec) + end +end diff --git a/lib/supabase/fetcher.ex b/lib/supabase/fetcher.ex new file mode 100644 index 0000000..8b51770 --- /dev/null +++ b/lib/supabase/fetcher.ex @@ -0,0 +1,340 @@ +defmodule Supabase.Fetcher do + @moduledoc """ + A fundamental HTTP client for interfacing directly with Supabase services. + + `Supabase.Fetcher` provides the groundwork for sending HTTP requests to the Supabase infrastructure. This includes utilities for various HTTP methods such as GET, POST, PUT, DELETE, and functions to simplify the process of streaming data or uploading files. + + ## Key Features + + - **Low-level HTTP Interactions**: This module allows for raw HTTP requests to any URL, simplifying interactions with web resources. + - **Data Streaming**: Efficiently stream large data payloads, such as downloading files. + - **Request Customization**: Extensive header customization and utility functions for constructing requests tailored to your requirements. + - **Response Parsing**: Automatically converts JSON responses into Elixir maps and handles various response scenarios. + + ## Recommended Usage + + While `Supabase.Fetcher` is versatile and comprehensive, it operates at a very granular level. For most applications and needs, leveraging higher-level APIs that correspond to specific Supabase services is advisable: + + - `Supabase.Storage` - API to interact directly with buckets and objects in Supabase Storage. + + ## Disclaimer + + If your aim is to directly harness this module as a low-level HTTP client, due to missing features in other packages or a desire to craft a unique Supabase integration, you can certainly do so. However, always keep in mind that `Supabase.Storage` and other Supabase-oriented packages might offer better abstractions and ease-of-use. + + Use `Supabase.Fetcher` with a clear understanding of its features and operations. 
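As an example, here is a minimal, hedged sketch of issuing a raw request through this module, assuming the `apply_headers/3` and `get/4` functions defined below; the URL, path and API key are placeholders:

    # build the apikey/authorization headers expected by Supabase services
    headers = Supabase.Fetcher.apply_headers("my-anon-key")

    # issue a GET and resolve the JSON body into a map
    "https://myproject.supabase.co/rest/v1/todos"
    |> Supabase.Fetcher.get(nil, headers, resolve_json: true)
    |> case do
      {:ok, body} -> body
      {:error, reason} -> reason
    end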
+ """ + + @behaviour Supabase.FetcherBehaviour + + @spec version :: String.t() + def version do + {:ok, vsn} = :application.get_key(:supabase_potion, :vsn) + List.to_string(vsn) + end + + @spec new_connection(atom, url, body, headers) :: Finch.Request.t() + when url: String.t() | URI.t(), + body: binary | nil | {:stream, Enumerable.t()}, + headers: list(tuple) + defp new_connection(method, url, body, headers) do + headers = merge_headers(default_headers(), headers) + Finch.build(method, url, headers, body) + end + + @spec default_headers :: list(tuple) + defp default_headers do + [ + {"accept", "application/json"}, + {"x-client-info", "supabase-fetch-elixir/#{version()}"} + ] + end + + @doc """ + Makes a HTTP request to the desired URL, with default headers and + stream back the response. Good to stream large files downlaods. + + You can also pass custom `Finch` options directly to the underlying `Finch.stream/4` function. + Those options can be seen on the [Finch documentation](https://hexdocs.pm/finch/Finch.html#stream/5-options). + + ## Examples + + iex> {status, stream} = Supabase.Fetcher.stream("https://example.com") + iex> file = File.stream!("path/to/file", [], 4096) + Stream.run Stream.into(stream, file) + """ + @impl true + def stream(url, headers \\ [], opts \\ []) do + ref = make_ref() + task = spawn_stream_task(new_connection(:get, url, nil, headers), ref, opts) + status = receive(do: ({:chunk, {:status, status}, ^ref} -> status)) + + stream = + Stream.resource(fn -> {ref, task} end, &receive_stream(&1), fn {_ref, task} -> + Task.shutdown(task) + end) + + case {status, stream} do + {200, stream} -> {:ok, stream} + {s, _} when s >= 400 -> {:error, :not_found} + {s, _} when s >= 500 -> {:error, :server_error} + end + end + + defp spawn_stream_task(%Finch.Request{} = req, ref, opts) do + me = self() + + Task.async(fn -> + on_chunk = fn chunk, _acc -> send(me, {:chunk, chunk, ref}) end + Finch.stream(req, Supabase.Finch, nil, on_chunk, opts) + send(me, {:done, ref}) + end) + end + + defp receive_stream({ref, _task} = payload) do + receive do + {:chunk, {:data, data}, ^ref} -> {[data], payload} + {:done, ^ref} -> {:halt, payload} + end + end + + @doc """ + Simple GET request that format the response to a map or retrieve + the error reason as `String.t()`. + + ## Examples + + iex> Supabase.Fetcher.get("https://example.com") + {:ok, %{"key" => "value"}} + """ + def get(url) do + get(url, nil, [], []) + end + + def get(url, body) do + get(url, body, [], []) + end + + def get(url, body, headers) do + get(url, body, headers, []) + end + + @impl true + def get(url, body, headers, opts) do + resp = + :get + |> new_connection(url, Jason.encode_to_iodata!(body), headers) + |> Finch.request(Supabase.Finch) + + if opts[:resolve_json] do + format_response(resp) + else + resp + end + end + + @doc """ + Simple POST request that format the response to a map or retrieve + the error reason as `String.t()`. + + ## Examples + + iex> Supabase.Fetcher.post("https://example.com", %{key: "value"}) + {:ok, %{"key" => "value"}} + """ + @impl true + def post(url, body \\ nil, headers \\ []) do + headers = merge_headers(headers, [{"content-type", "application/json"}]) + + :post + |> new_connection(url, Jason.encode_to_iodata!(body), headers) + |> Finch.request(Supabase.Finch) + |> format_response() + end + + @doc """ + Simple PUT request that format the response to a map or retrieve + the error reason as `String.t()`. 
+ + ## Examples + + iex> Supabase.Fetcher.put("https://example.com", %{key: "value"}) + {:ok, %{"key" => "value"}} + """ + @impl true + def put(url, body, headers \\ []) do + headers = merge_headers(headers, [{"content-type", "application/json"}]) + + :put + |> new_connection(url, Jason.encode_to_iodata!(body), headers) + |> Finch.request(Supabase.Finch) + |> format_response() + end + + @doc """ + Simple DELETE request that formats the response to a map or retrieves + the error reason as `String.t()`. + + ## Examples + + iex> Supabase.Fetcher.delete("https://example.com", %{key: "value"}) + {:ok, %{"key" => "value"}} + + iex> Supabase.Fetcher.delete("https://example.com", %{key: "value"}) + {:error, :not_found} + """ + @impl true + def delete(url, body \\ nil, headers \\ []) do + headers = merge_headers(headers, [{"content-type", "application/json"}]) + + :delete + |> new_connection(url, Jason.encode_to_iodata!(body), headers) + |> Finch.request(Supabase.Finch) + |> format_response() + end + + @doc """ + Upload a binary to the desired URL. + + params: + - `method`: `:put` or `:post` + - `url`: the URL to upload the file + - `file`: the path to the file to upload + - `headers`: list of additional headers to append to the request + + ## Examples + + iex> Supabase.Fetcher.upload(:post, "https://example.com", "path/to/file") + {:ok, %{"key" => "value"}} + """ + @impl true + def upload(method, url, file, headers \\ []) do + body_stream = File.stream!(file, [{:read_ahead, 4096}], 1024) + %File.Stat{size: content_length} = File.stat!(file) + content_headers = [{"content-length", to_string(content_length)}] + headers = merge_headers(headers, content_headers) + conn = new_connection(method, url, {:stream, body_stream}, headers) + + conn + |> Finch.request(Supabase.Finch) + |> format_response() + end + + def get_full_url(base_url, path) do + URI.merge(base_url, path) + end + + @doc """ + Convenience function that, given an `apikey` and an optional `token`, returns the headers + to be used in a request to your Supabase API. 
+ + ## Examples + + iex> Supabase.Fetcher.apply_headers("apikey-value") + [{"apikey", "apikey-value"}, {"authorization", "Bearer apikey-value"}] + + iex> Supabase.Fetcher.apply_headers("apikey-value", "token-value") + [{"apikey", "apikey-value"}, {"authorization", "Bearer token-value"}] + """ + + def apply_headers(api_key, token \\ nil, headers \\ []) do + conn_headers = [ + {"apikey", api_key}, + {"authorization", "Bearer #{token || api_key}"} + ] + + merge_headers(conn_headers, headers) + end + + defp merge_headers(some, other) do + some = if is_list(some), do: some, else: Map.to_list(some) + other = if is_list(other), do: other, else: Map.to_list(other) + + some + |> Kernel.++(other) + |> Enum.dedup_by(fn {name, _} -> name end) + |> Enum.reject(fn {_, v} -> is_nil(v) end) + end + + def apply_client_headers(%Supabase.Client{} = client, token \\ nil, headers \\ []) do + client.conn.api_key + |> apply_headers(token || client.conn.access_token, client.global.headers) + |> merge_headers(headers) + end + + def get_header(%Finch.Response{headers: headers}, header) do + if h = Enum.find(headers, &(elem(&1, 0) == header)) do + elem(h, 1) + else + nil + end + end + + def get_header(%Finch.Response{} = resp, header, default) do + get_header(resp, header) || default + end + + def format_response({:error, %{reason: reason}}) do + {:error, reason} + end + + def format_response({:ok, %{status: 404}}) do + {:error, :not_found} + end + + def format_response({:ok, %{status: 401}}) do + {:error, :unauthorized} + end + + def format_response({:ok, %{status: 204}}) do + {:ok, :no_body} + end + + def format_response({:ok, %{status: s, body: body}}) when s in 200..300 do + result = + case Jason.decode(body) do + {:ok, body} -> body + {:error, _} when is_binary(body) -> body + end + + {:ok, result} + end + + def format_response({:ok, %{status: s, body: body}}) when s in 400..499 do + {:error, format_bad_request_error(Jason.decode!(body))} + end + + def format_response({:ok, %{status: s}}) when s >= 500 do + {:error, :server_error} + end + + defp format_bad_request_error(%{"message" => msg}) do + case msg do + "The resource was not found" -> :not_found + _ -> msg + end + end + + defp format_bad_request_error(%{"code" => 429, "msg" => msg}) do + if String.starts_with?(msg, "For security purposes,") do + [seconds] = Regex.run(~r/\d+/, msg, return: :binary) || ["undefined"] + {:error, {:rate_limit_until_seconds, seconds}} + else + case msg do + "Email rate limit exceeded" -> :email_rate_limit + end + end + end + + defp format_bad_request_error(%{"error" => err, "error_description" => desc}) do + case {err, desc} do + {"invalid_grant", nil} -> :invalid_grant + {"invalid_grant", "Invalid login credentials"} -> {:invalid_grant, :invalid_credentials} + {"invalid_grant", "Email not confirmed"} -> {:invalid_grant, :email_not_confirmed} + {"invalid_grant", err} -> {:invalid_grant, err} + end + end + + defp format_bad_request_error(err) do + err + end +end diff --git a/lib/supabase/fetcher_behaviour.ex b/lib/supabase/fetcher_behaviour.ex new file mode 100644 index 0000000..6c6b4be --- /dev/null +++ b/lib/supabase/fetcher_behaviour.ex @@ -0,0 +1,22 @@ +defmodule Supabase.FetcherBehaviour do + @moduledoc "Defines Supabase HTTP client callbacks" + + @typep nullable(a) :: a | nil + + @type url :: String.t() | URI.t() + @type body :: nullable(map) | map + @type headers :: [{String.t(), String.t()}] + @type response :: map | String.t() + @type reason :: String.t() | atom + @type method :: :get | :post + @type result 
:: {:ok, response} | {:error, reason} + + @callback get(url, body, headers, opts) :: result + when opts: [resolve_json: boolean] + @callback post(url, body, headers) :: result + @callback put(url, body, headers) :: result + @callback delete(url, body, headers) :: result + @callback upload(method, url, Path.t(), headers) :: result + @callback stream(url, headers, keyword) :: {:ok, stream} | {:error, reason} + when stream: Enumerable.t() +end diff --git a/lib/supabase/go_true.ex b/lib/supabase/go_true.ex new file mode 100644 index 0000000..c1a465b --- /dev/null +++ b/lib/supabase/go_true.ex @@ -0,0 +1,60 @@ +defmodule Supabase.GoTrue do + @moduledoc false + + import Supabase.Client, only: [is_client: 1] + + alias Supabase.Client + alias Supabase.GoTrue.Schemas.SignInWithIdToken + alias Supabase.GoTrue.Schemas.SignInWithOauth + alias Supabase.GoTrue.Schemas.SignInWithPassword + alias Supabase.GoTrue.Schemas.SignUpWithPassword + alias Supabase.GoTrue.Session + alias Supabase.GoTrue.User + alias Supabase.GoTrue.UserHandler + + @opaque client :: pid | module + + @behaviour Supabase.GoTrueBehaviour + + @impl true + def get_user(client, %Session{} = session) do + with {:ok, client} <- Client.retrieve_client(client), + {:ok, response} <- UserHandler.get_user(client, session.access_token) do + User.parse(response) + end + end + + @impl true + def sign_in_with_id_token(client, credentials) when is_client(client) do + with {:ok, client} <- Client.retrieve_client(client), + {:ok, credentials} <- SignInWithIdToken.parse(credentials) do + UserHandler.sign_in_with_id_token(client, credentials) + end + end + + @impl true + def sign_in_with_oauth(client, credentials) when is_client(client) do + with {:ok, client} <- Client.retrieve_client(client), + {:ok, credentials} <- SignInWithOauth.parse(credentials) do + url = UserHandler.get_url_for_provider(client, credentials) + {:ok, credentials.provider, url} + end + end + + @impl true + def sign_in_with_password(client, credentials) when is_client(client) do + with {:ok, client} <- Client.retrieve_client(client), + {:ok, credentials} <- SignInWithPassword.parse(credentials), + {:ok, response} <- UserHandler.sign_in_with_password(client, credentials) do + Session.parse(response) + end + end + + @impl true + def sign_up(client, credentials) when is_client(client) do + with {:ok, client} <- Client.retrieve_client(client), + {:ok, credentials} <- SignUpWithPassword.parse(credentials) do + UserHandler.sign_up(client, credentials) + end + end +end diff --git a/lib/supabase/go_true/admin.ex b/lib/supabase/go_true/admin.ex new file mode 100644 index 0000000..c975d51 --- /dev/null +++ b/lib/supabase/go_true/admin.ex @@ -0,0 +1,126 @@ +defmodule Supabase.GoTrue.Admin do + @moduledoc false + + import Supabase.Client, only: [is_client: 1] + + alias Supabase.Client + alias Supabase.Fetcher + alias Supabase.GoTrue.AdminHandler + alias Supabase.GoTrue.Schemas.AdminUserParams + alias Supabase.GoTrue.Schemas.GenerateLink + alias Supabase.GoTrue.Schemas.InviteUserParams + alias Supabase.GoTrue.Schemas.PaginationParams + alias Supabase.GoTrue.Session + alias Supabase.GoTrue.User + + @behaviour Supabase.GoTrue.AdminBehaviour + + @scopes ~w[global local others]a + + @impl true + def sign_out(client, %Session{} = session, scope) when is_client(client) and scope in @scopes do + with {:ok, client} <- Client.retrieve_client(client) do + case AdminHandler.sign_out(client, session.access_token, scope) do + {:ok, _} -> :ok + {:error, :not_found} -> :ok + {:error, :unauthorized} -> :ok + 
err -> err + end + end + end + + @impl true + def invite_user_by_email(client, email, options \\ %{}) when is_client(client) do + with {:ok, client} <- Client.retrieve_client(client), + {:ok, options} <- InviteUserParams.parse(options), + {:ok, response} <- AdminHandler.invite_user(client, email, options) do + User.parse(response) + end + end + + @impl true + def generate_link(client, attrs) when is_client(client) do + with {:ok, client} <- Client.retrieve_client(client), + {:ok, params} <- GenerateLink.parse(attrs), + {:ok, response} <- AdminHandler.generate_link(client, params) do + GenerateLink.properties(response) + end + end + + @impl true + def create_user(client, attrs) when is_client(client) do + with {:ok, client} <- Client.retrieve_client(client), + {:ok, params} <- AdminUserParams.parse(attrs), + {:ok, response} <- AdminHandler.create_user(client, params) do + User.parse(response) + end + end + + @impl true + def delete_user(client, user_id, opts \\ [should_soft_delete: false]) when is_client(client) do + with {:ok, client} <- Client.retrieve_client(client), + {:ok, _} <- AdminHandler.delete_user(client, user_id, opts) do + :ok + end + end + + @impl true + def get_user_by_id(client, user_id) when is_client(client) do + with {:ok, client} <- Client.retrieve_client(client), + {:ok, response} <- AdminHandler.get_user(client, user_id) do + User.parse(response) + end + end + + @impl true + def list_users(client, params \\ %{}) when is_client(client) do + with {:ok, client} <- Client.retrieve_client(client), + {:ok, params} <- PaginationParams.page_params(params), + {:ok, response} <- AdminHandler.list_users(client, params), + {:ok, users} <- User.parse_list(response.body["users"]) do + total = Fetcher.get_header(response, "x-total-count") + + links = + response + |> Fetcher.get_header("link", "") + |> String.split(",", trim: true) + + next = parse_next_page_count(links) + last = parse_last_page_count(links) + + attrs = %{next_page: (next != 0 && next) || nil, last_page: last, total: total} + {:ok, pagination} = PaginationParams.pagination(attrs) + + {:ok, users, pagination} + end + end + + @next_page_rg ~r/.+\?page=(\d).+rel=\"next\"/ + @last_page_rg ~r/.+\?page=(\d).+rel=\"last\"/ + + defp parse_next_page_count(links) do + parse_page_count(links, @next_page_rg) + end + + defp parse_last_page_count(links) do + parse_page_count(links, @last_page_rg) + end + + defp parse_page_count(links, regex) do + Enum.reduce_while(links, 0, fn link, acc -> + case Regex.run(regex, link) do + [_, page] -> {:halt, page} + _ -> {:cont, acc} + end + end) + end + + @impl true + def update_user_by_id(client, user_id, attrs) when is_client(client) do + with {:ok, client} <- Client.retrieve_client(client), + {:ok, params} <- AdminUserParams.parse(attrs), + {:ok, response} <- AdminHandler.update_user(client, user_id, params) do + User.parse(response) + end + end +end diff --git a/lib/supabase/go_true/admin_behaviour.ex b/lib/supabase/go_true/admin_behaviour.ex new file mode 100644 index 0000000..7161690 --- /dev/null +++ b/lib/supabase/go_true/admin_behaviour.ex @@ -0,0 +1,22 @@ +defmodule Supabase.GoTrue.AdminBehaviour do + @moduledoc false + + alias Supabase.Client + alias Supabase.GoTrue.Session + alias Supabase.GoTrue.User + + @type scope :: :global | :local | :others + @type invite_options :: %{data: map, redirect_to: String.t()} + + @callback sign_out(Client.client(), Session.t(), scope) :: :ok | {:error, atom} + @callback invite_user_by_email(Client.client(), email, invite_options) :: :ok | {:error, 
atom} + when email: String.t() + @callback generate_link(Client.client(), map) :: {:ok, String.t()} | {:error, atom} + @callback create_user(Client.client(), map) :: {:ok, User.t()} | {:error, atom} + @callback list_users(Client.client()) :: {:ok, list(User.t())} | {:error, atom} + @callback get_user_by_id(Client.client(), Ecto.UUID.t()) :: {:ok, User.t()} | {:error, atom} + @callback update_user_by_id(Client.client(), Ecto.UUID.t(), map) :: + {:ok, User.t()} | {:error, atom} + @callback delete_user(Client.client(), Ecto.UUID.t(), keyword) :: + {:ok, User.t()} | {:error, atom} +end diff --git a/lib/supabase/go_true/admin_handler.ex b/lib/supabase/go_true/admin_handler.ex new file mode 100644 index 0000000..b856eab --- /dev/null +++ b/lib/supabase/go_true/admin_handler.ex @@ -0,0 +1,96 @@ +defmodule Supabase.GoTrue.AdminHandler do + @moduledoc false + + alias Supabase.Client + alias Supabase.Fetcher + alias Supabase.GoTrue.Schemas.InviteUserParams + + @invite "/invite" + @generate_link "/admin/generate_link" + @users "/admin/users" + + defp single_user_endpoint(id) do + @users <> "/#{id}" + end + + defp sign_out(scope) do + "/logout?scope=#{scope}" + end + + def sign_out(%Client{} = client, access_token, scope) do + endpoint = Client.retrieve_auth_url(client, sign_out(scope)) + headers = Fetcher.apply_client_headers(client, access_token) + Fetcher.post(endpoint, nil, headers) + end + + def invite_user(%Client{} = client, email, %InviteUserParams{} = opts) do + headers = Fetcher.apply_client_headers(client, nil, %{"redirect_to" => opts.redirect_to}) + body = %{email: email, data: opts.data} + + client + |> Client.retrieve_auth_url(@invite) + |> Fetcher.post(body, headers) + end + + def generate_link(%Client{} = client, %{type: _, redirect_to: redirect_to} = params) do + headers = Fetcher.apply_client_headers(client, nil, %{"redirect_to" => redirect_to}) + + client + |> Client.retrieve_auth_url(@generate_link) + |> Fetcher.post(params, headers) + end + + def create_user(%Client{} = client, params) do + headers = Fetcher.apply_client_headers(client) + + client + |> Client.retrieve_auth_url(@users) + |> Fetcher.post(params, headers) + end + + def delete_user(%Client{} = client, id, params) do + headers = Fetcher.apply_client_headers(client) + body = %{should_soft_delete: params[:should_soft_delete] || false} + uri = single_user_endpoint(id) + + client + |> Client.retrieve_auth_url(uri) + |> Fetcher.delete(body, headers) + end + + def get_user(%Client{} = client, id) do + headers = Fetcher.apply_client_headers(client) + uri = single_user_endpoint(id) + + client + |> Client.retrieve_auth_url(uri) + |> Fetcher.delete(nil, headers) + end + + def list_users(%Client{} = client, params) do + body = %{ + page: to_string(Map.get(params, :page, 0)), + per_page: to_string(Map.get(params, :per_page, 0)) + } + + headers = Fetcher.apply_client_headers(client) + + client + |> Client.retrieve_auth_url(@users) + |> Fetcher.get(body, headers, resolve_json: false) + |> case do + {:ok, resp} when resp.status == 200 -> {:ok, Map.update!(resp, :body, &Jason.decode!/1)} + {:ok, resp} -> {:ok, Fetcher.format_response(resp)} + {:error, _} = err -> err + end + end + + def update_user(%Client{} = client, id, params) do + headers = Fetcher.apply_client_headers(client) + uri = single_user_endpoint(id) + + client + |> Client.retrieve_auth_url(uri) + |> Fetcher.put(params, headers) + end +end diff --git a/lib/supabase/go_true/auth.ex b/lib/supabase/go_true/auth.ex new file mode 100644 index 0000000..504b941 --- /dev/null +++ 
b/lib/supabase/go_true/auth.ex @@ -0,0 +1,97 @@ +defmodule Supabase.GoTrue.Auth do + @moduledoc false + + use Ecto.Schema + + import Ecto.Changeset + + @primary_key false + embedded_schema do + field(:url, :map) + field(:expiry_margin, :integer, default: 10) + + embeds_one :mfa, MFA, primary_key: false do + embeds_one :enroll, Enroll, primary_key: false do + field(:factor_type, Ecto.Enum, values: [:totp]) + field(:issue, :string) + field(:friendly_name, :string) + end + + embeds_one :unenroll, UnEnroll, primary_key: false do + field(:factor_id, :string) + end + + embeds_one :verify, Verify, primary_key: false do + field(:factor_id, :string) + field(:challenge_id, :string) + field(:code, :string) + end + + embeds_one :challenge, Challenge, primary_key: false do + field(:factor_id, :string) + end + + embeds_one :challenge_and_verify, ChallengeAndVerify, primary_key: false do + field(:factor_id, :string) + field(:code, :string) + end + end + + embeds_one :network_failure, NetWorkFailure, primary_key: false do + field(:max_retries, :integer, default: 10) + field(:retry_interval, :integer, default: 2) + end + end + + def parse(attrs, mfa \\ %{}) do + %__MODULE__{} + |> cast(attrs, ~w[expiry_margin]a) + |> put_assoc(:mfa, mfa, required: true) + |> cast_assoc(:network_failure, with: &network_failure_changeset/2, required: true) + end + + defp network_failure_changeset(failure, attrs) do + cast(failure, attrs, ~w[max_retries max_interval]) + end + + def parse_mfa(attrs) do + %__MODULE__.MFA{} + |> cast(attrs, []) + |> cast_assoc(:enroll, with: &enroll_changeset/2, required: true) + |> cast_assoc(:unenroll, with: &unenroll_changeset/2, required: true) + |> cast_assoc(:verify, with: &verify_changeset/2, required: true) + |> cast_assoc(:challenge, with: &challenge_changeset/2, required: true) + |> cast_assoc(:challenge_and_verify, with: &challenge_and_verify_changeset/2, required: true) + |> apply_action(:parse) + end + + defp enroll_changeset(enroll, attrs) do + enroll + |> cast(attrs, ~w[factor_type issuer friendly_name]a) + |> validate_required([:factor_type]) + end + + defp unenroll_changeset(unenroll, attrs) do + unenroll + |> cast(attrs, [:factor_id]) + |> validate_required([:factor_id]) + end + + defp verify_changeset(verify, attrs) do + verify + |> cast(attrs, [:factor_id, :challenge_id, :code]) + |> validate_required([:factor_id, :challenge_id, :code]) + end + + defp challenge_changeset(challenge, attrs) do + challenge + |> cast(attrs, [:factor_id]) + |> validate_required([:factor_id]) + end + + defp challenge_and_verify_changeset(challenge, attrs) do + challenge + |> cast(attrs, [:factor_id, :code]) + |> validate_required([:factor_id, :code]) + end +end diff --git a/lib/supabase/go_true/pkce.ex b/lib/supabase/go_true/pkce.ex new file mode 100644 index 0000000..c4cdc02 --- /dev/null +++ b/lib/supabase/go_true/pkce.ex @@ -0,0 +1,18 @@ +defmodule Supabase.GoTrue.PKCE do + @moduledoc false + + @verifier_length 56 + + def generate_verifier do + @verifier_length + |> :crypto.strong_rand_bytes() + |> Base.url_encode64(padding: false) + |> String.slice(0, @verifier_length) + end + + def generate_challenge(verifier) do + :sha256 + |> :crypto.hash(verifier) + |> Base.url_encode64(padding: false) + end +end diff --git a/lib/supabase/go_true/plug.ex b/lib/supabase/go_true/plug.ex new file mode 100644 index 0000000..060386d --- /dev/null +++ b/lib/supabase/go_true/plug.ex @@ -0,0 +1,61 @@ +defmodule Supabase.GoTrue.Plug do + @moduledoc false + + import Plug.Conn + import Supabase.Client, only: 
[is_client: 1] + + alias Plug.Conn + + @key "supabase_gotrue_token" + + def session_active?(%Conn{} = conn) do + key = :second |> System.os_time() |> to_string() + get_session(conn, key) == nil + rescue + ArgumentError -> false + end + + def authenticated?(%Conn{} = conn) do + not is_nil(conn.private[@key]) + end + + def put_current_token(%Conn{} = conn, token) do + put_private(conn, @key, token) + end + + def put_session_token(%Conn{} = conn, token) do + conn + |> put_session(@key, token) + |> configure_session(renew: true) + end + + def sig_in(%Conn{} = conn, client, attrs) when is_client(client) do + case maybe_sign_in(conn, client, attrs) do + {:ok, session} -> put_session_token(conn, session.access_token) + _ -> conn + end + end + + defp maybe_sign_in(conn, client, credentials) do + if session_active?(conn) do + Supabase.GoTrue.sign_in_with_password(client, credentials) + end + end + + def sign_out(%Conn{} = conn) do + if session_active?(conn) do + delete_session(conn, @key) + else + conn + end + end + + def fetch_token_from_cookies(%Conn{} = conn) do + token = conn.req_cookies[@key] || conn.req_cookies[to_string(@key)] + if token, do: {:ok, token}, else: {:error, :not_found} + end + + def current_token(%Conn{} = conn) do + conn.private[@key] + end +end diff --git a/lib/supabase/go_true/plug/verify_header.ex b/lib/supabase/go_true/plug/verify_header.ex new file mode 100644 index 0000000..0ce426e --- /dev/null +++ b/lib/supabase/go_true/plug/verify_header.ex @@ -0,0 +1,24 @@ +defmodule Supabase.GoTrue.Plug.VerifyHeader do + @moduledoc false + + import Plug.Conn + + alias Supabase.GoTrue + + @behaviour Plug + + @impl true + def init(opts \\ []), do: opts + + @impl true + def call(%Plug.Conn{} = conn, _opts) do + if GoTrue.Plug.current_token(conn) do + conn + else + case get_req_header(conn, :authorization) do + ["Bearer " <> token] -> GoTrue.Plug.put_current_token(conn, token) + _ -> halt(conn) + end + end + end +end diff --git a/lib/supabase/go_true/schemas/admin_user_params.ex b/lib/supabase/go_true/schemas/admin_user_params.ex new file mode 100644 index 0000000..ef68c53 --- /dev/null +++ b/lib/supabase/go_true/schemas/admin_user_params.ex @@ -0,0 +1,41 @@ +defmodule Supabase.GoTrue.Schemas.AdminUserParams do + @moduledoc false + + import Ecto.Changeset + + @types %{ + app_metadata: :map, + email_confirm: :boolean, + phone_confirm: :boolean, + ban_duration: :string, + role: :string, + email: :string, + phone: :string, + password: :string, + nonce: :string + } + + def parse(attrs) do + {%{}, @types} + |> cast(attrs, Map.keys(@types)) + |> validate_required_inclusion([:email, :phone]) + |> apply_action(:parse) + end + + defp validate_required_inclusion(%{valid?: false} = c, _), do: c + + defp validate_required_inclusion(changeset, fields) do + if Enum.any?(fields, &present?(changeset, &1)) do + changeset + else + changeset + |> add_error(:email, "at least an email or phone is required") + |> add_error(:phone, "at least an email or phone is required") + end + end + + defp present?(changeset, field) do + value = get_change(changeset, field) + value && value != "" + end +end diff --git a/lib/supabase/go_true/schemas/generate_link.ex b/lib/supabase/go_true/schemas/generate_link.ex new file mode 100644 index 0000000..9c18fbf --- /dev/null +++ b/lib/supabase/go_true/schemas/generate_link.ex @@ -0,0 +1,119 @@ +defmodule Supabase.GoTrue.Schemas.GenerateLink do + @moduledoc false + + import Ecto.Changeset + + @types ~w[signup invite magicLink recovery email_change_current email_change_new]a + + 
@options_types %{data: :map, redirect_to: :string} + + @base_types %{ + email: :string, + type: Ecto.ParameterizedType.init(Ecto.Enum, values: @types) + } + + @properties_types %{ + action_link: :string, + email_otp: :string, + hashed_token: :string, + redirect_to: :string, + verification_type: Ecto.ParameterizedType.init(Ecto.Enum, values: @types) + } + + def properties(attrs) do + {%{}, @properties_types} + |> cast(attrs, Map.keys(@properties_types)) + |> validate_required(Map.keys(@properties_types)) + |> apply_action(:parse) + end + + def parse(attrs) do + [ + &sign_up_params/1, + &invite_or_magic_link_params/1, + &recovery_params/1, + &email_change_params/1 + ] + |> Enum.reduce_while(nil, fn schema, _ -> + case result = schema.(attrs) do + {:ok, _} -> {:halt, result} + {:error, _} -> {:cont, result} + end + end) + end + + def sign_up_params(attrs) do + types = with_options(%{password: :string}) + + {%{}, types} + |> cast(attrs, Map.keys(types)) + |> validate_required([:email, :password, :type]) + |> validate_redirect_to() + |> validate_change(:type, fn _, type -> + check_type(type, :signup) + end) + |> apply_action(:parse) + end + + def invite_or_magic_link_params(attrs) do + types = with_options() + + {%{}, types} + |> cast(attrs, Map.keys(types) -- [:data]) + |> validate_required([:email, :type]) + |> validate_redirect_to() + |> validate_inclusion(:type, ~w[invite magicLink]a) + |> apply_action(:parse) + end + + def recovery_params(attrs) do + types = with_options() + + {%{}, types} + |> cast(attrs, Map.keys(types) -- [:data]) + |> validate_redirect_to() + |> validate_change(:type, fn _, type -> + check_type(type, :recovery) + end) + |> validate_required([:email, :type]) + |> apply_action(:parse) + end + + def email_change_params(attrs) do + types = with_options() + + {%{}, types} + |> cast(attrs, Map.keys(types) -- [:data]) + |> validate_required([:email, :type]) + |> validate_redirect_to() + |> validate_inclusion(:type, ~w[email_change_current email_change_new]a) + |> apply_action(:parse) + end + + defp with_options(types \\ %{}) do + @base_types + |> Map.merge(types) + |> Map.merge(@options_types) + end + + defp check_type(current, desired) do + if current == desired do + [] + else + [type: "need to be #{desired} for this schema"] + end + end + + defp validate_redirect_to(%{valid?: false} = changeset), do: changeset + + defp validate_redirect_to(changeset) do + if redirect_to = get_change(changeset, :redirect_to) do + case URI.new(redirect_to) do + {:ok, uri} -> put_change(changeset, :redirect_to, URI.to_string(uri)) + {:error, error} -> add_error(changeset, :redirect_to, error) + end + else + changeset + end + end +end diff --git a/lib/supabase/go_true/schemas/invite_user_params.ex b/lib/supabase/go_true/schemas/invite_user_params.ex new file mode 100644 index 0000000..9831f2e --- /dev/null +++ b/lib/supabase/go_true/schemas/invite_user_params.ex @@ -0,0 +1,35 @@ +defmodule Supabase.GoTrue.Schemas.InviteUserParams do + @moduledoc false + + use Supabase, :schema + + @type t :: %__MODULE__{ + data: map, + redirect_to: URI.t() + } + + embedded_schema do + field(:data, :map) + field(:redirect_to, :map) + end + + def parse(attrs) do + %__MODULE__{} + |> cast(attrs, [:data, :redirect_to]) + |> parse_uri() + |> apply_action(:parse) + end + + defp parse_uri(changeset) do + redirect_to = get_change(changeset, :redirect_to) + + if redirect_to do + case URI.new(redirect_to) do + {:ok, uri} -> put_change(changeset, :redirect_to, uri) + {:error, reason} -> add_error(changeset, :redirect_to, 
"Invalid URI: #{reason}") + end + else + changeset + end + end +end diff --git a/lib/supabase/go_true/schemas/pagination_params.ex b/lib/supabase/go_true/schemas/pagination_params.ex new file mode 100644 index 0000000..b5e6ecb --- /dev/null +++ b/lib/supabase/go_true/schemas/pagination_params.ex @@ -0,0 +1,24 @@ +defmodule Supabase.GoTrue.Schemas.PaginationParams do + @moduledoc false + + use Ecto.Schema + + import Ecto.Changeset + + def page_params(attrs) do + schema = %{page: :integer, per_page: :integer} + + {%{}, schema} + |> cast(attrs, Map.keys(schema)) + |> apply_action(:parse) + end + + def pagination(attrs) do + schema = %{next_page: :integer, last_page: :integer, total: :integer} + + {%{}, schema} + |> cast(attrs, Map.keys(schema)) + |> validate_required([:total, :last_page]) + |> apply_action(:parse) + end +end diff --git a/lib/supabase/go_true/schemas/sign_in_request.ex b/lib/supabase/go_true/schemas/sign_in_request.ex new file mode 100644 index 0000000..ebc2c5d --- /dev/null +++ b/lib/supabase/go_true/schemas/sign_in_request.ex @@ -0,0 +1,67 @@ +defmodule Supabase.GoTrue.Schemas.SignInRequest do + @moduledoc false + + use Ecto.Schema + + import Ecto.Changeset + + alias Supabase.GoTrue.Schemas.SignInWithIdToken + alias Supabase.GoTrue.Schemas.SignInWithPassword + + @derive Jason.Encoder + @primary_key false + embedded_schema do + field(:email, :string) + field(:phone, :string) + field(:password, :string) + field(:provider, :string) + field(:access_token, :string) + field(:nonce, :string) + field(:id_token, :string) + + embeds_one :gotrue_meta_security, GoTrueMetaSecurity, primary_key: false do + @derive Jason.Encoder + field(:captcha_token, :string) + end + end + + def create(%SignInWithIdToken{} = signin) do + attrs = SignInWithIdToken.to_sign_in_params(signin) + gotrue_meta = %__MODULE__.GoTrueMetaSecurity{captcha_token: signin.options.captcha_token} + + %__MODULE__{} + |> cast(attrs, [:provider, :id_token, :access_token, :nonce]) + |> put_embed(:gotrue_meta_security, gotrue_meta, required: true) + |> validate_required([:provider, :id_token]) + |> apply_action(:insert) + end + + def create(%SignInWithPassword{} = signin) do + attrs = SignInWithPassword.to_sign_in_params(signin) + gotrue_meta = %__MODULE__.GoTrueMetaSecurity{captcha_token: signin.options.captcha_token} + + %__MODULE__{} + |> cast(attrs, [:email, :phone, :password]) + |> put_embed(:gotrue_meta_security, gotrue_meta, required: true) + |> validate_required([:password]) + |> validate_required_inclusion([:email, :phone]) + |> apply_action(:insert) + end + + defp validate_required_inclusion(%{valid?: false} = c, _), do: c + + defp validate_required_inclusion(changeset, fields) do + if Enum.any?(fields, &present?(changeset, &1)) do + changeset + else + changeset + |> add_error(:email, "at least an email or phone is required") + |> add_error(:phone, "at least an email or phone is required") + end + end + + defp present?(changeset, field) do + value = get_change(changeset, field) + value && value != "" + end +end diff --git a/lib/supabase/go_true/schemas/sign_in_with_id_token.ex b/lib/supabase/go_true/schemas/sign_in_with_id_token.ex new file mode 100644 index 0000000..6a0d2b4 --- /dev/null +++ b/lib/supabase/go_true/schemas/sign_in_with_id_token.ex @@ -0,0 +1,60 @@ +defmodule Supabase.GoTrue.Schemas.SignInWithIdToken do + @moduledoc false + + use Supabase, :schema + + @type t :: %__MODULE__{ + provider: :google | :apple | :azure | :facebook, + token: String.t(), + access_token: String.t() | nil, + nonce: String.t() | 
nil, + options: + %__MODULE__.Options{ + captcha_token: String.t() | nil + } + | nil + } + + @providers ~w[google apple azure facebook]a + + embedded_schema do + field(:provider, Ecto.Enum, values: @providers) + field(:token, :string) + field(:access_token, :string) + field(:nonce, :string) + + embeds_one :options, Options, primary_key: false do + field(:captcha_token, :string) + end + end + + def to_sign_in_params(%__MODULE__{} = signin) do + signin + |> Map.take([:provider, :access_token, :nonce]) + |> Map.update!(:provider, &Atom.to_string/1) + |> Map.put(:id_token, signin.token) + end + + def parse(attrs) do + %__MODULE__{} + |> cast(attrs, ~w[provider token access_token nonce]a) + |> validate_required(~w[provider token]a) + |> cast_embed(:options, with: &options_changeset/2, required: false) + |> maybe_put_default_options() + |> apply_action(:parse) + end + + defp maybe_put_default_options(%{valid?: false} = c), do: c + + defp maybe_put_default_options(changeset) do + if get_embed(changeset, :options) do + changeset + else + put_embed(changeset, :options, %__MODULE__.Options{}) + end + end + + defp options_changeset(options, attrs) do + cast(options, attrs, ~w[email_redirect_to data captcha_token]a) + end +end diff --git a/lib/supabase/go_true/schemas/sign_in_with_oauth.ex b/lib/supabase/go_true/schemas/sign_in_with_oauth.ex new file mode 100644 index 0000000..a049b4e --- /dev/null +++ b/lib/supabase/go_true/schemas/sign_in_with_oauth.ex @@ -0,0 +1,85 @@ +defmodule Supabase.GoTrue.Schemas.SignInWithOauth do + @moduledoc false + + use Supabase, :schema + + alias Supabase.GoTrue.User.Identity + + @type t :: %__MODULE__{ + provider: Identity.providers(), + options: %__MODULE__.Options{ + redirect_to: String.t() | nil, + scopes: list(String.t()) | nil, + query_params: map, + skip_browser_redirect: boolean + } + } + + @primary_key false + embedded_schema do + field(:provider, Ecto.Enum, values: Identity.providers()) + + embeds_one :options, Options, primary_key: false do + field(:redirect_to, :string) + field(:scopes, {:array, :string}) + field(:query_params, :map, default: %{}) + field(:skip_browser_redirect, :boolean) + end + end + + def options_to_query(%__MODULE__{options: options, provider: provider}) do + query_params = Map.get(options, :query_params, %{}) + query = Map.take(options, [:redirect_to, :scopes]) + + query + |> Map.update!(:scopes, &join_scopes/1) + |> Map.put(:provider, provider) + |> Map.merge(query_params) + end + + defp join_scopes(nil), do: nil + + defp join_scopes(scopes) when is_list(scopes) do + Enum.join(scopes, " ") + end + + def parse(attrs) do + %__MODULE__{} + |> cast(attrs, [:provider]) + |> validate_required([:provider]) + |> cast_embed(:options, with: &options_changeset/2, required: false) + |> maybe_put_default_options() + |> apply_action(:parse) + end + + defp maybe_put_default_options(%{valid?: false} = c), do: c + + defp maybe_put_default_options(changeset) do + if get_embed(changeset, :options) do + changeset + else + put_embed(changeset, :options, %__MODULE__.Options{}) + end + end + + defp options_changeset(options, attrs) do + options + |> cast(attrs, ~w[redirect_to scopes query_params skip_browser_redirect]a) + |> parse_uri() + end + + defp parse_uri(%{valid?: false} = c), do: c + + defp parse_uri(changeset) do + redirect_to = get_change(changeset, :redirect_to) + + if redirect_to do + case URI.new(redirect_to) do + {:ok, uri} -> put_change(changeset, :redirect_to, uri) + {:error, reason} -> add_error(changeset, :redirect_to, "Invalid URI: 
#{reason}") + end + else + changeset + end + end +end diff --git a/lib/supabase/go_true/schemas/sign_in_with_password.ex b/lib/supabase/go_true/schemas/sign_in_with_password.ex new file mode 100644 index 0000000..cae15c8 --- /dev/null +++ b/lib/supabase/go_true/schemas/sign_in_with_password.ex @@ -0,0 +1,46 @@ +defmodule Supabase.GoTrue.Schemas.SignInWithPassword do + @moduledoc false + + use Ecto.Schema + + import Ecto.Changeset + + @primary_key false + embedded_schema do + field(:email, :string) + field(:phone, :string) + field(:password, :string) + + embeds_one :options, Options, primary_key: false do + field(:data, :map) + field(:captcha_token, :string) + end + end + + def to_sign_in_params(%__MODULE__{} = signin) do + Map.take(signin, [:email, :phone, :password]) + end + + def parse(attrs) do + %__MODULE__{} + |> cast(attrs, ~w[email phone password]a) + |> cast_embed(:options, with: &options_changeset/2, required: false) + |> validate_required([:password]) + |> maybe_put_default_options() + |> apply_action(:parse) + end + + defp maybe_put_default_options(%{valid?: false} = c), do: c + + defp maybe_put_default_options(changeset) do + if get_embed(changeset, :options) do + changeset + else + put_embed(changeset, :options, %__MODULE__.Options{}) + end + end + + defp options_changeset(options, attrs) do + cast(options, attrs, ~w[email_redirect_to data captcha_token]a) + end +end diff --git a/lib/supabase/go_true/schemas/sign_up_request.ex b/lib/supabase/go_true/schemas/sign_up_request.ex new file mode 100644 index 0000000..035a24d --- /dev/null +++ b/lib/supabase/go_true/schemas/sign_up_request.ex @@ -0,0 +1,50 @@ +defmodule Supabase.GoTrue.Schemas.SignUpRequest do + @moduledoc false + + use Ecto.Schema + + import Ecto.Changeset + + alias Supabase.GoTrue.Schemas.SignUpWithPassword + + @required_fields ~w[password]a + @optional_fields ~w[email phone data code_challenge code_challenge_method]a + + @derive Jason.Encoder + @primary_key false + embedded_schema do + field(:email, :string) + field(:phone, :string) + field(:password, :string) + field(:data, :map, default: %{}) + field(:code_challenge, :string) + field(:code_challenge_method, :string) + + embeds_one :gotrue_meta_security, GoTrueMetaSecurity, primary_key: false do + @derive Jason.Encoder + field(:captcha_token, :string) + end + end + + def changeset(signup \\ %__MODULE__{}, attrs, go_true_meta) do + signup + |> cast(attrs, @required_fields ++ @optional_fields) + |> put_embed(:gotrue_meta_security, go_true_meta) + |> validate_required(@required_fields) + |> apply_action(:insert) + end + + def create(%SignUpWithPassword{} = signup) do + attrs = SignUpWithPassword.to_sign_up_params(signup) + go_true_meta = %__MODULE__.GoTrueMetaSecurity{captcha_token: signup.options.captcha_token} + + changeset(attrs, go_true_meta) + end + + def create(%SignUpWithPassword{} = signup, code_challenge, code_method) do + attrs = SignUpWithPassword.to_sign_up_params(signup, code_challenge, code_method) + go_true_meta = %__MODULE__.GoTrueMetaSecurity{captcha_token: signup.options.captcha_token} + + changeset(attrs, go_true_meta) + end +end diff --git a/lib/supabase/go_true/schemas/sign_up_with_password.ex b/lib/supabase/go_true/schemas/sign_up_with_password.ex new file mode 100644 index 0000000..1482765 --- /dev/null +++ b/lib/supabase/go_true/schemas/sign_up_with_password.ex @@ -0,0 +1,98 @@ +defmodule Supabase.GoTrue.Schemas.SignUpWithPassword do + @moduledoc false + + use Ecto.Schema + + import Ecto.Changeset + + @type options :: %__MODULE__.Options{ + 
email_redirect_to: URI.t() | nil, + data: map | nil, + captcha_token: String.t() | nil + } + + @type t :: %__MODULE__{ + email: String.t() | nil, + password: String.t(), + phone: String.t() | nil, + options: list(options) | nil + } + + @derive Jason.Encoder + @primary_key false + embedded_schema do + field(:email, :string) + field(:password, :string) + field(:phone, :string) + + embeds_one :options, Options, primary_key: false do + field(:email_redirect_to, :map) + field(:data, :map) + field(:captcha_token, :string) + end + end + + def to_sign_up_params(%__MODULE__{} = signup) do + Map.take(signup, [:email, :password, :phone]) + end + + def to_sign_up_params(%__MODULE__{} = signup, code_challenge, code_method) do + signup + |> to_sign_up_params() + |> Map.merge(%{code_challange: code_challenge, code_challenge_method: code_method}) + end + + @spec validate(map) :: Ecto.Changeset.t() + def validate(attrs) do + %__MODULE__{} + |> cast(attrs, [:email, :password, :phone]) + |> cast_embed(:options, with: &options_changeset/2, required: false) + |> maybe_put_default_options() + |> validate_email_or_phone() + |> validate_required([:password]) + end + + defp maybe_put_default_options(%{valid?: false} = c), do: c + + defp maybe_put_default_options(changeset) do + if get_embed(changeset, :options) do + changeset + else + put_embed(changeset, :options, %__MODULE__.Options{}) + end + end + + defp options_changeset(options, attrs) do + cast(options, attrs, ~w[email_redirect_to data captcha_token]a) + end + + defp validate_email_or_phone(changeset) do + email = get_change(changeset, :email) + phone = get_change(changeset, :phone) + + case {email, phone} do + {nil, nil} -> + changeset + |> add_error(:email, "or phone can't be blank") + |> add_error(:phone, "or email can't be blank") + + {email, nil} when is_binary(email) -> + changeset + + {nil, phone} when is_binary(phone) -> + changeset + + {email, phone} when is_binary(email) and is_binary(phone) -> + changeset + |> add_error(:email, "can't be given with phone") + |> add_error(:phone, "can't be given with email") + end + end + + @spec parse(map) :: {:ok, t} | {:error, Ecto.Changeset.t()} + def parse(attrs) do + attrs + |> validate() + |> apply_action(:parse) + end +end diff --git a/lib/supabase/go_true/session.ex b/lib/supabase/go_true/session.ex new file mode 100644 index 0000000..4f19351 --- /dev/null +++ b/lib/supabase/go_true/session.ex @@ -0,0 +1,46 @@ +defmodule Supabase.GoTrue.Session do + @moduledoc false + + use Ecto.Schema + + import Ecto.Changeset + + alias Supabase.GoTrue.User + + @type t :: %__MODULE__{ + provider_token: String.t() | nil, + provider_refresh_token: String.t() | nil, + access_token: String.t(), + refresh_token: String.t(), + expires_in: integer, + expires_at: NaiveDateTime.t() | nil, + token_type: String.t(), + user: User.t() + } + + @required_fields ~w[access_token refresh_token expires_in token_type]a + @optional_fields ~w[provider_token provider_refresh_token expires_at]a + + @derive Jason.Encoder + @primary_key false + embedded_schema do + field(:provider_token, :string) + field(:provider_refresh_token, :string) + field(:access_token, :string) + field(:refresh_token, :string) + field(:expires_in, :integer) + field(:expires_at, :integer) + field(:token_type, :string) + + embeds_one(:user, User) + end + + @spec parse(map) :: {:ok, t} | {:error, Ecto.Changeset.t()} + def parse(attrs) do + %__MODULE__{} + |> cast(attrs, @required_fields ++ @optional_fields) + |> validate_required(@required_fields) + |> cast_embed(:user, 
required: false) + |> apply_action(:parse) + end +end diff --git a/lib/supabase/go_true/user.ex b/lib/supabase/go_true/user.ex new file mode 100644 index 0000000..cefe90e --- /dev/null +++ b/lib/supabase/go_true/user.ex @@ -0,0 +1,98 @@ +defmodule Supabase.GoTrue.User do + @moduledoc false + + use Ecto.Schema + + import Ecto.Changeset + + alias Supabase.GoTrue.User.Factor + alias Supabase.GoTrue.User.Identity + + @type t :: %__MODULE__{ + id: Ecto.UUID.t(), + app_metadata: map, + user_metadata: map, + aud: String.t(), + confirmation_sent_at: NaiveDateTime.t() | nil, + recovery_sent_at: NaiveDateTime.t() | nil, + email_change_sent_at: NaiveDateTime.t() | nil, + new_email: String.t() | nil, + new_phone: String.t() | nil, + invited_at: NaiveDateTime.t() | nil, + action_link: String.t() | nil, + email: String.t() | nil, + phone: String.t() | nil, + created_at: NaiveDateTime.t(), + confirmed_at: NaiveDateTime.t() | nil, + email_confirmed_at: NaiveDateTime.t() | nil, + phone_confirmed_at: NaiveDateTime.t() | nil, + last_sign_in_at: NaiveDateTime.t() | nil, + role: String.t() | nil, + updated_at: NaiveDateTime.t() | nil, + identities: list(Identity) | nil, + factors: list(Factor) | nil + } + + @required_fields ~w[id app_metadata app_metadata aud created_at]a + @optional_fields ~w[confirmation_sent_at recovery_sent_at email_change_sent_at new_email new_phone invited_at action_link email phone confirmed_at email_confirmed_at phone_confirmed_at last_sign_in_at role]a + + @primary_key {:id, :binary_id, autogenerate: false} + embedded_schema do + field(:app_metadata, :map) + field(:user_metadata, :map) + field(:aud, :string) + field(:confirmation_sent_at, :naive_datetime) + field(:recovery_sent_at, :naive_datetime) + field(:email_change_sent_at, :naive_datetime) + field(:new_email, :string) + field(:new_phone, :string) + field(:invited_at, :naive_datetime) + field(:action_link, :string) + field(:email, :string) + field(:phone, :string) + field(:confirmed_at, :naive_datetime) + field(:email_confirmed_at, :naive_datetime) + field(:phone_confirmed_at, :naive_datetime) + field(:last_sign_in_at, :naive_datetime) + field(:role, :string) + + embeds_many(:factors, Supabase.GoTrue.User.Factor) + embeds_many(:identities, Supabase.GoTrue.User.Identity) + + timestamps(inserted_at: :created_at) + end + + def changeset(user \\ %__MODULE__{}, attrs) do + user + |> cast(attrs, @required_fields ++ @optional_fields) + |> validate_required(@required_fields) + |> cast_embed(:identities, required: true) + |> cast_embed(:factors, required: false) + end + + def multiple_changeset(user \\ %__MODULE__{}, attrs) do + user + |> cast(attrs, @required_fields ++ @optional_fields) + |> validate_required(@required_fields) + end + + def parse(attrs) do + attrs + |> changeset() + |> apply_action(:parse) + end + + def parse_list(list_attrs) do + results = + Enum.reduce_while(list_attrs, [], fn attrs, acc -> + changeset = multiple_changeset(attrs) + + case result = apply_action(changeset, :parse) do + {:ok, user} -> {:cont, [user | acc]} + {:error, _} -> {:halt, result} + end + end) + + if is_list(results), do: {:ok, results}, else: results + end +end diff --git a/lib/supabase/go_true/user/factor.ex b/lib/supabase/go_true/user/factor.ex new file mode 100644 index 0000000..04cc512 --- /dev/null +++ b/lib/supabase/go_true/user/factor.ex @@ -0,0 +1,32 @@ +defmodule Supabase.GoTrue.User.Factor do + @moduledoc false + + use Ecto.Schema + + import Ecto.Changeset + + @type t :: %__MODULE__{ + id: Ecto.UUID.t(), + friendly_name: String.t() | 
nil, + factor_type: :totp, + status: :verified | :unverified, + created_at: NaiveDateTime.t(), + updated_at: NaiveDateTime.t() + } + + @derive Jason.Encoder + @primary_key {:id, :binary_id, autogenerate: false} + embedded_schema do + field(:friendly_name, :string) + field(:factor_type, Ecto.Enum, values: ~w[totp]a) + field(:status, Ecto.Enum, values: ~w[verified unverified]a) + + timestamps(inserted_at: :created_at) + end + + def changeset(factor \\ %__MODULE__{}, attrs) do + factor + |> cast(attrs, ~w[id friendly_name factor_type status created_at updated_at]a) + |> validate_required(~w[id factor_type status created_at updated_at]a) + end +end diff --git a/lib/supabase/go_true/user/identity.ex b/lib/supabase/go_true/user/identity.ex new file mode 100644 index 0000000..6b02ef6 --- /dev/null +++ b/lib/supabase/go_true/user/identity.ex @@ -0,0 +1,45 @@ +defmodule Supabase.GoTrue.User.Identity do + @moduledoc false + + use Ecto.Schema + + import Ecto.Changeset + + @type t :: %__MODULE__{ + id: Ecto.UUID.t(), + user_id: Ecto.UUID.t(), + provider: providers, + created_at: NaiveDateTime.t(), + updated_at: NaiveDateTime.t(), + identity_data: map, + last_sign_in_at: NaiveDateTime.t() | nil + } + + @providers ~w[apple azure bitbucket discord email facebook figma github gitlab google kakao keycloak linkedin linkedin_oidc notion phone slack spotify twitch twitter workos zoom fly]a + + @type providers :: + unquote(@providers |> Enum.map_join(" | ", &inspect/1) |> Code.string_to_quoted!()) + + @required_fields ~w[id provider]a + @optional_fields ~w[identity_data last_sign_in_at created_at updated_at user_id]a + + @derive Jason.Encoder + @primary_key {:id, :binary_id, autogenerate: false} + embedded_schema do + field(:identity_data, :map) + field(:provider, Ecto.Enum, values: @providers) + field(:last_sign_in_at, :naive_datetime) + + belongs_to(:user, Supabase.GoTrue.User, type: :binary_id) + + timestamps(inserted_at: :created_at) + end + + def changeset(identifty \\ %__MODULE__{}, attrs) do + identifty + |> cast(attrs, @required_fields ++ @optional_fields) + |> validate_required(@required_fields) + end + + def providers, do: @providers +end diff --git a/lib/supabase/go_true/user_handler.ex b/lib/supabase/go_true/user_handler.ex new file mode 100644 index 0000000..26d6e6f --- /dev/null +++ b/lib/supabase/go_true/user_handler.ex @@ -0,0 +1,127 @@ +defmodule Supabase.GoTrue.UserHandler do + @moduledoc false + + alias Supabase.Client + alias Supabase.Fetcher + alias Supabase.GoTrue.PKCE + alias Supabase.GoTrue.Schemas.SignInRequest + alias Supabase.GoTrue.Schemas.SignInWithIdToken + alias Supabase.GoTrue.Schemas.SignInWithOauth + alias Supabase.GoTrue.Schemas.SignInWithPassword + alias Supabase.GoTrue.Schemas.SignUpRequest + alias Supabase.GoTrue.Schemas.SignUpWithPassword + alias Supabase.GoTrue.User + + @single_user_uri "/user" + @sign_in_uri "/token" + @sign_up_uri "/signup" + @oauth_uri "/authorize" + @sso_uri "/sso" + + def get_user(%Client{} = client, access_token) do + headers = Fetcher.apply_client_headers(client, access_token) + + client + |> Client.retrieve_auth_url(@single_user_uri) + |> Fetcher.get(nil, headers, resolve_json: true) + end + + def sign_in_with_sso(%Client{} = client, %{} = signin) when client.auth.flow_type == :pkce do + {challenge, method} = generate_pkce() + + with {:ok, request} <- %{}, + headers = Fetcher.apply_client_headers(client), + endpoint = Client.retrieve_auth_url(client, @sso_uri), + {:ok, response} <- Fetcher.post(endpoint, request, headers) do + {:ok, 
response["data"]["url"]} + end + end + + def sign_in_with_sso(%Client{} = client, %{} = signin) do + with {:ok, request} <- %{}, + headers = Fetcher.apply_client_headers(client), + endpoint = Client.retrieve_auth_url(client, @sso_uri), + {:ok, response} <- Fetcher.post(endpoint, request, headers) do + {:ok, response["data"]["url"]} + end + end + + @grant_types ~w[password id_token] + + def sign_in_with_password(%Client{} = client, %SignInWithPassword{} = signin) do + with {:ok, request} <- SignInRequest.create(signin) do + sign_in_request(client, request, "password") + end + end + + def sign_in_with_id_token(%Client{} = client, %SignInWithIdToken{} = signin) do + with {:ok, request} <- SignInRequest.create(signin) do + sign_in_request(client, request, "id_token") + end + end + + defp sign_in_request(%Client{} = client, %SignInRequest{} = request, grant_type) + when grant_type in @grant_types do + query = URI.encode_query(%{grant_type: grant_type}) + headers = Fetcher.apply_client_headers(client) + + client + |> Client.retrieve_auth_url(@sign_in_uri) + |> URI.append_query(query) + |> Fetcher.post(request, headers) + end + + def sign_up(%Client{} = client, %SignUpWithPassword{} = signup) + when client.auth.flow_type == :pkce do + {challenge, method} = generate_pkce() + + with {:ok, request} <- SignUpRequest.create(signup, challenge, method), + headers = Fetcher.apply_client_headers(client), + endpoint = Client.retrieve_auth_url(client, @sign_up_uri), + {:ok, response} <- Fetcher.post(endpoint, request, headers), + {:ok, user} <- User.parse(response) do + {:ok, user, challenge} + end + end + + def sign_up(%Client{} = client, %SignUpWithPassword{} = signup) do + with {:ok, request} <- SignUpRequest.create(signup), + headers = Fetcher.apply_client_headers(client), + endpoint = Client.retrieve_auth_url(client, @sign_up_uri), + {:ok, response} <- Fetcher.post(endpoint, request, headers) do + User.parse(response) + end + end + + def get_url_for_provider(%Client{} = client, %SignInWithOauth{} = oauth) + when client.auth.flow_type == :pkce do + {challenge, method} = generate_pkce() + pkce_query = %{code_challenge: challenge, code_challenge_method: method} + oauth_query = SignInWithOauth.options_to_query(oauth) + + client + |> Client.retrieve_auth_url(@oauth_uri) + |> append_query(Map.merge(pkce_query, oauth_query)) + end + + def get_url_for_provider(%Client{} = client, %SignInWithOauth{} = oauth) do + oauth_query = SignInWithOauth.options_to_query(oauth) + + client + |> Client.retrieve_auth_url(@oauth_uri) + |> append_query(oauth_query) + end + + defp append_query(%URI{} = uri, query) do + query = Map.filter(query, &(not is_nil(elem(&1, 1)))) + encoded = URI.encode_query(query) + URI.append_query(uri, encoded) + end + + defp generate_pkce do + verifier = PKCE.generate_verifier() + challenge = PKCE.generate_challenge(verifier) + method = if verifier == challenge, do: "plain", else: "s256" + {challenge, method} + end +end diff --git a/lib/supabase/go_true_behaviour.ex b/lib/supabase/go_true_behaviour.ex new file mode 100644 index 0000000..349977d --- /dev/null +++ b/lib/supabase/go_true_behaviour.ex @@ -0,0 +1,24 @@ +defmodule Supabase.GoTrueBehaviour do + @moduledoc false + + alias Supabase.Client + alias Supabase.GoTrue.Schemas.SignInWithIdToken + alias Supabase.GoTrue.Schemas.SignInWithOauth + alias Supabase.GoTrue.Schemas.SignInWithPassword + alias Supabase.GoTrue.Schemas.SignUpWithPassword + alias Supabase.GoTrue.Session + alias Supabase.GoTrue.User + + @type sign_in_response :: + {:ok, 
Session.t()}
+          | {:error, :invalid_grant}
+          | {:error, {:invalid_grant, :invalid_credentials}}
+
+  @callback get_user(Client.client(), Session.t()) :: {:ok, User.t()} | {:error, atom}
+  @callback sign_in_with_oauth(Client.client(), SignInWithOauth.t()) :: {:ok, atom, URI.t()}
+  @callback sign_in_with_id_token(Client.client(), SignInWithIdToken.t()) :: sign_in_response
+  @callback sign_in_with_password(Client.client(), SignInWithPassword.t()) ::
+              sign_in_response
+  @callback sign_up(Client.client(), SignUpWithPassword.t()) ::
+              {:ok, User.t(), binary} | {:error, atom}
+end
diff --git a/lib/supabase/missing_supabase_config.ex b/lib/supabase/missing_supabase_config.ex
new file mode 100644
index 0000000..1aff82a
--- /dev/null
+++ b/lib/supabase/missing_supabase_config.ex
@@ -0,0 +1,60 @@
+defmodule Supabase.MissingSupabaseConfig do
+  defexception [:message]
+
+  @impl true
+  def exception(config) do
+    message = """
+    Missing #{if config == :key, do: "API Key", else: "Base URL"} configuration for Supabase.Fetcher
+    Please ensure you have added the following to your config/runtime.exs file:
+
+      import Config
+
+      config :supabase,
+        supabase_url: System.fetch_env!("SUPABASE_BASE_URL"),
+        supabase_key: System.fetch_env!("SUPABASE_API_KEY")
+
+    Remember to set the environment variables SUPABASE_BASE_URL and SUPABASE_API_KEY
+    if you choose this option. Otherwise, you can pass the values directly in the config file.
+
+    Alternatively, you can pass the values directly to the `Supabase.init_client!/1` function:
+
+      iex> Supabase.init_client!(%{
+        conn: %{
+          base_url: System.fetch_env!("SUPABASE_BASE_URL"),
+          api_key: System.fetch_env!("SUPABASE_API_KEY")
+        }
+      })
+
+    #{if config == :key, do: missing_key_config_walkthrough(), else: missing_url_config_walkthrough()}
+    """
+
+    %__MODULE__{message: message}
+  end
+
+  defp missing_url_config_walkthrough do
+    """
+    You can find your Supabase base URL in the Settings page of your project.
+    First, select your project from the Dashboard.
+    On the left sidebar, click on the Settings icon, then select API.
+    The base URL is the first field on the page.
+    """
+  end
+
+  defp missing_key_config_walkthrough do
+    """
+    You can find your Supabase API key in the Settings page of your project.
+    First, select your project from the Dashboard.
+    On the left sidebar, click on the Settings icon, then select API.
+    The API key is the second field on the page.
+
+    There are two types of API keys: the public key and the private key. The private key
+    bypasses any Row Level Security (RLS) rules you have set up,
+    so you should never use it in your frontend application.
+
+    If you don't know what RLS is, you can read more about it here:
+    https://supabase.com/docs/guides/auth/row-level-security
+
+    In most cases you should prefer to use the public "anon" key.
+    """
+  end
+end
diff --git a/lib/supabase/storage.ex b/lib/supabase/storage.ex
new file mode 100644
index 0000000..4f53ebc
--- /dev/null
+++ b/lib/supabase/storage.ex
@@ -0,0 +1,636 @@
+defmodule Supabase.Storage do
+  @moduledoc """
+  Supabase.Storage Elixir Package
+
+  This module provides integration with the Supabase Storage API, enabling developers
+  to manage buckets and objects with ease.
+
+  ## Features
+
+  1. **Bucket Operations**: Methods that allow the creation, listing, and removal of buckets.
+  2. **Object Operations**: Functions designed to upload, download, retrieve object information,
+     and perform move, copy, and remove actions on objects.
+ + ## Usage + + You can start by creating or managing buckets: + + Supabase.Storage.create_bucket(client, "my_new_bucket") + + Once a bucket is set up, objects within the bucket can be managed: + + Supabase.Storage.upload_object(client, "my_bucket", "path/on/server.png", "path/on/local.png") + + ## Examples + + Here are some basic examples: + + # Removing an object + Supabase.Storage.remove_object(client, "my_bucket", "path/on/server.png") + + # Moving an object + Supabase.Storage.move_object(client, "my_bucket", "path/on/server1.png", "path/on/server2.png") + + Ensure to refer to method-specific documentation for detailed examples and explanations. + + ## Permissions + + Do remember to check and set the appropriate permissions in Supabase to make sure that the + operations can be performed without any hitches. + """ + + import Supabase.Client, only: [is_client: 1] + + alias Supabase.Client + alias Supabase.Client.Conn + alias Supabase.Storage.Bucket + alias Supabase.Storage.BucketHandler + alias Supabase.Storage.Object + alias Supabase.Storage.ObjectHandler + alias Supabase.Storage.ObjectOptions + alias Supabase.Storage.SearchOptions + + @behaviour Supabase.StorageBehaviour + + @doc """ + Retrieves information about all buckets in the current project. + + ## Notes + + * Policy permissions required + * `buckets` permissions: `select` + * `objects` permissions: none + + ## Examples + + iex> Supabase.Storage.list_buckets(client) + {:ok, [%Supabase.Storage.Bucket{...}, ...]} + + iex> Supabase.Storage.list_buckets(invalid_conn) + {:error, reason} + + """ + @impl true + def list_buckets(client) when is_client(client) do + case Client.retrieve_connection(client) do + nil -> + {:error, :invalid_client} + + %Conn{access_token: token, api_key: api_key, base_url: base_url} -> + {:ok, BucketHandler.list(base_url, api_key, token)} + end + end + + @doc """ + Retrieves information about a bucket in the current project. + + ## Notes + + * Policy permissions required + * `buckets` permissions: `select` + * `objects` permissions: none + + ## Examples + + iex> Supabase.Storage.retrieve_bucket_info(client, "avatars") + {:ok, %Supabase.Storage.Bucket{...}} + + iex> Supabase.Storage.retrieve_bucket_info(invalid_conn, "avatars") + {:error, reason} + + """ + @impl true + def retrieve_bucket_info(client, id) when is_client(client) do + case Client.retrieve_connection(client) do + nil -> + {:error, :invalid_client} + + %Conn{access_token: token, api_key: api_key, base_url: base_url} -> + BucketHandler.retrieve_info(base_url, api_key, token, id) + end + end + + @doc """ + Creates a new bucket in the current project given a map of attributes. 
+ + ## Attributes + + * `id`: the id of the bucket to be created, required + * `name`: the name of the bucket to be created, defaults to the `id` provided + * `file_size_limit`: the maximum size of a file in bytes + * `allowed_mime_types`: a list of allowed mime types, defaults to allow all MIME types + * `public`: whether the bucket is public or not, defaults to `false` + + ## Notes + + * Policy permissions required + * `buckets` permissions: `insert` + * `objects` permissions: none + + ## Examples + + iex> Supabase.Storage.create_bucket(client, %{id: "avatars"}) + {:ok, %Supabase.Storage.Bucket{...}} + + iex> Supabase.Storage.create_bucket(invalid_conn, %{id: "avatars"}) + {:error, reason} + + """ + @impl true + def create_bucket(client, attrs) when is_client(client) do + with {:ok, bucket_params} <- Bucket.create_changeset(attrs), + %Conn{access_token: token, api_key: api_key, base_url: base_url} <- + Client.retrieve_connection(client), + {:ok, _} <- BucketHandler.create(base_url, api_key, token, bucket_params) do + retrieve_bucket_info(client, bucket_params.id) + else + nil -> + {:error, :invalid_client} + + {:error, changeset} -> + {:error, changeset} + end + end + + @doc """ + Updates a bucket in the current project given a map of attributes. + + ## Attributes + + * `file_size_limit`: the maximum size of a file in bytes + * `allowed_mime_types`: a list of allowed mime types, defaults to allow all MIME types + * `public`: whether the bucket is public or not, defaults to `false` + + Isn't possible to update a bucket's `id` or `name`. If you want or need this, you should + firstly delete the bucket and then create a new one. + + ## Notes + + * Policy permissions required + * `buckets` permissions: `update` + * `objects` permissions: none + + ## Examples + + iex> Supabase.Storage.update_bucket(client, bucket, %{public: true}) + {:ok, %Supabase.Storage.Bucket{...}} + + iex> Supabase.Storage.update_bucket(invalid_conn, bucket, %{public: true}) + {:error, reason} + + """ + @impl true + def update_bucket(client, bucket, attrs) when is_client(client) do + with {:ok, bucket_params} <- Bucket.update_changeset(bucket, attrs), + %Conn{access_token: token, api_key: api_key, base_url: base_url} <- + Client.retrieve_connection(client), + {:ok, _} <- BucketHandler.update(base_url, api_key, token, bucket.id, bucket_params) do + retrieve_bucket_info(client, bucket.id) + else + nil -> + {:error, :invalid_client} + + {:error, changeset} -> + {:error, changeset} + end + end + + @doc """ + Empties a bucket in the current project. This action deletes all objects in the bucket. + + ## Notes + + * Policy permissions required + * `buckets` permissions: `update` + * `objects` permissions: `delete` + + ## Examples + + iex> Supabase.Storage.empty_bucket(client, bucket) + {:ok, :emptied} + + iex> Supabase.Storage.empty_bucket(invalid_conn, bucket) + {:error, reason} + + """ + @impl true + def empty_bucket(client, %Bucket{} = bucket) when is_client(client) do + case Client.retrieve_connection(client) do + nil -> + {:error, :invalid_client} + + %Conn{access_token: token, api_key: api_key, base_url: base_url} -> + BucketHandler.empty(base_url, api_key, token, bucket.id) + end + end + + @doc """ + Deletes a bucket in the current project. Notice that this also deletes all objects in the bucket. 
+ + ## Notes + + * Policy permissions required + * `buckets` permissions: `delete` + * `objects` permissions: `delete` + + ## Examples + + iex> Supabase.Storage.delete_bucket(client, bucket) + {:ok, :deleted} + + iex> Supabase.Storage.delete_bucket(invalid_conn, bucket) + {:error, reason} + + """ + @impl true + def delete_bucket(client, %Bucket{} = bucket) when is_client(client) do + with %Conn{access_token: token, api_key: api_key, base_url: base_url} <- + Client.retrieve_connection(client), + {:ok, _} <- BucketHandler.delete(base_url, api_key, token, bucket.id) do + {:ok, :deleted} + else + nil -> + {:error, :invalid_client} + + {:error, changeset} -> + {:error, changeset} + end + end + + @doc """ + Removes an object from a bucket in the current project. + + ## Notes + + * Policy permissions required + * `buckets` permissions: none + * `objects` permissions: `delete` + + ## Examples + + iex> Supabase.Storage.remove_object(client, bucket, object) + {:ok, :deleted} + + iex> Supabase.Storage.remove_object(invalid_conn, bucket, object) + {:error, reason} + + """ + @impl true + def remove_object(client, %Bucket{} = bucket, %Object{} = object) when is_client(client) do + case Client.retrieve_connection(client) do + nil -> + {:error, :invalid_client} + + %Conn{access_token: token, api_key: api_key, base_url: base_url} -> + ObjectHandler.remove(base_url, api_key, token, bucket.name, object.path) + end + end + + @doc """ + Moves a object from a bucket and send it to another bucket, in the current project. + Notice that isn't necessary to pass the current bucket, because the object already + contains this information. + + ## Notes + + * Policy permissions required + * `buckets` permissions: none + * `objects` permissions: `delete` and `create` + + ## Examples + + iex> Supabase.Storage.move_object(client, bucket, object) + {:ok, :moved} + + iex> Supabase.Storage.move_object(invalid_conn, bucket, object) + {:error, reason} + + """ + @impl true + def move_object(client, %Bucket{} = bucket, %Object{} = object, to) when is_client(client) do + case Client.retrieve_connection(client) do + nil -> + {:error, :invalid_client} + + %Conn{access_token: token, api_key: api_key, base_url: base_url} -> + ObjectHandler.move(base_url, api_key, token, bucket.name, object.path, to) + end + end + + @doc """ + Copies a object from a bucket and send it to another bucket, in the current project. + Notice that isn't necessary to pass the current bucket, because the object already + contains this information. + + ## Notes + + * Policy permissions required + * `buckets` permissions: none + * `objects` permissions: `create` + + ## Examples + + iex> Supabase.Storage.copy_object(client, bucket, object) + {:ok, :copied} + + iex> Supabase.Storage.copy_object(invalid_conn, bucket, object) + {:error, reason} + + """ + @impl true + def copy_object(client, %Bucket{} = bucket, %Object{} = object, to) when is_client(client) do + case Client.retrieve_connection(client) do + nil -> + {:error, :invalid_client} + + %Conn{access_token: token, api_key: api_key, base_url: base_url} -> + ObjectHandler.copy(base_url, api_key, token, bucket.name, object.path, to) + end + end + + @doc """ + Retrieves information about an object in a bucket in the current project. 
+ + ## Notes + + * Policy permissions required + * `buckets` permissions: none + * `objects` permissions: `select` + + ## Examples + + iex> Supabase.Storage.retrieve_object_info(client, bucket, "some.png") + {:ok, %Supabase.Storage.Object{...}} + + iex> Supabase.Storage.retrieve_object_info(invalid_conn, bucket, "some.png") + {:error, reason} + + """ + @impl true + def retrieve_object_info(client, %Bucket{} = bucket, wildcard) when is_client(client) do + case Client.retrieve_connection(client) do + nil -> + {:error, :invalid_client} + + %Conn{access_token: token, api_key: api_key, base_url: base_url} -> + ObjectHandler.get_info(base_url, api_key, token, bucket.name, wildcard) + end + end + + @doc """ + Lists a set of objects in a bucket in the current project. + + ## Searching + + You can pass a prefix to filter the objects returned. For example, if you have the following + objects in your bucket: + + . + └── bucket/ + ├── avatars/ + │ └── some.png + ├── other.png + └── some.pdf + + And you want to list only the objects inside the `avatars` folder, you can do: + + iex> Supabase.Storage.list_objects(client, bucket, "avatars/") + {:ok, [%Supabase.Storage.Object{...}]} + + Also you can pass some search options as a `Supabase.Storage.SearchOptions` struct. Available + options are: + + * `limit`: the maximum number of objects to return + * `offset`: the number of objects to skip + * `sort_by`: + * `column`: the column to sort by, defaults to `created_at` + * `order`: the order to sort by, defaults to `desc` + + ## Notes + + * Policy permissions required + * `buckets` permissions: none + * `objects` permissions: `select` + + ## Examples + + iex> Supabase.Storage.list_objects(client, bucket) + {:ok, [%Supabase.Storage.Object{...}, ...]} + + iex> Supabase.Storage.list_objects(invalid_conn, bucket) + {:error, reason} + + """ + @impl true + def list_objects(client, %Bucket{} = bucket, prefix \\ "", opts \\ %SearchOptions{}) + when is_client(client) do + case Client.retrieve_connection(client) do + nil -> + {:error, :invalid_client} + + %Conn{access_token: token, api_key: api_key, base_url: base_url} -> + ObjectHandler.list(base_url, api_key, token, bucket.name, prefix, opts) + end + end + + @doc """ + Uploads a file to a bucket in the current project. Notice that you only need to + pass the path to the file you want to upload, as the file will be read in a stream way + to be sent to the server. + + ## Options + + You can pass some options as a `Supabase.Storage.ObjectOptions` struct. 
Available + options are: + + * `cache_control`: the cache control header value, defaults to `3600` + * `content_type`: the content type header value, defaults to `text/plain;charset=UTF-8` + * `upsert`: whether to overwrite the object if it already exists, defaults to `false` + + ## Notes + + * Policy permissions required + * `buckets` permissions: none + * `objects` permissions: `insert` + + ## Examples + + iex> Supabase.Storage.upload_object(client, bucket, "avatars/some.png", "path/to/file.png") + {:ok, %Supabase.Storage.Object{...}} + + iex> Supabase.Storage.upload_object(invalid_conn, bucket, "avatars/some.png", "path/to/file.png") + {:error, reason} + + """ + @impl true + def upload_object(client, %Bucket{} = bucket, path, file, opts \\ %ObjectOptions{}) + when is_client(client) do + case Client.retrieve_connection(client) do + nil -> + {:error, :invalid_client} + + %Conn{access_token: token, api_key: api_key, base_url: base_url} -> + file = Path.expand(file) + ObjectHandler.create_file(base_url, api_key, token, bucket.name, path, file, opts) + end + end + + @doc """ + Downloads an object from a bucket in the current project. That return a binary that + represents the object content. + + ## Notes + + * Policy permissions required + * `buckets` permissions: none + * `objects` permissions: `select` + + ## Examples + + iex> Supabase.Storage.download_object(client, %Bucket{}, "avatars/some.png") + {:ok, <<>>} + + iex> Supabase.Storage.download_object(invalid_conn, %Bucket{}, "avatars/some.png") + {:error, reason} + + """ + @impl true + def download_object(client, %Bucket{} = bucket, wildcard) when is_client(client) do + case Client.retrieve_connection(client) do + nil -> + {:error, :invalid_client} + + %Conn{access_token: token, api_key: api_key, base_url: base_url} -> + ObjectHandler.get(base_url, api_key, token, bucket.name, wildcard) + end + end + + @doc """ + Downloads an object from a bucket in the current project. That return a stream that + represents the object content. Notice that the request to the server is only made + when you start to consume the stream. + + ## Notes + + * Policy permissions required + * `buckets` permissions: none + * `objects` permissions: `select` + + ## Examples + + iex> Supabase.Storage.download_object_lazy(client, %Bucket{}, "avatars/some.png") + {:ok, #Function<59.128620087/2 in Stream.resource/3>} + + iex> Supabase.Storage.download_object_lazy(invalid_conn, %Bucket{}, "avatars/some.png") + {:error, reason} + + """ + @impl true + def download_object_lazy(client, %Bucket{} = bucket, wildcard) when is_client(client) do + case Client.retrieve_connection(client) do + nil -> + {:error, :invalid_client} + + %Conn{access_token: token, api_key: api_key, base_url: base_url} -> + ObjectHandler.get_lazy(base_url, api_key, token, bucket.name, wildcard) + end + end + + @doc """ + Saves an object from a bucket in the current project to a file in the local filesystem. 
+ + ## Notes + + * Policy permissions required + * `buckets` permissions: none + * `objects` permissions: `select` + + ## Examples + + iex> Supabase.Storage.save_object(client, "./some.png", %Bucket{}, "avatars/some.png") + :ok + + iex> Supabase.Storage.save_object(client, "./some.png", %Bucket{}, "do_not_exist.png") + {:error, reason} + + """ + @impl true + def save_object(client, path, %Bucket{} = bucket, wildcard) when is_client(client) do + with {:ok, bin} <- download_object(client, bucket, wildcard) do + File.write(Path.expand(path), bin) + end + end + + @doc """ + Saves an object from a bucket in the current project to a file in the local filesystem. + Notice that the request to the server is only made when you start to consume the stream. + + ## Notes + + * Policy permissions required + * `buckets` permissions: none + * `objects` permissions: `select` + + ## Examples + + iex> Supabase.Storage.save_object_stream(client, "./some.png", %Bucket{}, "avatars/some.png") + :ok + + iex> Supabase.Storage.save_object_stream(client, "./some.png", %Bucket{}, "do_not_exist.png") + {:error, reason} + + """ + @impl true + def save_object_stream(client, path, %Bucket{} = bucket, wildcard) when is_client(client) do + with {:ok, stream} <- download_object_lazy(client, bucket, wildcard) do + fs = File.stream!(Path.expand(path)) + + stream + |> Stream.into(fs) + |> Stream.run() + end + end + + @doc """ + Creates a signed URL for an object in a bucket in the current project. This URL can + be used to perform an HTTP request to the object, without the need of authentication. + Usually this is used to allow users to download objects from a bucket. + + ## Notes + + * Policy permissions required + * `buckets` permissions: none + * `objects` permissions: `select` + + ## Examples + + iex> Supabase.Storage.create_signed_url(client, bucket, "avatars/some.png", 3600) + {:ok, "https://.supabase.co"/object/sign//?token=} + + iex> Supabase.Storage.create_signed_url(invalid_client, bucket, "avatars/some.png", 3600) + {:error, :invalid_client} + + """ + @impl true + def create_signed_url(client, %Bucket{} = bucket, path, expires_in) when is_client(client) do + with %Conn{access_token: token, api_key: api_key, base_url: base_url} <- + Client.retrieve_connection(client), + {:ok, sign_url} <- + ObjectHandler.create_signed_url( + base_url, + api_key, + token, + bucket.name, + path, + expires_in + ) do + {:ok, URI.to_string(URI.merge(base_url, sign_url))} + else + nil -> + {:error, :invalid_client} + + err -> + err + end + end +end diff --git a/lib/supabase/storage/action_error.ex b/lib/supabase/storage/action_error.ex new file mode 100644 index 0000000..811f371 --- /dev/null +++ b/lib/supabase/storage/action_error.ex @@ -0,0 +1,5 @@ +defmodule Supabase.Storage.ActionError do + @moduledoc "Represents an Error on a Supabase Storage Action" + + defexception [:message] +end diff --git a/lib/supabase/storage/application.ex b/lib/supabase/storage/application.ex new file mode 100644 index 0000000..8b83f1e --- /dev/null +++ b/lib/supabase/storage/application.ex @@ -0,0 +1,34 @@ +defmodule Supabase.Storage.Application do + @moduledoc false + + use Application + + @default_cache_size 100 + @default_buckets_reload_interval 60_000 + + @impl true + def start(_type, _args) do + children = [ + if(start_cache?(), do: {Storage.Cache, cache_max_size: cache_max_size()}), + if(start_cache?(), do: {Storage.CacheSupervisor, reload_interval: reload_interval()}) + ] + + opts = [strategy: :one_for_one, name: Supabase.Storage.Supervisor] + + 
children + |> Enum.reject(&is_nil/1) + |> Supervisor.start_link(opts) + end + + defp cache_max_size do + Application.get_env(:supabase, :storage)[:cache_max_size] || @default_cache_size + end + + defp start_cache? do + Application.get_env(:supabase, :storage)[:cache_buckets?] + end + + defp reload_interval do + Application.get_env(:supabase, :storage)[:reload_interval] || @default_buckets_reload_interval + end +end diff --git a/lib/supabase/storage/bucket.ex b/lib/supabase/storage/bucket.ex new file mode 100644 index 0000000..c362b93 --- /dev/null +++ b/lib/supabase/storage/bucket.ex @@ -0,0 +1,127 @@ +defmodule Supabase.Storage.Bucket do + @moduledoc """ + Represents a Bucket on Supabase Storage. + + This module defines the structure and operations related to a storage bucket on Supabase. + + ## Structure + + A `Bucket` consists of: + + - `id`: The unique identifier for the bucket. + - `name`: The display name of the bucket. + - `owner`: The owner of the bucket. + - `file_size_limit`: The maximum file size allowed in the bucket (in bytes). Can be `nil` for no limit. + - `allowed_mime_types`: List of MIME types permitted in this bucket. Can be `nil` for no restrictions. + - `created_at`: Timestamp indicating when the bucket was created. + - `updated_at`: Timestamp indicating the last update to the bucket. + - `public`: Boolean flag determining if the bucket is publicly accessible or not. + + ## Functions + + - `parse!/1`: Parses and returns a bucket structure. + - `create_changeset/1`: Generates a changeset for creating a bucket. + - `update_changeset/2`: Generates a changeset for updating an existing bucket. + + ## Examples + + ### Parsing a bucket + + bucket_attrs = %{ + id: "bucket_id", + name: "My Bucket", + ... + } + Supabase.Storage.Bucket.parse!(bucket_attrs) + + ### Creating a bucket changeset + + new_bucket_attrs = %{ + id: "new_bucket_id", + ... + } + Supabase.Storage.Bucket.create_changeset(new_bucket_attrs) + + ### Updating a bucket + + existing_bucket = %Supabase.Storage.Bucket{ + id: "existing_bucket_id", + ... 
+ } + updated_attrs = %{ + public: true + } + Supabase.Storage.Bucket.update_changeset(existing_bucket, updated_attrs) + """ + + use Ecto.Schema + + import Ecto.Changeset + + @type t :: %__MODULE__{ + id: String.t(), + name: String.t(), + owner: String.t(), + file_size_limit: integer | nil, + allowed_mime_types: list(String.t()) | nil, + created_at: NaiveDateTime.t(), + updated_at: NaiveDateTime.t(), + public: boolean + } + + @fields ~w(id name created_at updated_at file_size_limit allowed_mime_types public owner)a + @create_fields ~w(id name file_size_limit allowed_mime_types public)a + @update_fields ~w(file_size_limit allowed_mime_types public)a + + @primary_key false + embedded_schema do + field(:id, :string) + field(:name, :string) + field(:owner, :string) + field(:file_size_limit, :integer) + field(:allowed_mime_types, {:array, :string}) + field(:created_at, :naive_datetime) + field(:updated_at, :naive_datetime) + field(:public, :boolean, default: false) + end + + @spec parse!(map) :: t + def parse!(attrs) do + %__MODULE__{} + |> cast(attrs, @fields) + |> apply_action!(:parse) + end + + @spec create_changeset(map) :: {:ok, map} | {:error, Ecto.Changeset.t()} + def create_changeset(attrs) do + %__MODULE__{} + |> cast(attrs, @create_fields) + |> validate_required([:id]) + |> maybe_put_name() + |> apply_action(:create) + |> case do + {:ok, data} -> {:ok, Map.take(data, @create_fields)} + err -> err + end + end + + defp maybe_put_name(changeset) do + if get_change(changeset, :name) do + changeset + else + id = get_change(changeset, :id) + put_change(changeset, :name, id) + end + end + + @spec update_changeset(t, map) :: {:ok, map} | {:error, Ecto.Changeset.t()} + def update_changeset(%__MODULE__{} = bucket, attrs) do + bucket + |> cast(attrs, @update_fields) + |> apply_action(:update) + |> case do + {:ok, data} -> {:ok, Map.take(data, @update_fields)} + err -> err + end + end +end diff --git a/lib/supabase/storage/cache.ex b/lib/supabase/storage/cache.ex new file mode 100644 index 0000000..6152a7d --- /dev/null +++ b/lib/supabase/storage/cache.ex @@ -0,0 +1,97 @@ +defmodule Supabase.Storage.Cache do + @moduledoc """ + Provides caching mechanisms for Supabase Storage Buckets. + + This module acts as a GenServer that offers caching capabilities, especially for bucket-related operations in Supabase Storage. The caching is backed by the `:ets` (Erlang Term Storage) to provide in-memory storage and fast retrieval of cached data. + + ## Features + + - **Bucket Caching**: Store and retrieve buckets by their unique identifier. + - **Cache Flushing**: Clear the cache when necessary. + - **Configurable Cache Size**: Limit the number of items that can be stored in the cache. + + ## Usage + + ### Starting the Cache Server + + Supabase.Storage.Cache.start_link(%{cache_max_size: 200}) + + ### Caching Buckets + + buckets = [%{id: "bucket_1", ...}, %{id: "bucket_2", ...}] + Supabase.Storage.Cache.cache_buckets(buckets) + + ### Retrieving a Cached Bucket by ID + + Supabase.Storage.Cache.find_bucket_by_id("bucket_1") + + ### Clearing the Cache + + Supabase.Storage.Cache.flush() + + ## Implementation Details + + The cache uses the `:ets` module for in-memory storage of buckets. The number of buckets cached is controlled by the `:cache_max_size` option (default: 100). When the cache is close to exceeding its maximum size, older entries are removed to accommodate new ones. 
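+
+  ## Configuration
+
+  When the cache is started through the bundled `Supabase.Storage.Application`
+  supervisor, it is only enabled if bucket caching is turned on in your
+  application environment. A minimal sketch, assuming you rely on that
+  supervisor (the values themselves are illustrative):
+
+      config :supabase, :storage,
+        cache_buckets?: true,
+        cache_max_size: 200,
+        reload_interval: 60_000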
+ """ + + use GenServer + + ## Client + + def start_link(args) do + GenServer.start_link(__MODULE__, args, name: __MODULE__) + end + + def find_bucket_by_id(id) do + GenServer.call(__MODULE__, {:find_bucket, id: id}) + end + + def cache_buckets(buckets) do + GenServer.cast(__MODULE__, {:cache_buckets, buckets}) + end + + def flush do + GenServer.cast(__MODULE__, :flush) + end + + ## API + + @impl true + def init(args) do + Process.flag(:trap_exit, true) + table = :ets.new(:buckets_cache, [:set, :public, :named_table]) + max_size = Keyword.get(args, :cache_max_size, 100) + {:ok, %{table: table, max_size: max_size, size: 0}} + end + + @impl true + def handle_cast(:flush, table) do + :ets.delete_all_objects(table) + {:noreply, table} + end + + def handle_cast({:cache_buckets, buckets}, state) do + if overflowed_max_size?(state, buckets) do + :ets.delete_all_objects(state.table) + end + + # prefer atomic operations + for bucket <- buckets do + :ets.insert_new(state.table, {bucket.id, bucket}) + end + + {:noreply, %{state | size: length(buckets)}} + end + + defp overflowed_max_size?(state, buckets) do + state.size + length(buckets) > state.max_size + end + + @impl true + def handle_call({:find_bucket, id: id}, _from, state) do + bucket = :ets.lookup_element(state.table, id, 2) + {:reply, bucket, state} + rescue + _ -> {:reply, nil, state} + end +end diff --git a/lib/supabase/storage/cache_reloader.ex b/lib/supabase/storage/cache_reloader.ex new file mode 100644 index 0000000..137ade1 --- /dev/null +++ b/lib/supabase/storage/cache_reloader.ex @@ -0,0 +1,51 @@ +defmodule Supabase.Storage.CacheReloader do + @moduledoc """ + Periodically reloads and updates the bucket cache for Supabase Storage. + + This module acts as a GenServer that schedules periodic tasks to reload and update the cache for Supabase Storage Buckets. It collaborates with the `Supabase.Storage.Cache` to ensure that the cached data remains fresh and updated. + + ## Features + + - **Automatic Cache Reloading**: Periodically reloads the buckets from Supabase Storage and updates the cache. + - **Configurable Reload Interval**: The time interval between successive cache reloads can be specified. + + ## Usage + + ### Starting the CacheReloader Server + + Supabase.Storage.CacheReloader.start_link(%{reload_interval: 2_000}) + + ## Implementation Details + + By default, the reload interval is set to 1 second (`@ttl`). This means the cache will be updated every second with the latest data from Supabase Storage. This interval can be configured during the server start using the `:reload_interval` option. + + The server interacts with `Supabase.Storage.list_buckets/1` to fetch the list of buckets and then updates the cache using `Supabase.Storage.Cache.cache_buckets/1`. 
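+
+  Note that the options are read with `Keyword.get/3`, so when starting the
+  server yourself pass a keyword list. A sketch of running it under your own
+  supervision tree (the interval value is illustrative):
+
+      children = [
+        {Supabase.Storage.CacheReloader, reload_interval: 60_000}
+      ]
+
+      Supervisor.start_link(children, strategy: :one_for_one)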
+ """ + + use GenServer + + alias Supabase.Storage.Cache + + # @ttl 60_000 + @ttl 1_000 + + def start_link(args) do + GenServer.start_link(__MODULE__, args, name: __MODULE__) + end + + @impl true + def init(args) do + Process.flag(:trap_exit, true) + interval = Keyword.get(args, :reload_interval, @ttl) + Process.send_after(self(), :reload, interval) + {:ok, interval} + end + + @impl true + def handle_info(:reload, interval) do + {:ok, buckets} = Supabase.Storage.list_buckets(Supabase.Connection) + :ok = Cache.cache_buckets(buckets) + Process.send_after(self(), :reload, interval) + {:noreply, interval} + end +end diff --git a/lib/supabase/storage/endpoints.ex b/lib/supabase/storage/endpoints.ex new file mode 100644 index 0000000..f3271bf --- /dev/null +++ b/lib/supabase/storage/endpoints.ex @@ -0,0 +1,51 @@ +defmodule Supabase.Storage.Endpoints do + @moduledoc "Defines the Endpoints for the Supabase Storage API" + + def bucket_path do + "/storage/v1/bucket" + end + + def bucket_path_with_id(id) do + "/storage/v1/bucket/#{id}" + end + + def bucket_path_to_empty(id) do + bucket_path_with_id(id) <> "/empty" + end + + def file_upload_url(path) do + "/storage/v1/object/upload/sign/#{path}" + end + + def file_move do + "/storage/v1/object/move" + end + + def file_copy do + "/storage/v1/object/copy" + end + + def file_upload(bucket, path) do + "/storage/v1/object/#{bucket}/#{path}" + end + + def file_info(bucket, wildcard) do + "/storage/v1/object/info/authenticated/#{bucket}/#{wildcard}" + end + + def file_list(bucket) do + "/storage/v1/object/list/#{bucket}" + end + + def file_remove(bucket) do + "/storage/v1/object/#{bucket}" + end + + def file_signed_url(bucket, path) do + "/storage/v1/object/sign/#{bucket}/#{path}" + end + + def file_download(bucket, wildcard) do + "/storage/v1/object/authenticated/#{bucket}/#{wildcard}" + end +end diff --git a/lib/supabase/storage/handlers/bucket_handler.ex b/lib/supabase/storage/handlers/bucket_handler.ex new file mode 100644 index 0000000..02dbe0e --- /dev/null +++ b/lib/supabase/storage/handlers/bucket_handler.ex @@ -0,0 +1,133 @@ +defmodule Supabase.Storage.BucketHandler do + @moduledoc """ + Provides low-level API functions for managing Supabase Storage buckets. + + The `BucketHandler` module offers a collection of functions that directly interact with the Supabase Storage API for managing buckets. This module works closely with the `Supabase.Fetcher` for sending HTTP requests and the `Supabase.Storage.Cache` for caching bucket information. + + ## Features + + - **Bucket Listing**: Fetch a list of all the buckets available in the storage. + - **Bucket Retrieval**: Retrieve detailed information about a specific bucket. + - **Bucket Creation**: Create a new bucket with specified attributes. + - **Bucket Update**: Modify the attributes of an existing bucket. + - **Bucket Emptying**: Empty the contents of a bucket without deleting the bucket itself. + - **Bucket Deletion**: Permanently remove a bucket and its contents. + + ## Caution + + This module provides a low-level interface to Supabase Storage buckets and is designed for internal use by the `Supabase.Storage` module. Direct use is discouraged unless you need to perform custom or unsupported actions that are not available through the higher-level API. Incorrect use can lead to unexpected results or data loss. 
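+
+  If you do need to call it directly, the connection values can be taken from a
+  client started with `Supabase.init_client/1`; a rough sketch (`client` is
+  assumed to be your client name or PID):
+
+      conn = Supabase.Client.retrieve_connection(client)
+
+      Supabase.Storage.BucketHandler.list(conn.base_url, conn.api_key, conn.access_token)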
+ + ## Implementation Details + + All functions within the module expect a base URL, API key, and access token as their initial arguments, followed by any additional arguments required for the specific operation. Responses are usually in the form of `{:ok, result}` or `{:error, message}` tuples. + """ + + alias Supabase.Connection, as: Conn + alias Supabase.Fetcher + alias Supabase.Storage.Bucket + alias Supabase.Storage.Cache + alias Supabase.Storage.Endpoints + + @type bucket_id :: String.t() + @type bucket_name :: String.t() + @type create_attrs :: %{ + id: String.t(), + name: String.t(), + file_size_limit: integer | nil, + allowed_mime_types: list(String.t()) | nil, + public: boolean + } + @type update_attrs :: %{ + public: boolean | nil, + file_size_limit: integer | nil, + allowed_mime_types: list(String.t()) | nil + } + + @spec list(Conn.base_url(), Conn.api_key(), Conn.access_token()) :: + {:ok, [Bucket.t()]} | {:error, String.t()} + def list(base_url, api_key, token) do + url = Fetcher.get_full_url(base_url, Endpoints.bucket_path()) + headers = Fetcher.apply_headers(api_key, token) + + url + |> Fetcher.get(headers) + |> case do + {:ok, body} -> {:ok, Enum.map(body, &Bucket.parse!/1)} + {:error, msg} -> {:error, msg} + end + end + + @spec retrieve_info(Conn.base_url(), Conn.api_key(), Conn.access_token(), bucket_id) :: + {:ok, Bucket.t()} | {:error, String.t()} + def retrieve_info(base_url, api_key, token, bucket_id) do + if bucket = Cache.find_bucket_by_id(bucket_id) do + {:ok, bucket} + else + url = Fetcher.get_full_url(base_url, Endpoints.bucket_path_with_id(bucket_id)) + headers = Fetcher.apply_headers(api_key, token) + + url + |> Fetcher.get(headers) + |> case do + {:ok, body} -> {:ok, Bucket.parse!(body)} + {:error, msg} -> {:error, msg} + end + end + end + + @spec create(Conn.base_url(), Conn.api_key(), Conn.access_token(), create_attrs) :: + {:ok, Bucket.t()} | {:error, String.t()} + def create(base_url, api_key, token, attrs) do + url = Fetcher.get_full_url(base_url, Endpoints.bucket_path()) + headers = Fetcher.apply_headers(api_key, token) + + url + |> Fetcher.post(attrs, headers) + |> case do + {:ok, resp} -> {:ok, resp} + {:error, msg} -> {:error, msg} + end + end + + @spec update(Conn.base_url(), Conn.api_key(), Conn.access_token(), bucket_id, update_attrs) :: + {:ok, Bucket.t()} | {:error, String.t()} + def update(base_url, api_key, token, id, attrs) do + url = Fetcher.get_full_url(base_url, Endpoints.bucket_path_with_id(id)) + headers = Fetcher.apply_headers(api_key, token) + + url + |> Fetcher.put(attrs, headers) + |> case do + {:ok, message} -> {:ok, message} + {:error, msg} -> {:error, msg} + end + end + + @spec empty(Conn.base_url(), Conn.api_key(), Conn.access_token(), bucket_id) :: + {:ok, :successfully_emptied} | {:error, String.t()} + def empty(base_url, api_key, token, id) do + url = Fetcher.get_full_url(base_url, Endpoints.bucket_path_to_empty(id)) + headers = Fetcher.apply_headers(api_key, token) + + url + |> Fetcher.post(nil, headers) + |> case do + {:ok, _message} -> {:ok, :successfully_emptied} + {:error, msg} -> {:error, msg} + end + end + + @spec delete(Conn.base_url(), Conn.api_key(), Conn.access_token(), bucket_id) :: + {:ok, String.t()} | {:error, String.t()} + def delete(base_url, api_key, token, id) do + url = Fetcher.get_full_url(base_url, Endpoints.bucket_path_with_id(id)) + headers = Fetcher.apply_headers(api_key, token) + + url + |> Fetcher.delete(nil, headers) + |> case do + {:ok, body} -> {:ok, body} + {:error, msg} -> {:error, msg} + end + 
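+    # The raw success payload from the Storage API is returned unchanged;
+    # callers decide how to interpret or wrap it.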
end +end diff --git a/lib/supabase/storage/handlers/object_handler.ex b/lib/supabase/storage/handlers/object_handler.ex new file mode 100644 index 0000000..111eda0 --- /dev/null +++ b/lib/supabase/storage/handlers/object_handler.ex @@ -0,0 +1,226 @@ +defmodule Supabase.Storage.ObjectHandler do + @moduledoc """ + A low-level API interface for managing objects within a Supabase bucket. + + ## Responsibilities + + - **File Management**: Create, move, copy, and get information about files in a bucket. + - **Object Listing**: List objects based on certain criteria, like a prefix. + - **Object Removal**: Delete specific objects or a list of objects. + - **URL Management**: Generate signed URLs for granting temporary access to objects. + - **Content Access**: Retrieve the content of an object or stream it. + + ## Usage Warning + + This module is meant for internal use or for developers requiring more control over object management in Supabase. In general, users should work with the higher-level Supabase.Storage API when possible, as it may offer better abstractions and safety mechanisms. + + Directly interfacing with this module bypasses any additional logic the main API might provide. Use it with caution and ensure you understand its operations. + """ + + alias Supabase.Connection, as: Conn + alias Supabase.Fetcher + alias Supabase.Storage.Endpoints + alias Supabase.Storage.Object + alias Supabase.Storage.ObjectOptions, as: Opts + alias Supabase.Storage.SearchOptions, as: Search + + @type bucket_name :: String.t() + @type object_path :: Path.t() + @type file_path :: Path.t() + @type opts :: Opts.t() + @type search_opts :: Search.t() + @type wildcard :: String.t() + @type prefix :: String.t() + + @spec create_file( + Conn.base_url(), + Conn.api_key(), + Conn.access_token(), + bucket_name, + object_path, + file_path, + opts + ) :: + {:ok, Object.t()} | {:error, String.t()} + def create_file(url, api_key, token, bucket, object_path, file_path, %Opts{} = opts) do + url = Fetcher.get_full_url(url, Endpoints.file_upload(bucket, object_path)) + + headers = + Fetcher.apply_headers(api_key, token, [ + {"cache-control", "max-age=#{opts.cache_control}"}, + {"content-type", opts.content_type}, + {"x-upsert", to_string(opts.upsert)} + ]) + + Fetcher.upload(:post, url, file_path, headers) + rescue + File.Error -> {:error, :file_not_found} + end + + @spec move( + Conn.base_url(), + Conn.api_key(), + Conn.access_token(), + bucket_name, + object_path, + object_path + ) :: + {:ok, :moved} | {:error, String.t()} + def move(base_url, api_key, token, bucket_id, path, to) do + url = Fetcher.get_full_url(base_url, Endpoints.file_move()) + headers = Fetcher.apply_headers(api_key, token) + body = %{bucket_id: bucket_id, source_key: path, destination_key: to} + + url + |> Fetcher.post(body, headers) + |> case do + {:ok, _} -> {:ok, :moved} + {:error, msg} -> {:error, msg} + end + end + + @spec copy( + Conn.base_url(), + Conn.api_key(), + Conn.access_token(), + bucket_name, + object_path, + object_path + ) :: + {:ok, :copied} | {:error, String.t()} + def copy(base_url, api_key, token, bucket_id, path, to) do + url = Fetcher.get_full_url(base_url, Endpoints.file_copy()) + headers = Fetcher.apply_headers(api_key, token) + body = %{bucket_id: bucket_id, source_key: path, destination_key: to} + + url + |> Fetcher.post(body, headers) + |> case do + {:ok, _} -> {:ok, :copied} + {:error, msg} -> {:error, msg} + end + end + + @spec get_info( + Conn.base_url(), + Conn.api_key(), + Conn.access_token(), + bucket_name, + wildcard + ) 
:: + {:ok, Object.t()} | {:error, String.t()} + def get_info(base_url, api_key, token, bucket_name, wildcard) do + url = Fetcher.get_full_url(base_url, Endpoints.file_info(bucket_name, wildcard)) + headers = Fetcher.apply_headers(api_key, token) + + url + |> Fetcher.get(headers) + |> case do + {:ok, data} -> {:ok, Object.parse!(data)} + {:error, msg} -> {:error, msg} + end + end + + @spec list( + Conn.base_url(), + Conn.api_key(), + Conn.access_token(), + bucket_name, + prefix, + search_opts + ) :: + {:ok, [Object.t()]} | {:error, String.t()} + def list(base_url, api_key, token, bucket_name, prefix, %Search{} = opts) do + url = Fetcher.get_full_url(base_url, Endpoints.file_list(bucket_name)) + headers = Fetcher.apply_headers(api_key, token) + body = Map.merge(%{prefix: prefix}, Map.from_struct(opts)) + + url + |> Fetcher.post(body, headers) + |> case do + {:ok, data} -> {:ok, Enum.map(data, &Object.parse!/1)} + {:error, msg} -> {:error, msg} + end + end + + @spec remove( + Conn.base_url(), + Conn.api_key(), + Conn.access_token(), + bucket_name, + object_path + ) :: + {:ok, :deleted} | {:error, String.t()} + def remove(base_url, api_key, token, bucket_name, path) do + remove_list(base_url, api_key, token, bucket_name, [path]) + end + + @spec remove_list( + Conn.base_url(), + Conn.api_key(), + Conn.access_token(), + bucket_name, + list(object_path) + ) :: + {:ok, :deleted} | {:error, String.t()} + def remove_list(base_url, api_key, token, bucket_name, paths) do + url = Fetcher.get_full_url(base_url, Endpoints.file_remove(bucket_name)) + headers = Fetcher.apply_headers(api_key, token) + + url + |> Fetcher.delete(%{prefixes: paths}, headers) + |> case do + {:ok, _} -> {:ok, :deleted} + {:error, msg} -> {:error, msg} + end + end + + @spec create_signed_url( + Conn.base_url(), + Conn.api_key(), + Conn.access_token(), + bucket_name, + object_path, + integer + ) :: + {:ok, String.t()} | {:error, String.t()} + def create_signed_url(base_url, api_key, token, bucket_name, path, expires_in) do + url = Fetcher.get_full_url(base_url, Endpoints.file_signed_url(bucket_name, path)) + headers = Fetcher.apply_headers(api_key, token) + + url + |> Fetcher.post(%{expiresIn: expires_in}, headers) + |> case do + {:ok, data} -> {:ok, data["signedURL"]} + {:error, msg} -> {:error, msg} + end + end + + @spec get(Conn.base_url(), Conn.api_key(), Conn.access_token(), bucket_name, object_path) :: + {:ok, binary} | {:error, String.t()} + def get(base_url, api_key, token, bucket_name, wildcard) do + url = Fetcher.get_full_url(base_url, Endpoints.file_download(bucket_name, wildcard)) + headers = Fetcher.apply_headers(api_key, token) + + url + |> Fetcher.get(headers) + |> case do + {:ok, data} -> {:ok, data} + {:error, msg} -> {:error, msg} + end + end + + @spec get_lazy( + Conn.base_url(), + Conn.api_key(), + Conn.access_token(), + bucket_name, + wildcard + ) :: + {:ok, Stream.t()} | {:error, atom} + def get_lazy(base_url, api_key, token, bucket_name, wildcard) do + url = Fetcher.get_full_url(base_url, Endpoints.file_download(bucket_name, wildcard)) + headers = Fetcher.apply_headers(api_key, token) + Fetcher.stream(url, headers) + end +end diff --git a/lib/supabase/storage/object.ex b/lib/supabase/storage/object.ex new file mode 100644 index 0000000..3ca142d --- /dev/null +++ b/lib/supabase/storage/object.ex @@ -0,0 +1,75 @@ +defmodule Supabase.Storage.Object do + @moduledoc """ + Represents an Object within a Supabase Storage Bucket. 
+ + This module encapsulates the structure and operations related to an object or file stored within a Supabase Storage bucket. + + ## Structure + + An `Object` has the following attributes: + + - `id`: The unique identifier for the object. + - `path`: The path to the object within its storage bucket. + - `bucket_id`: The ID of the bucket that houses this object. + - `name`: The name or title of the object. + - `owner`: The owner or uploader of the object. + - `metadata`: A map containing meta-information about the object (e.g., file type, size). + - `created_at`: Timestamp indicating when the object was first uploaded or created. + - `updated_at`: Timestamp indicating the last time the object was updated. + - `last_accessed_at`: Timestamp of when the object was last accessed or retrieved. + + ## Functions + + - `parse!/1`: Accepts a map of attributes and constructs a structured `Object`. + + ## Examples + + ### Parsing an object + + object_attrs = %{ + id: "obj_id", + path: "/folder/my_file.txt", + bucket_id: "bucket_123", + ... + } + Supabase.Storage.Object.parse!(object_attrs) + """ + + use Ecto.Schema + + import Ecto.Changeset, only: [cast: 3, apply_action!: 2] + + @type t :: %__MODULE__{ + id: String.t(), + path: Path.t(), + bucket_id: String.t(), + name: String.t(), + owner: String.t(), + metadata: map(), + created_at: NaiveDateTime.t(), + updated_at: NaiveDateTime.t(), + last_accessed_at: NaiveDateTime.t() + } + + @fields ~w(id path bucket_id name owner created_at updated_at metadata last_accessed_at)a + + @primary_key false + embedded_schema do + field(:path, :string) + field(:id, :string) + field(:bucket_id, :string) + field(:name, :string) + field(:owner, :string) + field(:metadata, :map) + field(:created_at, :naive_datetime) + field(:updated_at, :naive_datetime) + field(:last_accessed_at, :naive_datetime) + end + + @spec parse!(map) :: t + def parse!(attrs) do + %__MODULE__{} + |> cast(attrs, @fields) + |> apply_action!(:parse) + end +end diff --git a/lib/supabase/storage/object_options.ex b/lib/supabase/storage/object_options.ex new file mode 100644 index 0000000..253c506 --- /dev/null +++ b/lib/supabase/storage/object_options.ex @@ -0,0 +1,57 @@ +defmodule Supabase.Storage.ObjectOptions do + @moduledoc """ + Represents the configurable options for an Object within Supabase Storage. + + This module encapsulates options that can be set or modified for a storage object. These options help in controlling behavior such as caching, content type, and whether to upsert an object. + + ## Structure + + An `ObjectOptions` consists of the following attributes: + + - `cache_control`: Specifies directives for caching mechanisms in both requests and responses. Default is `"3600"`. + - `content_type`: Specifies the media type of the resource or data. Default is `"text/plain;charset=UTF-8"`. + - `upsert`: A boolean that, when set to `true`, will insert the object if it does not exist, or update it if it does. Default is `true`. + + ## Functions + + - `parse!/1`: Accepts a map of attributes and constructs a structured `ObjectOptions`. 
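+
+  These options are translated into request headers by the object handler:
+  `cache_control` becomes `cache-control: max-age=...`, `content_type` becomes
+  the `content-type` header and `upsert` is sent as `x-upsert`. A sketch of
+  passing them to an upload (the `client` and `bucket` values are assumed):
+
+      opts = Supabase.Storage.ObjectOptions.parse!(%{content_type: "image/png", upsert: false})
+
+      Supabase.Storage.upload_object(client, bucket, "avatars/some.png", "/tmp/some.png", opts)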
+ + ## Examples + + ### Parsing object options + + options_attrs = %{ + cache_control: "no-cache", + content_type: "application/json", + upsert: false + } + Supabase.Storage.ObjectOptions.parse!(options_attrs) + """ + + use Ecto.Schema + + import Ecto.Changeset, only: [cast: 3, apply_action!: 2] + + @type t :: %__MODULE__{ + cache_control: String.t(), + content_type: String.t(), + upsert: boolean() + } + + @fields ~w(cache_control content_type upsert)a + + @derive Jason.Encoder + @primary_key false + embedded_schema do + field(:cache_control, :string, default: "3600") + field(:content_type, :string, default: "text/plain;charset=UTF-8") + field(:upsert, :boolean, default: true) + end + + @spec parse!(map) :: t + def parse!(attrs) do + %__MODULE__{} + |> cast(attrs, @fields) + |> apply_action!(:parse) + end +end diff --git a/lib/supabase/storage/search_options.ex b/lib/supabase/storage/search_options.ex new file mode 100644 index 0000000..25f8339 --- /dev/null +++ b/lib/supabase/storage/search_options.ex @@ -0,0 +1,60 @@ +defmodule Supabase.Storage.SearchOptions do + @moduledoc """ + Represents the search options for querying objects within Supabase Storage. + + This module encapsulates various options that aid in fetching and sorting storage objects. These options include specifying the limit on the number of results, an offset for pagination, and a sorting directive. + + ## Structure + + A `SearchOptions` consists of the following attributes: + + - `limit`: Specifies the maximum number of results to return. Default is `100`. + - `offset`: Specifies the number of results to skip before starting to fetch the result set. Useful for implementing pagination. Default is `0`. + - `sort_by`: A map that provides a sorting directive. It defines which column should be used for sorting and the order (ascending or descending). Default is `%{column: "name", order: "asc"}`. + + ## Functions + + - `parse!/1`: Accepts a map of attributes and constructs a structured `SearchOptions`. 
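+
+  These options are sent in the body of the list request, merged with the
+  `prefix` passed to `Supabase.Storage.list_objects/4`. A sketch (the `client`
+  and `bucket` values are assumed):
+
+      search =
+        Supabase.Storage.SearchOptions.parse!(%{limit: 50, sort_by: %{column: "created_at", order: "desc"}})
+
+      Supabase.Storage.list_objects(client, bucket, "avatars/", search)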
+ + ## Examples + + ### Parsing search options + + search_attrs = %{ + limit: 50, + offset: 10, + sort_by: %{column: "created_at", order: "desc"} + } + Supabase.Storage.SearchOptions.parse!(search_attrs) + """ + + use Ecto.Schema + + import Ecto.Changeset, only: [cast: 3, apply_action!: 2] + + @type t :: %__MODULE__{ + limit: integer(), + offset: integer(), + sort_by: %{ + column: String.t(), + order: String.t() + } + } + + @fields ~w(limit offset sort_by)a + + @primary_key false + @derive Jason.Encoder + embedded_schema do + field(:limit, :integer, default: 100) + field(:offset, :integer, default: 0) + field(:sort_by, :map, default: %{column: "name", order: "asc"}) + end + + @spec parse!(map) :: t + def parse!(attrs) do + %__MODULE__{} + |> cast(attrs, @fields) + |> apply_action!(:parse) + end +end diff --git a/lib/supabase/storage_behaviour.ex b/lib/supabase/storage_behaviour.ex new file mode 100644 index 0000000..7dee4fb --- /dev/null +++ b/lib/supabase/storage_behaviour.ex @@ -0,0 +1,43 @@ +defmodule Supabase.StorageBehaviour do + @moduledoc "Defines Supabase Storage Client callbacks" + + alias Supabase.Storage.Bucket + alias Supabase.Storage.Object + alias Supabase.Storage.ObjectOptions, as: Opts + alias Supabase.Storage.SearchOptions, as: Search + + @type conn :: atom + @type reason :: String.t() | atom + @type result(a) :: {:ok, a} | {:error, reason} | {:error, :invalid_client} + + @callback list_buckets(conn) :: result([Bucket.t()]) + @callback retrieve_bucket_info(conn, id) :: result(Bucket.t()) + when id: String.t() + @callback create_bucket(conn, map) :: result(Bucket.t()) + @callback update_bucket(conn, Bucket.t(), map) :: result(Bucket.t()) + @callback empty_bucket(conn, Bucket.t()) :: result(:emptied) + @callback delete_bucket(conn, Bucket.t()) :: result(:deleted) + + @callback remove_object(conn, Bucket.t(), Object.t()) :: result(:deleted) + @callback move_object(conn, Bucket.t(), Object.t(), String.t()) :: result(:moved) + @callback copy_object(conn, Bucket.t(), Object.t(), String.t()) :: result(:copied) + @callback retrieve_object_info(conn, Bucket.t(), String.t()) :: result(Object.t()) + @callback list_objects(conn, Bucket.t(), prefix, Search.t()) :: result([Object.t()]) + when prefix: String.t() + @callback upload_object(conn, Bucket.t(), dest, source, Opts.t()) :: result(Object.t()) + when dest: String.t(), + source: Path.t() + @callback download_object(conn, Bucket.t(), wildcard) :: result(binary) + when wildcard: String.t() + @callback download_object_lazy(conn, Bucket.t(), wildcard) :: result(Stream.t()) + when wildcard: String.t() + @callback save_object(conn, dest, Bucket.t(), wildcard) :: + :ok | {:error, atom} | {:error, :invalid_client} + when wildcard: String.t(), + dest: Path.t() + @callback save_object_stream(conn, dest, Bucket.t(), wildcard) :: + :ok | {:error, atom} | {:error, :invalid_client} + when wildcard: String.t(), + dest: Path.t() + @callback create_signed_url(conn, Bucket.t(), String.t(), integer) :: result(String.t()) +end diff --git a/lib/supabase/types/atom.ex b/lib/supabase/types/atom.ex new file mode 100644 index 0000000..940a082 --- /dev/null +++ b/lib/supabase/types/atom.ex @@ -0,0 +1,30 @@ +defmodule Supabase.Types.Atom do + @moduledoc """ + A custom type for Ecto that allows atoms to be used as fields in schemas. 
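+
+  A minimal sketch of using it as a field type (the schema module and field
+  name are illustrative):
+
+      defmodule MySchema do
+        use Ecto.Schema
+
+        @primary_key false
+        embedded_schema do
+          field(:client_name, Supabase.Types.Atom)
+        end
+      end
+
+  Values are dumped as plain strings; on cast and load the string must map to
+  an already existing atom (see `String.to_existing_atom/1`).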
+ """ + + use Ecto.ParameterizedType + + @impl true + def type(_), do: :string + + @impl true + def init(_opts) do + [] + end + + @impl true + def cast(v, _opts) when is_atom(v), do: {:ok, v} + + def cast(v, _opts) when is_binary(v), + do: {:ok, Module.concat(Elixir, String.to_existing_atom(v))} + + @impl true + def dump(v, _opts, _) when is_atom(v), do: {:ok, Atom.to_string(v)} + + @impl true + def load(v, _opts, _) when is_binary(v), + do: {:ok, Module.concat(Elixir, String.to_existing_atom(v))} + + def load(v, _opts, _), do: {:ok, v} +end diff --git a/mix.exs b/mix.exs new file mode 100644 index 0000000..1cb28c0 --- /dev/null +++ b/mix.exs @@ -0,0 +1,61 @@ +defmodule SupabaseAuth.MixProject do + use Mix.Project + + @version "0.1.0" + @source_url "https://github.com/zoedsoupe/supabase" + + def project do + [ + app: :supabase_auth, + version: @version, + elixir: "~> 1.15", + start_permanent: Mix.env() == :prod, + deps: deps(), + docs: docs(), + package: package(), + description: description() + ] + end + + # Run "mix help compile.app" to learn about applications. + def application do + [ + extra_applications: [:logger] + ] + end + + # Run "mix help deps" to learn about dependencies. + defp deps do + [ + {:plug, "~> 1.15"}, + {:supabase_potion, "~> 0.3"}, + {:ex_doc, ">= 0.0.0", runtime: false} + ] + end + + defp package do + %{ + licenses: ["MIT"], + contributors: ["zoedsoupe"], + links: %{ + "GitHub" => @source_url, + "Docs" => "https://hexdocs.pm/supabase_auth" + }, + files: ~w[lib mix.exs README.md LICENSE] + } + end + + defp docs do + [ + main: "Supabase.GoTrue", + extras: ["README.md"] + ] + end + + defp description do + """ + Integration with the GoTrue API from Supabase services. + Provide authentication with MFA, password and magic link. + """ + end +end diff --git a/mix.lock b/mix.lock new file mode 100644 index 0000000..511401b --- /dev/null +++ b/mix.lock @@ -0,0 +1,27 @@ +%{ + "bunt": {:hex, :bunt, "1.0.0", "081c2c665f086849e6d57900292b3a161727ab40431219529f13c4ddcf3e7a44", [:mix], [], "hexpm", "dc5f86aa08a5f6fa6b8096f0735c4e76d54ae5c9fa2c143e5a1fc7c1cd9bb6b5"}, + "castore": {:hex, :castore, "1.0.5", "9eeebb394cc9a0f3ae56b813459f990abb0a3dedee1be6b27fdb50301930502f", [:mix], [], "hexpm", "8d7c597c3e4a64c395980882d4bca3cebb8d74197c590dc272cfd3b6a6310578"}, + "credo": {:hex, :credo, "1.7.3", "05bb11eaf2f2b8db370ecaa6a6bda2ec49b2acd5e0418bc106b73b07128c0436", [:mix], [{:bunt, "~> 0.2.1 or ~> 1.0", [hex: :bunt, repo: "hexpm", optional: false]}, {:file_system, "~> 0.2 or ~> 1.0", [hex: :file_system, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "35ea675a094c934c22fb1dca3696f3c31f2728ae6ef5a53b5d648c11180a4535"}, + "decimal": {:hex, :decimal, "2.1.1", "5611dca5d4b2c3dd497dec8f68751f1f1a54755e8ed2a966c2633cf885973ad6", [:mix], [], "hexpm", "53cfe5f497ed0e7771ae1a475575603d77425099ba5faef9394932b35020ffcc"}, + "dialyxir": {:hex, :dialyxir, "1.4.3", "edd0124f358f0b9e95bfe53a9fcf806d615d8f838e2202a9f430d59566b6b53b", [:mix], [{:erlex, ">= 0.2.6", [hex: :erlex, repo: "hexpm", optional: false]}], "hexpm", "bf2cfb75cd5c5006bec30141b131663299c661a864ec7fbbc72dfa557487a986"}, + "earmark_parser": {:hex, :earmark_parser, "1.4.39", "424642f8335b05bb9eb611aa1564c148a8ee35c9c8a8bba6e129d51a3e3c6769", [:mix], [], "hexpm", "06553a88d1f1846da9ef066b87b57c6f605552cfbe40d20bd8d59cc6bde41944"}, + "ecto": {:hex, :ecto, "3.11.1", "4b4972b717e7ca83d30121b12998f5fcdc62ba0ed4f20fd390f16f3270d85c3e", [:mix], [{:decimal, "~> 2.0", [hex: 
:decimal, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "ebd3d3772cd0dfcd8d772659e41ed527c28b2a8bde4b00fe03e0463da0f1983b"}, + "erlex": {:hex, :erlex, "0.2.6", "c7987d15e899c7a2f34f5420d2a2ea0d659682c06ac607572df55a43753aa12e", [:mix], [], "hexpm", "2ed2e25711feb44d52b17d2780eabf998452f6efda104877a3881c2f8c0c0c75"}, + "ex_doc": {:hex, :ex_doc, "0.31.1", "8a2355ac42b1cc7b2379da9e40243f2670143721dd50748bf6c3b1184dae2089", [:mix], [{:earmark_parser, "~> 1.4.39", [hex: :earmark_parser, repo: "hexpm", optional: false]}, {:makeup_c, ">= 0.1.1", [hex: :makeup_c, repo: "hexpm", optional: true]}, {:makeup_elixir, "~> 0.14", [hex: :makeup_elixir, repo: "hexpm", optional: false]}, {:makeup_erlang, "~> 0.1", [hex: :makeup_erlang, repo: "hexpm", optional: false]}], "hexpm", "3178c3a407c557d8343479e1ff117a96fd31bafe52a039079593fb0524ef61b0"}, + "file_system": {:hex, :file_system, "1.0.0", "b689cc7dcee665f774de94b5a832e578bd7963c8e637ef940cd44327db7de2cd", [:mix], [], "hexpm", "6752092d66aec5a10e662aefeed8ddb9531d79db0bc145bb8c40325ca1d8536d"}, + "finch": {:hex, :finch, "0.17.0", "17d06e1d44d891d20dbd437335eebe844e2426a0cd7e3a3e220b461127c73f70", [:mix], [{:castore, "~> 0.1 or ~> 1.0", [hex: :castore, repo: "hexpm", optional: false]}, {:mime, "~> 1.0 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:mint, "~> 1.3", [hex: :mint, repo: "hexpm", optional: false]}, {:nimble_options, "~> 0.4 or ~> 1.0", [hex: :nimble_options, repo: "hexpm", optional: false]}, {:nimble_pool, "~> 0.2.6 or ~> 1.0", [hex: :nimble_pool, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "8d014a661bb6a437263d4b5abf0bcbd3cf0deb26b1e8596f2a271d22e48934c7"}, + "hpax": {:hex, :hpax, "0.1.2", "09a75600d9d8bbd064cdd741f21fc06fc1f4cf3d0fcc335e5aa19be1a7235c84", [:mix], [], "hexpm", "2c87843d5a23f5f16748ebe77969880e29809580efdaccd615cd3bed628a8c13"}, + "jason": {:hex, :jason, "1.4.1", "af1504e35f629ddcdd6addb3513c3853991f694921b1b9368b0bd32beb9f1b63", [:mix], [{:decimal, "~> 1.0 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: true]}], "hexpm", "fbb01ecdfd565b56261302f7e1fcc27c4fb8f32d56eab74db621fc154604a7a1"}, + "makeup": {:hex, :makeup, "1.1.1", "fa0bc768698053b2b3869fa8a62616501ff9d11a562f3ce39580d60860c3a55e", [:mix], [{:nimble_parsec, "~> 1.2.2 or ~> 1.3", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "5dc62fbdd0de44de194898b6710692490be74baa02d9d108bc29f007783b0b48"}, + "makeup_elixir": {:hex, :makeup_elixir, "0.16.1", "cc9e3ca312f1cfeccc572b37a09980287e243648108384b97ff2b76e505c3555", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: false]}, {:nimble_parsec, "~> 1.2.3 or ~> 1.3", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "e127a341ad1b209bd80f7bd1620a15693a9908ed780c3b763bccf7d200c767c6"}, + "makeup_erlang": {:hex, :makeup_erlang, "0.1.3", "d684f4bac8690e70b06eb52dad65d26de2eefa44cd19d64a8095e1417df7c8fd", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: false]}], "hexpm", "b78dc853d2e670ff6390b605d807263bf606da3c82be37f9d7f68635bd886fc9"}, + "mime": {:hex, :mime, "2.0.5", "dc34c8efd439abe6ae0343edbb8556f4d63f178594894720607772a041b04b02", [:mix], [], "hexpm", "da0d64a365c45bc9935cc5c8a7fc5e49a0e0f9932a761c55d6c52b142780a05c"}, + "mint": {:hex, :mint, "1.5.2", 
"4805e059f96028948870d23d7783613b7e6b0e2fb4e98d720383852a760067fd", [:mix], [{:castore, "~> 0.1.0 or ~> 1.0", [hex: :castore, repo: "hexpm", optional: true]}, {:hpax, "~> 0.1.1", [hex: :hpax, repo: "hexpm", optional: false]}], "hexpm", "d77d9e9ce4eb35941907f1d3df38d8f750c357865353e21d335bdcdf6d892a02"}, + "nimble_options": {:hex, :nimble_options, "1.1.0", "3b31a57ede9cb1502071fade751ab0c7b8dbe75a9a4c2b5bbb0943a690b63172", [:mix], [], "hexpm", "8bbbb3941af3ca9acc7835f5655ea062111c9c27bcac53e004460dfd19008a99"}, + "nimble_parsec": {:hex, :nimble_parsec, "1.4.0", "51f9b613ea62cfa97b25ccc2c1b4216e81df970acd8e16e8d1bdc58fef21370d", [:mix], [], "hexpm", "9c565862810fb383e9838c1dd2d7d2c437b3d13b267414ba6af33e50d2d1cf28"}, + "nimble_pool": {:hex, :nimble_pool, "1.0.0", "5eb82705d138f4dd4423f69ceb19ac667b3b492ae570c9f5c900bb3d2f50a847", [:mix], [], "hexpm", "80be3b882d2d351882256087078e1b1952a28bf98d0a287be87e4a24a710b67a"}, + "plug": {:hex, :plug, "1.15.2", "94cf1fa375526f30ff8770837cb804798e0045fd97185f0bb9e5fcd858c792a3", [:mix], [{:mime, "~> 1.0 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:plug_crypto, "~> 1.1.1 or ~> 1.2 or ~> 2.0", [hex: :plug_crypto, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4.3 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "02731fa0c2dcb03d8d21a1d941bdbbe99c2946c0db098eee31008e04c6283615"}, + "plug_crypto": {:hex, :plug_crypto, "2.0.0", "77515cc10af06645abbfb5e6ad7a3e9714f805ae118fa1a70205f80d2d70fe73", [:mix], [], "hexpm", "53695bae57cc4e54566d993eb01074e4d894b65a3766f1c43e2c61a1b0f45ea9"}, + "supabase_potion": {:hex, :supabase_potion, "0.3.0", "9c63eda160d5eaece75c04caf892dbfa9ab19065deff6ce0eca81301f69f44e1", [:mix], [{:ecto, "~> 3.10", [hex: :ecto, repo: "hexpm", optional: false]}, {:ex_doc, ">= 0.0.0", [hex: :ex_doc, repo: "hexpm", optional: false]}, {:finch, "~> 0.16", [hex: :finch, repo: "hexpm", optional: false]}, {:jason, "~> 1.4", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "456d3f3238a1eb959981beaa979270e9d71a01da3fc47d99e5a745a290bc9ea8"}, + "telemetry": {:hex, :telemetry, "1.2.1", "68fdfe8d8f05a8428483a97d7aab2f268aaff24b49e0f599faa091f1d4e7f61c", [:rebar3], [], "hexpm", "dad9ce9d8effc621708f99eac538ef1cbe05d6a874dd741de2e689c47feafed5"}, +} diff --git a/test/supabase_test.exs b/test/supabase_test.exs new file mode 100644 index 0000000..05a7b89 --- /dev/null +++ b/test/supabase_test.exs @@ -0,0 +1,74 @@ +defmodule SupabaseTest do + use ExUnit.Case, async: true + + alias Supabase.Client + alias Supabase.ClientRegistry + alias Supabase.MissingSupabaseConfig + + describe "init_client/1" do + test "should return a valid PID on valid attrs" do + {:ok, pid} = + Supabase.init_client(%{ + name: :test, + conn: %{ + base_url: "https://test.supabase.co", + api_key: "test" + } + }) + + assert pid == ClientRegistry.lookup(:test) + assert {:ok, client} = Client.retrieve_client(:test) + assert client.name == :test + assert client.conn.base_url == "https://test.supabase.co" + assert client.conn.api_key == "test" + end + + test "should return an error changeset on invalid attrs" do + {:error, changeset} = Supabase.init_client(%{}) + + assert changeset.errors == [ + name: {"can't be blank", [validation: :required]}, + conn: {"can't be blank", [validation: :required]} + ] + + {:error, changeset} = Supabase.init_client(%{name: :test, conn: %{}}) + assert conn = changeset.changes.conn + + assert conn.errors == [ + api_key: {"can't be blank", [validation: :required]}, + base_url: {"can't be blank", [validation: 
:required]} + ] + end + end + + describe "init_client!/1" do + test "should return a valid PID on valid attrs" do + pid = + Supabase.init_client!(%{ + name: :test2, + conn: %{ + base_url: "https://test.supabase.co", + api_key: "test" + } + }) + + assert pid == ClientRegistry.lookup(:test2) + assert {:ok, client} = Client.retrieve_client(:test2) + assert client.name == :test2 + assert client.conn.base_url == "https://test.supabase.co" + assert client.conn.api_key == "test" + end + + test "should raise MissingSupabaseConfig on missing base_url" do + assert_raise MissingSupabaseConfig, fn -> + Supabase.init_client!(%{name: :test, conn: %{api_key: "test"}}) + end + end + + test "should raise MissingSupabaseConfig on missing api_key" do + assert_raise MissingSupabaseConfig, fn -> + Supabase.init_client!(%{name: :test, conn: %{base_url: "https://test.supabase.co"}}) + end + end + end +end diff --git a/test/test_helper.exs b/test/test_helper.exs new file mode 100644 index 0000000..869559e --- /dev/null +++ b/test/test_helper.exs @@ -0,0 +1 @@ +ExUnit.start()