diff --git a/.tmp/data.db b/.tmp/data.db new file mode 100644 index 00000000..53bce2d9 Binary files /dev/null and b/.tmp/data.db differ diff --git a/README.md b/README.md index 3000e0e4..eacc10dc 100644 --- a/README.md +++ b/README.md @@ -8,14 +8,20 @@ Inside this project, you'll see the following folders and files: ``` . +├── cms/ # Strapi CMS for content management ├── public/ ├── src/ │ ├── components/ │ ├── content/ │ │ ├── docs/ -│ ├── content.config/ +│ │ ├── blog/ # Blog posts (MDX files generated by CMS) +│ │ └── press/ # Press content (MDX files generated by CMS) +│ ├── content.config.ts │ ├── layouts/ │ ├── pages/ +│ │ ├── blog/ # Blog listing and individual post pages +│ │ ├── index.astro +│ │ └── press.astro # Press page │ ├── styles/ │ └── env.d.ts ├── astro.config.mjs @@ -33,6 +39,18 @@ Static assets, like favicons or images, can be placed in the `public/` directory For more information about the way our documentation projects are set up, please refer to our [documentation style guide](https://interledger.tech/#docs-site-building). +### Running the CMS + +```bash +cd cms +npm install # If not already installed +npm run develop +``` + +The Strapi admin panel will be available at: + +For detailed CMS documentation, see [cms/README.md](cms/README.md). + ## Local Development We are using [Bun](https://bun.sh/) in this repository, but you could theoretically use the package manager of your choice. 
To install Bun, run diff --git a/astro.config.mjs b/astro.config.mjs index c73984cc..12abf0d5 100644 --- a/astro.config.mjs +++ b/astro.config.mjs @@ -8,7 +8,6 @@ import mdx from '@astrojs/mdx' // https://astro.build/config export default defineConfig({ site: 'https://interledger.org', - base: '/developers', integrations: [ starlight({ title: 'Interledger', @@ -29,14 +28,14 @@ export default defineConfig({ { tag: 'script', attrs: { - src: '/developers/scripts/highlight.min.js', + src: '/scripts/highlight.min.js', defer: true } }, { tag: 'script', attrs: { - src: '/developers/scripts/init.js', + src: '/scripts/init.js', defer: true } }, diff --git a/cms/.gitignore b/cms/.gitignore new file mode 100644 index 00000000..91e0eb7a --- /dev/null +++ b/cms/.gitignore @@ -0,0 +1,115 @@ +############################ +# OS X +############################ + +.DS_Store +.AppleDouble +.LSOverride +Icon +.Spotlight-V100 +.Trashes +._* + + +############################ +# Linux +############################ + +*~ + + +############################ +# Windows +############################ + +Thumbs.db +ehthumbs.db +Desktop.ini +$RECYCLE.BIN/ +*.cab +*.msi +*.msm +*.msp + + +############################ +# Packages +############################ + +*.7z +*.csv +*.dat +*.dmg +*.gz +*.iso +*.jar +*.rar +*.tar +*.zip +*.com +*.class +*.dll +*.exe +*.o +*.seed +*.so +*.swo +*.swp +*.swn +*.swm +*.out +*.pid + + +############################ +# Logs and databases +############################ + +.tmp +*.log +*.sql +*.sqlite +*.sqlite3 + + +############################ +# Misc. 
+############################ + +*# +ssl +.idea +nbproject +public/uploads/* +!public/uploads/.gitkeep + +############################ +# Node.js +############################ + +lib-cov +lcov.info +pids +logs +results +node_modules +.node_history + +############################ +# Tests +############################ + +coverage + +############################ +# Strapi +############################ + +.env +license.txt +exports +.strapi +dist +build +.strapi-updater.json +.strapi-cloud.json diff --git a/cms/.strapirc.json b/cms/.strapirc.json new file mode 100644 index 00000000..c5d1a2ad --- /dev/null +++ b/cms/.strapirc.json @@ -0,0 +1,3 @@ +{ + "autoReload": true +} diff --git a/cms/QUICKSTART.md b/cms/QUICKSTART.md new file mode 100644 index 00000000..b352b2fc --- /dev/null +++ b/cms/QUICKSTART.md @@ -0,0 +1,116 @@ +# Quick Start Guide + +## First Time Setup + +### 1. Generate Secrets + +Before running the CMS for the first time, you need to generate secure secrets. Run this command from the `cms` directory: + +```bash +node -e "console.log('APP_KEYS=' + Array(4).fill(0).map(() => require('crypto').randomBytes(16).toString('base64')).join(','))" +node -e "console.log('API_TOKEN_SALT=' + require('crypto').randomBytes(16).toString('base64'))" +node -e "console.log('ADMIN_JWT_SECRET=' + require('crypto').randomBytes(16).toString('base64'))" +node -e "console.log('TRANSFER_TOKEN_SALT=' + require('crypto').randomBytes(16).toString('base64'))" +node -e "console.log('JWT_SECRET=' + require('crypto').randomBytes(16).toString('base64'))" +``` + +Copy the output and replace the values in your `.env` file. + +### 2. Install Dependencies + +```bash +npm install +``` + +### 3. Start the CMS + +```bash +npm run develop +``` + +### 4. Create Admin User + +On first run, navigate to http://localhost:1337/admin and create your admin account. + +## Creating Your First Press Item + +1. Log in to the admin panel at http://localhost:1337/admin +2. 
Click on "Press Items" in the left sidebar +3. Click "Create new entry" +4. Fill in the required fields: + - **Title**: The headline + - **Description**: A short excerpt (1-2 sentences) + - **Publish Date**: When the item was published + - **Slug**: Auto-generated from title, but can be customized +5. Optional fields: + - **Publication**: Name of the publication (e.g., "TechCrunch") + - **Publication Logo**: URL to publication's logo + - **External URL**: Link to the external article + - **Content**: Full article content (rich text) + - **Featured**: Check to highlight this item + - **Category**: Choose press-release, media-mention, or announcement +6. Click "Save" to create a draft +7. Click "Publish" to make it live + +Once published, an MDX file will be automatically created in `../src/content/press/` and will appear on the `/developers/press` page. + +## Viewing Your Content + +1. Make sure the Astro dev server is running: + ```bash + cd .. # Go back to root + bun run start + ``` + +2. Visit http://localhost:1103/developers/press to see your press items + +## Editing Content + +1. Find the press item in the Strapi admin +2. Make your changes +3. Click "Save" and "Publish" +4. The MDX file will be automatically updated + +## Unpublishing Content + +1. Find the press item in the Strapi admin +2. Click the "Unpublish" button +3. The MDX file will be automatically deleted + +## Tips + +- **Drafts**: Save items as drafts to work on them before publishing +- **Featured Items**: Use sparingly - featured items appear in a prominent card grid +- **External Links**: If you provide an External URL, the press item will link to that instead of showing local content +- **Publication Logos**: For best display, use square logos with transparent backgrounds +- **Rich Text**: The content field supports formatting, links, headings, etc. 
+ +## Troubleshooting + +### Port Already in Use + +If port 1337 is already in use, you can change it in `.env`: + +``` +PORT=1338 +``` + +### MDX Files Not Generating + +1. Ensure the item is **published** (not just saved) +2. Check the Strapi console for errors +3. Verify the `MDX_OUTPUT_PATH` in `.env` is correct +4. Check file permissions on `src/content/press/` + +### Can't Access Admin Panel + +1. Make sure the CMS is running (`npm run develop`) +2. Check that nothing else is using port 1337 +3. Try accessing http://127.0.0.1:1337/admin instead + +## Next Steps + +- Customize the press item schema in `src/api/press-item/content-types/press-item/schema.json` +- Modify the MDX generation logic in `src/api/press-item/content-types/press-item/lifecycles.ts` +- Update the press page styling in `../src/pages/press.astro` +- Configure additional content types for other sections diff --git a/cms/README.md b/cms/README.md new file mode 100644 index 00000000..c1794833 --- /dev/null +++ b/cms/README.md @@ -0,0 +1,183 @@ +# Interledger Developers Portal - Strapi CMS + +This is the Strapi CMS for managing content that will be rendered on the Interledger Developers Portal. The CMS automatically generates MDX files that are read by the Astro site. + +## Features + +- **Press Management**: Create and manage press releases, media mentions, and announcements +- **Automatic MDX Generation**: Content is automatically written to MDX files when published +- **Draft & Publish Workflow**: Content can be drafted and published when ready +- **SQLite Database**: Lightweight database for easy development and deployment + +## Getting Started + +### Prerequisites + +- Node.js >= 18.0.0 <= 22.x.x +- npm >= 6.0.0 + +### Installation + +The dependencies should already be installed. If not, run: + +```bash +cd cms +npm install +``` + +### Configuration + +The CMS is configured via environment variables in `.env`. 
Key settings: + +- `PORT`: CMS runs on port 1337 (default) +- `DATABASE_CLIENT`: Using better-sqlite3 +- `MDX_OUTPUT_PATH`: Where MDX files are written (`../src/content/press`) + +### Running the CMS + +Start the development server: + +```bash +cd cms +npm run develop +``` + +The Strapi admin panel will be available at: http://localhost:1337/admin + +On first run, you'll be prompted to create an admin user. + +### Production Build + +To build for production: + +```bash +cd cms +npm run build +npm run start +``` + +## Content Types + +### Press Item + +The Press Item content type includes the following fields: + +- **Title** (required): Headline of the press item +- **Description** (required): Short description or excerpt +- **Publish Date** (required): When the item was published +- **Slug** (required, auto-generated): URL-friendly identifier +- **Publication** (optional): Name of the publication (e.g., "TechCrunch") +- **Publication Logo** (optional): URL to the publication's logo image +- **External URL** (optional): Link to the external article +- **Content** (optional): Full article content (rich text) +- **Featured** (boolean): Whether to feature this item prominently +- **Category** (enum): Type of press item + - `press-release`: Official press releases + - `media-mention`: Coverage by external media + - `announcement`: General announcements + +## How It Works + +### MDX File Generation + +When you publish or update a Press Item in Strapi: + +1. The lifecycle hooks in `src/api/press-item/content-types/press-item/lifecycles.ts` are triggered +2. The content is converted to MDX format with frontmatter +3. An MDX file is created/updated in `../src/content/press/` with the slug as the filename +4. 
The Astro site automatically picks up the new content + +### File Naming + +MDX files are named using the slug: `{slug}.mdx` + +Example: If slug is `interledger-launches-new-platform`, the file will be `interledger-launches-new-platform.mdx` + +### Unpublishing Content + +When you unpublish a Press Item in Strapi, the corresponding MDX file is automatically deleted. + +## Astro Integration + +The generated MDX files are consumed by the Astro site at `/press`. The press page: + +- Displays all published press items sorted by date +- Shows featured items in a prominent card grid +- Lists regular items in a clean timeline format +- Includes a "Featured In" section with publication logos + +## Development Workflow + +1. **Start the CMS**: `cd cms && npm run develop` +2. **Access Admin Panel**: http://localhost:1337/admin +3. **Create Content**: Add new Press Items through the UI +4. **Publish**: When ready, publish the content +5. **View on Site**: The content automatically appears at `/developers/press` + +## File Structure + +``` +cms/ +├── config/ # Strapi configuration files +│ ├── admin.ts +│ ├── database.ts +│ ├── middlewares.ts +│ └── server.ts +├── src/ +│ ├── api/ +│ │ └── press-item/ # Press Item API +│ │ ├── content-types/ +│ │ │ └── press-item/ +│ │ │ ├── schema.json +│ │ │ └── lifecycles.ts # MDX generation logic +│ │ ├── controllers/ +│ │ ├── routes/ +│ │ └── services/ +│ └── index.ts +├── .env # Environment variables +├── .gitignore +├── package.json +├── tsconfig.json +└── README.md +``` + +## Tips + +- **Rich Text Content**: The content field supports rich text. It will be converted to markdown when generating MDX files. +- **External Links**: If you provide an `externalUrl`, the press item will link to the external article instead of a local page. +- **Featured Items**: Use the featured flag to highlight important press coverage. +- **Publication Logos**: For best results, use square logos with transparent backgrounds. 
+- **Slugs**: Slugs are auto-generated from the title but can be customized. Ensure they're unique. + +## Troubleshooting + +### MDX files not generating + +1. Check that the Press Item is **published** (not just saved as draft) +2. Verify the `MDX_OUTPUT_PATH` in `.env` points to the correct directory +3. Check file permissions on the `src/content/press` directory +4. Look for errors in the Strapi console output + +### Database issues + +The SQLite database is stored in `.tmp/data.db`. To reset: + +```bash +rm -rf cms/.tmp +``` + +Then restart Strapi. You'll need to create a new admin user. + +## Security Notes + +- The `.env` file contains secrets - never commit it to version control +- Change the default secrets in `.env` before deploying to production +- The CMS is configured to allow CORS from `localhost:1103` (the Astro dev server) +- Update `FRONTEND_ORIGINS` in `.env` and `config/middlewares.ts` for production + +## Support + +For issues related to: +- **Strapi CMS**: Check [Strapi Documentation](https://docs.strapi.io/) +- **Content Issues**: Check the Strapi console logs +- **Astro Integration**: Check the main README in the repository root diff --git a/cms/config/admin.ts b/cms/config/admin.ts new file mode 100644 index 00000000..f1799fa3 --- /dev/null +++ b/cms/config/admin.ts @@ -0,0 +1,17 @@ +export default ({ env }) => ({ + auth: { + secret: env('ADMIN_JWT_SECRET'), + }, + apiToken: { + salt: env('API_TOKEN_SALT'), + }, + transfer: { + token: { + salt: env('TRANSFER_TOKEN_SALT'), + }, + }, + flags: { + nps: env.bool('FLAG_NPS', true), + promoteEE: env.bool('FLAG_PROMOTE_EE', true), + }, +}); diff --git a/cms/config/database.ts b/cms/config/database.ts new file mode 100644 index 00000000..e0486493 --- /dev/null +++ b/cms/config/database.ts @@ -0,0 +1,14 @@ +import path from 'path'; + +export default ({ env }) => ({ + connection: { + client: 'sqlite', + connection: { + filename: path.resolve( + process.cwd(), + env('DATABASE_FILENAME', '.tmp/data.db') 
+ ), + }, + useNullAsDefault: true, + }, +}); diff --git a/cms/config/middlewares.ts b/cms/config/middlewares.ts new file mode 100644 index 00000000..f2c02889 --- /dev/null +++ b/cms/config/middlewares.ts @@ -0,0 +1,36 @@ +export default [ + 'strapi::logger', + 'strapi::errors', + { + name: 'strapi::security', + config: { + contentSecurityPolicy: { + useDefaults: true, + directives: { + 'connect-src': ["'self'", 'https:'], + 'img-src': ["'self'", 'data:', 'blob:', 'market-assets.strapi.io'], + 'media-src': [ + "'self'", + 'data:', + 'blob:', + 'market-assets.strapi.io', + ], + upgradeInsecureRequests: null, + }, + }, + }, + }, + { + name: 'strapi::cors', + config: { + enabled: true, + origin: ['http://localhost:1103', 'http://127.0.0.1:1103'], + }, + }, + 'strapi::poweredBy', + 'strapi::query', + 'strapi::body', + 'strapi::session', + 'strapi::favicon', + 'strapi::public', +]; diff --git a/cms/config/plugins.ts b/cms/config/plugins.ts new file mode 100644 index 00000000..473b5342 --- /dev/null +++ b/cms/config/plugins.ts @@ -0,0 +1,5 @@ +export default () => ({ + ckeditor: { + enabled: true, + }, +}); diff --git a/cms/config/server.ts b/cms/config/server.ts new file mode 100644 index 00000000..a54a2414 --- /dev/null +++ b/cms/config/server.ts @@ -0,0 +1,10 @@ +export default ({ env }) => ({ + host: env('HOST', '0.0.0.0'), + port: env.int('PORT', 1337), + app: { + keys: env.array('APP_KEYS'), + }, + webhooks: { + populateRelations: env.bool('WEBHOOKS_POPULATE_RELATIONS', false), + }, +}); diff --git a/cms/copy-schemas.js b/cms/copy-schemas.js new file mode 100644 index 00000000..bd62c423 --- /dev/null +++ b/cms/copy-schemas.js @@ -0,0 +1,32 @@ +const fs = require('fs'); +const path = require('path'); + +function copyDir(src, dest) { + // Create destination directory + if (!fs.existsSync(dest)) { + fs.mkdirSync(dest, { recursive: true }); + } + + // Read source directory + const entries = fs.readdirSync(src, { withFileTypes: true }); + + for (const entry of entries) 
{ + const srcPath = path.join(src, entry.name); + const destPath = path.join(dest, entry.name); + + if (entry.isDirectory()) { + copyDir(srcPath, destPath); + } else if (entry.name.endsWith('.json')) { + fs.copyFileSync(srcPath, destPath); + console.log(`✓ Copied ${srcPath} to ${destPath}`); + } + } +} + +// Copy all JSON files from src to dist/src +const srcDir = path.join(__dirname, 'src'); +const destDir = path.join(__dirname, 'dist', 'src'); + +console.log('📋 Copying schema JSON files...'); +copyDir(srcDir, destDir); +console.log('✅ Schema files copied successfully!'); diff --git a/cms/package.json b/cms/package.json new file mode 100644 index 00000000..86aebf40 --- /dev/null +++ b/cms/package.json @@ -0,0 +1,34 @@ +{ + "name": "interledger-cms", + "private": true, + "version": "0.1.0", + "description": "Strapi CMS for Interledger developer portal", + "scripts": { + "develop": "strapi develop", + "start": "strapi start", + "build": "strapi build", + "strapi": "strapi" + }, + "strapi": { + "uuid": "d5c8f4e2-9a1b-4c3d-8e7f-6a5b4c3d2e1f" + }, + "dependencies": { + "@_sh/strapi-plugin-ckeditor": "^6.0.3", + "@ckeditor/strapi-plugin-ckeditor": "^1.1.1", + "@strapi/strapi": "5.31.3", + "better-sqlite3": "11.5.0", + "esbuild": "0.25.11", + "react": "^18.3.1", + "react-dom": "^18.3.1", + "react-router-dom": "^6.30.2", + "styled-components": "^6.1.19" + }, + "devDependencies": { + "@types/node": "^22.10.1", + "typescript": "^5.7.2" + }, + "engines": { + "node": ">=18.0.0 <=22.x.x", + "npm": ">=6.0.0" + } +} diff --git a/cms/public/uploads/.gitkeep b/cms/public/uploads/.gitkeep new file mode 100644 index 00000000..e69de29b diff --git a/cms/src/admin/app.tsx b/cms/src/admin/app.tsx new file mode 100644 index 00000000..f25af1b0 --- /dev/null +++ b/cms/src/admin/app.tsx @@ -0,0 +1,45 @@ +import { setPluginConfig, defaultMarkdownPreset } from '@_sh/strapi-plugin-ckeditor'; +import type { PluginConfig, Preset } from '@_sh/strapi-plugin-ckeditor'; + +const myCustomPreset: 
Preset = { + ...defaultMarkdownPreset, + description: 'Markdown editor without H1', + editorConfig: { + ...defaultMarkdownPreset.editorConfig, + heading: { + options: defaultMarkdownPreset.editorConfig.heading?.options?.filter( + (option) => option.model !== 'heading1' + ), + }, + }, +}; + +const myPluginConfig: PluginConfig = { + presets: [myCustomPreset], +}; + +export default { + register(app: any) { + setPluginConfig(myPluginConfig); + }, + + bootstrap(app: any) { + // Override button labels using DOM manipulation + const interval = setInterval(() => { + // Find all buttons in the admin panel + const buttons = document.querySelectorAll('button'); + buttons.forEach((button) => { + const span = button.querySelector('span'); + if (span && span.textContent === 'Save') { + span.textContent = 'Save as Draft'; + } + if (span && span.textContent === 'Publish') { + span.textContent = 'Publish to Netlify'; + } + }); + }, 100); + + // Store interval for cleanup if needed + (window as any).__strapiButtonInterval = interval; + }, +}; diff --git a/cms/src/admin/webpack.config.js b/cms/src/admin/webpack.config.js new file mode 100644 index 00000000..ae1ab141 --- /dev/null +++ b/cms/src/admin/webpack.config.js @@ -0,0 +1,18 @@ +module.exports = (config) => { + // Copy JSON files after build + const CopyPlugin = require('copy-webpack-plugin'); + + config.plugins.push( + new CopyPlugin({ + patterns: [ + { + from: 'src/**/*.json', + to: '../dist/src/[path][name][ext]', + context: '.' 
+ } + ] + }) + ); + + return config; +}; diff --git a/cms/src/api/blog-post/content-types/blog-post/lifecycles.ts b/cms/src/api/blog-post/content-types/blog-post/lifecycles.ts new file mode 100644 index 00000000..9996d8fa --- /dev/null +++ b/cms/src/api/blog-post/content-types/blog-post/lifecycles.ts @@ -0,0 +1,254 @@ +/** + * Lifecycle callbacks for blog-post + * Generates MDX files that match the blog post format used on the site + * Then commits and pushes to trigger Netlify preview builds + */ + +import fs from 'fs'; +import path from 'path'; +import { exec } from 'child_process'; + +interface MediaFile { + id: number; + url: string; + alternativeText?: string; + name?: string; + width?: number; + height?: number; + formats?: { + thumbnail?: { url: string }; + small?: { url: string }; + medium?: { url: string }; + large?: { url: string }; + }; +} + +interface BlogPost { + id: number; + title: string; + description: string; + slug: string; + date: string; + content: string; + featuredImage?: MediaFile; + lang?: string; + ogImageUrl?: string; + publishedAt?: string; +} + +interface Event { + result?: BlogPost; +} + +/** + * Converts HTML content to markdown-like format + */ +function htmlToMarkdown(html: string): string { + if (!html) return ''; + + return html + .replace(/ /gi, ' ') + .replace(/]*>([\s\S]*?)<\/h1>/gi, '# $1\n\n') + .replace(/]*>([\s\S]*?)<\/h2>/gi, '## $1\n\n') + .replace(/]*>([\s\S]*?)<\/h3>/gi, '### $1\n\n') + .replace(/]*>([\s\S]*?)<\/h4>/gi, '#### $1\n\n') + .replace(/]*>([\s\S]*?)<\/h5>/gi, '##### $1\n\n') + .replace(/]*>([\s\S]*?)<\/h6>/gi, '###### $1\n\n') + .replace(/]*>([\s\S]*?)<\/p>/gi, '$1\n\n') + .replace(/]*>([\s\S]*?)<\/strong>/gi, '**$1**') + .replace(/]*>([\s\S]*?)<\/b>/gi, '**$1**') + .replace(/]*>([\s\S]*?)<\/em>/gi, '*$1*') + .replace(/]*>([\s\S]*?)<\/i>/gi, '*$1*') + .replace(/]*href="([^"]*)"[^>]*>([\s\S]*?)<\/a>/gi, '[$2]($1)') + .replace(/]*>]*>([\s\S]*?)<\/code><\/pre>/gi, '```\n$1\n```') + 
.replace(/]*>([\s\S]*?)<\/code>/gi, '`$1`') + .replace(/]*>/gi, '\n') + .replace(/<\/ul>/gi, '\n') + .replace(/]*>/gi, '\n') + .replace(/<\/ol>/gi, '\n') + .replace(/]*>([\s\S]*?)<\/li>/gi, '- $1\n') + .replace(//gi, '\n') + .replace(/]*>([\s\S]*?)<\/blockquote>/gi, '> $1\n') + .replace(/]*src="([^"]*)"[^>]*alt="([\s\S]*?)"[^>]*>/gi, '![$2]($1)') + .replace(/]*src="([^"]*)"[^>]*>/gi, '![]($1)') + .replace(/<[^>]+>/g, '') + .trim(); +} + +function escapeQuotes(value: string): string { + return value.replace(/"/g, '\\"'); +} + +function formatDate(dateString: string): string { + if (!dateString) return ''; + const date = new Date(dateString); + if (isNaN(date.getTime())) return dateString; + return date.toISOString().split('T')[0]; +} + +function generateFilename(post: BlogPost): string { + const date = formatDate(post.date); + const prefix = date ? `${date}-` : ''; + return `${prefix}${post.slug}.mdx`; +} + +/** + * Gets the image URL from a media field + * Returns the full Strapi URL for local files, or the full URL for external + */ +function getImageUrl(media: MediaFile | undefined): string | undefined { + if (!media?.url) return undefined; + + // If it's a relative URL (starts with /uploads/), prepend the Strapi server URL + if (media.url.startsWith('/uploads/')) { + const strapiUrl = process.env.STRAPI_URL || 'http://localhost:1337'; + return `${strapiUrl}${media.url}`; + } + + // Return the URL as-is for external images + return media.url; +} + +function generateMDX(post: BlogPost): string { + const imageUrl = getImageUrl(post.featuredImage); + + const frontmatterLines = [ + `title: "${escapeQuotes(post.title)}"`, + `description: "${escapeQuotes(post.description)}"`, + post.ogImageUrl ? `ogImageUrl: "${escapeQuotes(post.ogImageUrl)}"` : undefined, + `date: ${formatDate(post.date)}`, + `slug: ${post.slug}`, + post.lang ? `lang: "${escapeQuotes(post.lang)}"` : undefined, + imageUrl ? 
`image: "${escapeQuotes(imageUrl)}"` : undefined, + ].filter(Boolean) as string[]; + + const frontmatter = frontmatterLines.join('\n'); + const content = post.content ? htmlToMarkdown(post.content) : ''; + + return `---\n${frontmatter}\n---\n\n${content}\n`; +} + +async function writeMDXFile(post: BlogPost): Promise { + const outputPath = process.env.BLOG_MDX_OUTPUT_PATH || '../src/content/blog'; + // Resolve from dist/src/api/blog-post/content-types/blog-post/ up to cms root then project root + const baseDir = path.resolve(__dirname, '../../../../../../', outputPath); + + if (!fs.existsSync(baseDir)) { + fs.mkdirSync(baseDir, { recursive: true }); + } + + const filename = generateFilename(post); + const filepath = path.join(baseDir, filename); + const mdxContent = generateMDX(post); + + fs.writeFileSync(filepath, mdxContent, 'utf-8'); + console.log(`✅ Generated Blog Post MDX file: ${filepath}`); +} + +async function deleteMDXFile(post: BlogPost): Promise { + const outputPath = process.env.BLOG_MDX_OUTPUT_PATH || '../src/content/blog'; + // Resolve from dist/src/api/blog-post/content-types/blog-post/ up to cms root then project root + const baseDir = path.resolve(__dirname, '../../../../../../', outputPath); + const filename = generateFilename(post); + const filepath = path.join(baseDir, filename); + + if (fs.existsSync(filepath)) { + fs.unlinkSync(filepath); + console.log(`🗑️ Deleted Blog Post MDX file: ${filepath}`); + } +} + +/** + * Escapes a string for safe use in shell commands (single-quoted context) + */ +function escapeForShell(str: string): string { + // For single-quoted strings, we only need to handle single quotes + // Replace ' with '\'' (end quote, escaped quote, start quote) + return str.replace(/'/g, "'\\''"); +} + +/** + * Commits and pushes changes to git to trigger Netlify preview builds + */ +async function gitCommitAndPush(filepath: string, message: string): Promise { + // Skip git operations if disabled via env var + if 
(process.env.STRAPI_DISABLE_GIT_SYNC === 'true') { + console.log('⏭️ Git sync disabled via STRAPI_DISABLE_GIT_SYNC'); + return; + } + + // Get the project root (where .git lives) + const projectRoot = path.resolve(__dirname, '../../../../../../'); + + // Escape the message for shell (handles quotes and special chars) + const safeMessage = escapeForShell(message); + const safeFilepath = escapeForShell(filepath); + + return new Promise((resolve, reject) => { + // Stage the specific file, commit, and push + // Use single quotes to avoid issues with double quotes in titles + const commands = [ + `git add '${safeFilepath}'`, + `git commit -m '${safeMessage}'`, + `git push` + ].join(' && '); + + exec(commands, { cwd: projectRoot }, (error, stdout, stderr) => { + if (error) { + // Don't fail the lifecycle if git operations fail + // This allows content to still be saved even if git has issues + console.error(`⚠️ Git sync failed: ${error.message}`); + console.error(`stderr: ${stderr}`); + resolve(); // Resolve anyway to not block Strapi + return; + } + console.log(`✅ Git sync complete: ${message}`); + if (stdout) console.log(stdout); + resolve(); + }); + }); +} + +export default { + async afterCreate(event: Event) { + const { result } = event; + if (result && result.publishedAt) { + await writeMDXFile(result); + const filename = generateFilename(result); + const outputPath = process.env.BLOG_MDX_OUTPUT_PATH || '../src/content/blog'; + const baseDir = path.resolve(__dirname, '../../../../../../', outputPath); + const filepath = path.join(baseDir, filename); + await gitCommitAndPush(filepath, `blog: add "${result.title}"`); + } + }, + + async afterUpdate(event: Event) { + const { result } = event; + if (result) { + const filename = generateFilename(result); + const outputPath = process.env.BLOG_MDX_OUTPUT_PATH || '../src/content/blog'; + const baseDir = path.resolve(__dirname, '../../../../../../', outputPath); + const filepath = path.join(baseDir, filename); + + if 
(result.publishedAt) { + await writeMDXFile(result); + await gitCommitAndPush(filepath, `blog: update "${result.title}"`); + } else { + await deleteMDXFile(result); + await gitCommitAndPush(filepath, `blog: unpublish "${result.title}"`); + } + } + }, + + async afterDelete(event: Event) { + const { result } = event; + if (result) { + await deleteMDXFile(result); + const filename = generateFilename(result); + const outputPath = process.env.BLOG_MDX_OUTPUT_PATH || '../src/content/blog'; + const baseDir = path.resolve(__dirname, '../../../../../../', outputPath); + const filepath = path.join(baseDir, filename); + await gitCommitAndPush(filepath, `blog: delete "${result.title}"`); + } + }, +}; diff --git a/cms/src/api/blog-post/content-types/blog-post/schema.json b/cms/src/api/blog-post/content-types/blog-post/schema.json new file mode 100644 index 00000000..d1cb60fe --- /dev/null +++ b/cms/src/api/blog-post/content-types/blog-post/schema.json @@ -0,0 +1,55 @@ +{ + "kind": "collectionType", + "collectionName": "blog_posts", + "info": { + "singularName": "blog-post", + "pluralName": "blog-posts", + "displayName": "Blog Post", + "description": "Engineering blog posts that sync to MDX" + }, + "options": { + "draftAndPublish": true + }, + "pluginOptions": {}, + "attributes": { + "title": { + "type": "string", + "maxLength": 255, + "required": true + }, + "description": { + "type": "text", + "required": true + }, + "slug": { + "type": "uid", + "targetField": "title", + "required": true + }, + "date": { + "type": "date", + "required": true + }, + "lang": { + "type": "string", + "maxLength": 10 + }, + "featuredImage": { + "type": "media", + "multiple": false, + "required": false, + "allowedTypes": ["images"] + }, + "ogImageUrl": { + "type": "string", + "maxLength": 255 + }, + "content": { + "type": "customField", + "customField": "plugin::ckeditor5.CKEditor", + "options": { + "preset": "defaultMarkdown" + } + } + } +} diff --git a/cms/src/api/blog-post/controllers/blog-post.ts 
b/cms/src/api/blog-post/controllers/blog-post.ts new file mode 100644 index 00000000..4e7c76b4 --- /dev/null +++ b/cms/src/api/blog-post/controllers/blog-post.ts @@ -0,0 +1,7 @@ +/** + * blog-post controller + */ + +import { factories } from '@strapi/strapi' + +export default factories.createCoreController('api::blog-post.blog-post'); diff --git a/cms/src/api/blog-post/routes/blog-post.ts b/cms/src/api/blog-post/routes/blog-post.ts new file mode 100644 index 00000000..6d9096bf --- /dev/null +++ b/cms/src/api/blog-post/routes/blog-post.ts @@ -0,0 +1,7 @@ +/** + * blog-post router + */ + +import { factories } from '@strapi/strapi' + +export default factories.createCoreRouter('api::blog-post.blog-post'); diff --git a/cms/src/api/blog-post/services/blog-post.ts b/cms/src/api/blog-post/services/blog-post.ts new file mode 100644 index 00000000..e31b779a --- /dev/null +++ b/cms/src/api/blog-post/services/blog-post.ts @@ -0,0 +1,7 @@ +/** + * blog-post service + */ + +import { factories } from '@strapi/strapi' + +export default factories.createCoreService('api::blog-post.blog-post'); diff --git a/cms/src/api/financial-services-page/content-types/financial-services-page/lifecycles.ts b/cms/src/api/financial-services-page/content-types/financial-services-page/lifecycles.ts new file mode 100644 index 00000000..3fc5790c --- /dev/null +++ b/cms/src/api/financial-services-page/content-types/financial-services-page/lifecycles.ts @@ -0,0 +1,88 @@ +import fs from 'fs'; +import path from 'path'; + +interface FinancialServicesPage { + heroTitle: string; + heroDescription: string; + introText: string; + ctaTitle?: string; + ctaDescription?: string; + ctaEmailLabel?: string; + ctaSubscribeLabel?: string; + publishedAt?: string; +} + +interface Event { + result?: FinancialServicesPage; +} + +function escapeQuotes(value: string | undefined): string { + if (!value) return ''; + return value.replace(/"/g, '\\"'); +} + +function generateMDX(page: FinancialServicesPage): string { + const 
frontmatter = [ + `heroTitle: "${escapeQuotes(page.heroTitle)}"`, + `heroDescription: "${escapeQuotes(page.heroDescription)}"`, + `introText: "${escapeQuotes(page.introText)}"`, + `ctaTitle: "${escapeQuotes(page.ctaTitle || '')}"`, + `ctaDescription: "${escapeQuotes(page.ctaDescription || '')}"`, + `ctaEmailLabel: "${escapeQuotes(page.ctaEmailLabel || 'Contact Us')}"`, + `ctaSubscribeLabel: "${escapeQuotes(page.ctaSubscribeLabel || 'Subscribe for Updates')}"`, + ].join('\n'); + + return `---\n${frontmatter}\n---\n`; +} + +async function writeMDXFile(page: FinancialServicesPage): Promise { + const outputPath = + process.env.FINANCIAL_SERVICES_PAGE_MDX_OUTPUT_PATH || '../src/content/financial-services'; + const baseDir = path.resolve(__dirname, '../../../../../../', outputPath); + + if (!fs.existsSync(baseDir)) { + fs.mkdirSync(baseDir, { recursive: true }); + } + + const filename = `financial-services-page.mdx`; + const filepath = path.join(baseDir, filename); + const mdxContent = generateMDX(page); + + fs.writeFileSync(filepath, mdxContent, 'utf-8'); + console.log(`✅ Generated Financial Services Page MDX file: ${filepath}`); +} + +async function deleteMDXFile(): Promise { + const outputPath = + process.env.FINANCIAL_SERVICES_PAGE_MDX_OUTPUT_PATH || '../src/content/financial-services'; + const baseDir = path.resolve(__dirname, '../../../../../../', outputPath); + const filename = `financial-services-page.mdx`; + const filepath = path.join(baseDir, filename); + + if (fs.existsSync(filepath)) { + fs.unlinkSync(filepath); + console.log(`🗑️ Deleted Financial Services Page MDX file: ${filepath}`); + } +} + +export default { + async afterCreate(event: Event) { + const { result } = event; + if (result && result.publishedAt) { + await writeMDXFile(result); + } + }, + async afterUpdate(event: Event) { + const { result } = event; + if (result) { + if (result.publishedAt) { + await writeMDXFile(result); + } else { + await deleteMDXFile(); + } + } + }, + async afterDelete() { + 
await deleteMDXFile(); + }, +}; diff --git a/cms/src/api/financial-services-page/content-types/financial-services-page/schema.json b/cms/src/api/financial-services-page/content-types/financial-services-page/schema.json new file mode 100644 index 00000000..9f9c43b3 --- /dev/null +++ b/cms/src/api/financial-services-page/content-types/financial-services-page/schema.json @@ -0,0 +1,47 @@ +{ + "kind": "singleType", + "collectionName": "financial_services_page", + "info": { + "singularName": "financial-services-page", + "pluralName": "financial-services-pages", + "displayName": "Financial Services Page", + "description": "Manage the Financial Services landing page content" + }, + "options": { + "draftAndPublish": true + }, + "pluginOptions": {}, + "attributes": { + "heroTitle": { + "type": "string", + "required": true, + "default": "Unlock payment potential" + }, + "heroDescription": { + "type": "text", + "required": true + }, + "introText": { + "type": "text", + "required": true + }, + "applicationNotice": { + "type": "text" + }, + "ctaTitle": { + "type": "string", + "default": "Ready to Apply?" 
+ }, + "ctaDescription": { + "type": "text" + }, + "ctaEmailLabel": { + "type": "string", + "default": "Contact Us" + }, + "ctaSubscribeLabel": { + "type": "string", + "default": "Subscribe for Updates" + } + } +} diff --git a/cms/src/api/grant-track/content-types/grant-track/lifecycles.ts b/cms/src/api/grant-track/content-types/grant-track/lifecycles.ts new file mode 100644 index 00000000..53adb02a --- /dev/null +++ b/cms/src/api/grant-track/content-types/grant-track/lifecycles.ts @@ -0,0 +1,111 @@ +import fs from 'fs'; +import path from 'path'; + +interface GrantTrack { + id: number; + name: string; + amount: string; + description: string; + order?: number; + publishedAt?: string; +} + +interface Event { + result?: GrantTrack; +} + +function htmlToMarkdown(html: string): string { + if (!html) return ''; + + return html + .replace(/]*>(.*?)<\/h1>/gi, '# $1\n\n') + .replace(/]*>(.*?)<\/h2>/gi, '## $1\n\n') + .replace(/]*>(.*?)<\/h3>/gi, '### $1\n\n') + .replace(/]*>(.*?)<\/p>/gi, '$1\n\n') + .replace(/]*>(.*?)<\/strong>/gi, '**$1**') + .replace(/]*>(.*?)<\/b>/gi, '**$1**') + .replace(/]*>(.*?)<\/em>/gi, '*$1*') + .replace(/]*>(.*?)<\/i>/gi, '*$1*') + .replace(/]*href="([^"]*)"[^>]*>(.*?)<\/a>/gi, '[$2]($1)') + .replace(//gi, '\n') + .replace(/<[^>]+>/g, '') + .trim(); +} + +function slugify(value: string): string { + return value + .toLowerCase() + .trim() + .replace(/[^a-z0-9]+/g, '-') + .replace(/(^-|-$)+/g, ''); +} + +function escapeQuotes(value: string): string { + return value.replace(/"/g, '\\"'); +} + +function generateMDX(grant: GrantTrack): string { + const frontmatter = [ + `name: "${escapeQuotes(grant.name)}"`, + `amount: "${escapeQuotes(grant.amount)}"`, + `order: ${grant.order ?? 
0}`, + `description: "${escapeQuotes(grant.description)}"`, + ].join('\n'); + + const content = htmlToMarkdown(grant.description); + + return `---\n${frontmatter}\n---\n\n${content}\n`; +} + +async function writeMDXFile(grant: GrantTrack): Promise { + const outputPath = process.env.GRANT_TRACK_MDX_OUTPUT_PATH || '../src/content/grant-tracks'; + const baseDir = path.resolve(__dirname, '../../../../../../', outputPath); + + if (!fs.existsSync(baseDir)) { + fs.mkdirSync(baseDir, { recursive: true }); + } + + const filename = `${slugify(grant.name)}-${grant.id}.mdx`; + const filepath = path.join(baseDir, filename); + const mdxContent = generateMDX(grant); + + fs.writeFileSync(filepath, mdxContent, 'utf-8'); + console.log(`✅ Generated Grant Track MDX file: ${filepath}`); +} + +async function deleteMDXFile(grant: GrantTrack): Promise { + const outputPath = process.env.GRANT_TRACK_MDX_OUTPUT_PATH || '../src/content/grant-tracks'; + const baseDir = path.resolve(__dirname, '../../../../../../', outputPath); + const filename = `${slugify(grant.name)}-${grant.id}.mdx`; + const filepath = path.join(baseDir, filename); + + if (fs.existsSync(filepath)) { + fs.unlinkSync(filepath); + console.log(`🗑️ Deleted Grant Track MDX file: ${filepath}`); + } +} + +export default { + async afterCreate(event: Event) { + const { result } = event; + if (result && result.publishedAt) { + await writeMDXFile(result); + } + }, + async afterUpdate(event: Event) { + const { result } = event; + if (result) { + if (result.publishedAt) { + await writeMDXFile(result); + } else { + await deleteMDXFile(result); + } + } + }, + async afterDelete(event: Event) { + const { result } = event; + if (result) { + await deleteMDXFile(result); + } + }, +}; diff --git a/cms/src/api/grant-track/content-types/grant-track/schema.json b/cms/src/api/grant-track/content-types/grant-track/schema.json new file mode 100644 index 00000000..91f3618b --- /dev/null +++ 
b/cms/src/api/grant-track/content-types/grant-track/schema.json @@ -0,0 +1,32 @@ +{ + "kind": "collectionType", + "collectionName": "grant_tracks", + "info": { + "singularName": "grant-track", + "pluralName": "grant-tracks", + "displayName": "Grant Track", + "description": "Individual grant programs" + }, + "options": { + "draftAndPublish": true + }, + "pluginOptions": {}, + "attributes": { + "name": { + "type": "string", + "required": true + }, + "amount": { + "type": "string", + "required": true + }, + "description": { + "type": "richtext", + "required": true + }, + "order": { + "type": "integer", + "default": 0 + } + } +} diff --git a/cms/src/api/news-event/content-types/news-event/lifecycles.ts b/cms/src/api/news-event/content-types/news-event/lifecycles.ts new file mode 100644 index 00000000..8c3afc38 --- /dev/null +++ b/cms/src/api/news-event/content-types/news-event/lifecycles.ts @@ -0,0 +1,202 @@ +/** + * Lifecycle callbacks for event + * Generates MDX files that match the events content format used on the site + * Then commits and pushes to trigger Netlify preview builds + */ + +import fs from 'fs'; +import path from 'path'; +import { exec } from 'child_process'; + +interface NewsEvent { + id: number; + title: string; + slug: string; + order: number; + content: string; + publishedAt?: string; +} + +interface Event { + result?: NewsEvent; +} + +/** + * Converts HTML content to markdown-like format + */ +function htmlToMarkdown(html: string): string { + if (!html) return ''; + + return html + .replace(/ /gi, ' ') + .replace(/]*>([\s\S]*?)<\/h1>/gi, '# $1\n\n') + .replace(/]*>([\s\S]*?)<\/h2>/gi, '## $1\n\n') + .replace(/]*>([\s\S]*?)<\/h3>/gi, '### $1\n\n') + .replace(/]*>([\s\S]*?)<\/h4>/gi, '#### $1\n\n') + .replace(/]*>([\s\S]*?)<\/h5>/gi, '##### $1\n\n') + .replace(/]*>([\s\S]*?)<\/h6>/gi, '###### $1\n\n') + .replace(/]*>([\s\S]*?)<\/p>/gi, '$1\n\n') + .replace(/]*>([\s\S]*?)<\/strong>/gi, '**$1**') + .replace(/]*>([\s\S]*?)<\/b>/gi, '**$1**') + 
.replace(/]*>([\s\S]*?)<\/em>/gi, '*$1*') + .replace(/]*>([\s\S]*?)<\/i>/gi, '*$1*') + .replace(/]*href="([^"]*)"[^>]*>([\s\S]*?)<\/a>/gi, '[$2]($1)') + .replace(/]*>]*>([\s\S]*?)<\/code><\/pre>/gi, '```\n$1\n```') + .replace(/]*>([\s\S]*?)<\/code>/gi, '`$1`') + .replace(/]*>/gi, '\n') + .replace(/<\/ul>/gi, '\n') + .replace(/]*>/gi, '\n') + .replace(/<\/ol>/gi, '\n') + .replace(/]*>([\s\S]*?)<\/li>/gi, '- $1\n') + .replace(//gi, '\n') + .replace(/]*>([\s\S]*?)<\/blockquote>/gi, '> $1\n') + .replace(/]*src="([^"]*)"[^>]*alt="([\s\S]*?)"[^>]*>/gi, '![$2]($1)') + .replace(/]*src="([^"]*)"[^>]*>/gi, '![]($1)') + .replace(/<[^>]+>/g, '') + .trim(); +} + +function escapeQuotes(value: string): string { + return value.replace(/"/g, '\\"'); +} + +function generateFilename(event: NewsEvent): string { + return `${event.slug}.mdx`; +} + +function generateMDX(event: NewsEvent): string { + const frontmatterLines = [ + `title: "${escapeQuotes(event.title)}"`, + `order: ${event.order || 0}`, + ].filter(Boolean) as string[]; + + const frontmatter = frontmatterLines.join('\n'); + const content = event.content ? 
htmlToMarkdown(event.content) : ''; + + return `---\n${frontmatter}\n---\n\n${content}\n`; +} + +async function writeMDXFile(event: NewsEvent): Promise { + const outputPath = process.env.EVENTS_MDX_OUTPUT_PATH || '../src/content/events'; + // Resolve from dist/src/api/news-event/content-types/news-event/ up to cms root then project root + const baseDir = path.resolve(__dirname, '../../../../../../', outputPath); + + if (!fs.existsSync(baseDir)) { + fs.mkdirSync(baseDir, { recursive: true }); + } + + const filename = generateFilename(event); + const filepath = path.join(baseDir, filename); + const mdxContent = generateMDX(event); + + fs.writeFileSync(filepath, mdxContent, 'utf-8'); + console.log(`✅ Generated Event MDX file: ${filepath}`); +} + +async function deleteMDXFile(event: NewsEvent): Promise { + const outputPath = process.env.EVENTS_MDX_OUTPUT_PATH || '../src/content/events'; + // Resolve from dist/src/api/news-event/content-types/news-event/ up to cms root then project root + const baseDir = path.resolve(__dirname, '../../../../../../', outputPath); + const filename = generateFilename(event); + const filepath = path.join(baseDir, filename); + + if (fs.existsSync(filepath)) { + fs.unlinkSync(filepath); + console.log(`🗑️ Deleted Event MDX file: ${filepath}`); + } +} + +/** + * Escapes a string for safe use in shell commands (single-quoted context) + */ +function escapeForShell(str: string): string { + // For single-quoted strings, we only need to handle single quotes + // Replace ' with '\'' (end quote, escaped quote, start quote) + return str.replace(/'/g, "'\\''"); +} + +/** + * Commits and pushes changes to git to trigger Netlify preview builds + */ +async function gitCommitAndPush(filepath: string, message: string): Promise { + // Skip git operations if disabled via env var + if (process.env.STRAPI_DISABLE_GIT_SYNC === 'true') { + console.log('⏭️ Git sync disabled via STRAPI_DISABLE_GIT_SYNC'); + return; + } + + // Get the project root (where .git lives) 
+ const projectRoot = path.resolve(__dirname, '../../../../../../'); + + // Escape the message for shell (handles quotes and special chars) + const safeMessage = escapeForShell(message); + const safeFilepath = escapeForShell(filepath); + + return new Promise((resolve, reject) => { + // Stage the specific file, commit, and push + // Use single quotes to avoid issues with double quotes in titles + const commands = [ + `git add '${safeFilepath}'`, + `git commit -m '${safeMessage}'`, + `git push` + ].join(' && '); + + exec(commands, { cwd: projectRoot }, (error, stdout, stderr) => { + if (error) { + // Don't fail the lifecycle if git operations fail + // This allows content to still be saved even if git has issues + console.error(`⚠️ Git sync failed: ${error.message}`); + console.error(`stderr: ${stderr}`); + resolve(); // Resolve anyway to not block Strapi + return; + } + console.log(`✅ Git sync complete: ${message}`); + if (stdout) console.log(stdout); + resolve(); + }); + }); +} + +export default { + async afterCreate(event: Event) { + const { result } = event; + if (result && result.publishedAt) { + await writeMDXFile(result); + const filename = generateFilename(result); + const outputPath = process.env.EVENTS_MDX_OUTPUT_PATH || '../src/content/events'; + const baseDir = path.resolve(__dirname, '../../../../../../', outputPath); + const filepath = path.join(baseDir, filename); + await gitCommitAndPush(filepath, `events: add "${result.title}"`); + } + }, + + async afterUpdate(event: Event) { + const { result } = event; + if (result) { + const filename = generateFilename(result); + const outputPath = process.env.EVENTS_MDX_OUTPUT_PATH || '../src/content/events'; + const baseDir = path.resolve(__dirname, '../../../../../../', outputPath); + const filepath = path.join(baseDir, filename); + + if (result.publishedAt) { + await writeMDXFile(result); + await gitCommitAndPush(filepath, `events: update "${result.title}"`); + } else { + await deleteMDXFile(result); + await 
gitCommitAndPush(filepath, `events: unpublish "${result.title}"`); + } + } + }, + + async afterDelete(event: Event) { + const { result } = event; + if (result) { + await deleteMDXFile(result); + const filename = generateFilename(result); + const outputPath = process.env.EVENTS_MDX_OUTPUT_PATH || '../src/content/events'; + const baseDir = path.resolve(__dirname, '../../../../../../', outputPath); + const filepath = path.join(baseDir, filename); + await gitCommitAndPush(filepath, `events: delete "${result.title}"`); + } + }, +}; diff --git a/cms/src/api/news-event/content-types/news-event/schema.json b/cms/src/api/news-event/content-types/news-event/schema.json new file mode 100644 index 00000000..a6ca1733 --- /dev/null +++ b/cms/src/api/news-event/content-types/news-event/schema.json @@ -0,0 +1,37 @@ +{ + "kind": "collectionType", + "collectionName": "news_events", + "info": { + "singularName": "news-event", + "pluralName": "news-events", + "displayName": "Event", + "description": "Events content that syncs to MDX" + }, + "options": { + "draftAndPublish": true + }, + "pluginOptions": {}, + "attributes": { + "title": { + "type": "string", + "required": true + }, + "slug": { + "type": "uid", + "targetField": "title", + "required": true + }, + "order": { + "type": "integer", + "default": 0, + "required": false + }, + "content": { + "type": "customField", + "customField": "plugin::ckeditor5.CKEditor", + "options": { + "preset": "defaultMarkdown" + } + } + } +} diff --git a/cms/src/api/news-event/controllers/news-event.ts b/cms/src/api/news-event/controllers/news-event.ts new file mode 100644 index 00000000..fe94f4f7 --- /dev/null +++ b/cms/src/api/news-event/controllers/news-event.ts @@ -0,0 +1,7 @@ +/** + * news-event controller + */ + +import { factories } from '@strapi/strapi' + +export default factories.createCoreController('api::news-event.news-event'); diff --git a/cms/src/api/news-event/routes/news-event.ts b/cms/src/api/news-event/routes/news-event.ts new file 
mode 100644 index 00000000..59a1a23a --- /dev/null +++ b/cms/src/api/news-event/routes/news-event.ts @@ -0,0 +1,7 @@ +/** + * news-event router + */ + +import { factories } from '@strapi/strapi' + +export default factories.createCoreRouter('api::news-event.news-event'); diff --git a/cms/src/api/news-event/services/news-event.ts b/cms/src/api/news-event/services/news-event.ts new file mode 100644 index 00000000..4a749e90 --- /dev/null +++ b/cms/src/api/news-event/services/news-event.ts @@ -0,0 +1,7 @@ +/** + * news-event service + */ + +import { factories } from '@strapi/strapi' + +export default factories.createCoreService('api::news-event.news-event'); diff --git a/cms/src/api/press-item/content-types/press-item/lifecycles.ts b/cms/src/api/press-item/content-types/press-item/lifecycles.ts new file mode 100644 index 00000000..fbe294ce --- /dev/null +++ b/cms/src/api/press-item/content-types/press-item/lifecycles.ts @@ -0,0 +1,151 @@ +/** + * Lifecycle callbacks for press-item + * Automatically generates MDX files when press items are created, updated, or deleted + */ + +import fs from 'fs'; +import path from 'path'; + +interface PressItem { + id: number; + title: string; + description: string; + publishDate: string; + slug: string; + publication?: string; + publicationLogo?: string; + externalUrl?: string; + content?: string; + featured: boolean; + category: string; + publishedAt?: string; +} + +interface Event { + result?: PressItem; + params?: { + data?: Partial; + where?: { id?: number }; + }; +} + +/** + * Converts HTML content to markdown-like format + */ +function htmlToMarkdown(html: string): string { + if (!html) return ''; + + // Basic HTML to Markdown conversion + return html + .replace(/]*>(.*?)<\/h1>/gi, '# $1\n\n') + .replace(/]*>(.*?)<\/h2>/gi, '## $1\n\n') + .replace(/]*>(.*?)<\/h3>/gi, '### $1\n\n') + .replace(/]*>(.*?)<\/p>/gi, '$1\n\n') + .replace(/]*>(.*?)<\/strong>/gi, '**$1**') + .replace(/]*>(.*?)<\/b>/gi, '**$1**') + 
.replace(/]*>(.*?)<\/em>/gi, '*$1*') + .replace(/]*>(.*?)<\/i>/gi, '*$1*') + .replace(/]*href="([^"]*)"[^>]*>(.*?)<\/a>/gi, '[$2]($1)') + .replace(//gi, '\n') + .replace(/<[^>]+>/g, '') // Remove remaining HTML tags + .trim(); +} + +/** + * Generates MDX file content from press item data + */ +function generateMDX(item: PressItem): string { + const frontmatter = { + title: item.title, + description: item.description, + publishDate: item.publishDate, + slug: item.slug, + ...(item.publication && { publication: item.publication }), + ...(item.publicationLogo && { publicationLogo: item.publicationLogo }), + ...(item.externalUrl && { externalUrl: item.externalUrl }), + featured: item.featured, + category: item.category, + }; + + const yamlFrontmatter = Object.entries(frontmatter) + .map(([key, value]) => { + if (typeof value === 'boolean') { + return `${key}: ${value}`; + } else if (typeof value === 'string') { + // Escape quotes in strings and wrap in quotes if contains special chars + const escaped = value.replace(/"/g, '\\"'); + return `${key}: "${escaped}"`; + } + return `${key}: ${value}`; + }) + .join('\n'); + + const content = item.content ? 
htmlToMarkdown(item.content) : ''; + + return `---\n${yamlFrontmatter}\n---\n\n${content}\n`; +} + +/** + * Writes MDX file to the file system + */ +async function writeMDXFile(item: PressItem): Promise { + const outputPath = process.env.MDX_OUTPUT_PATH || '../src/content/press'; + // Go up from dist/src/api/press-item/content-types/press-item/ to cms root, then to project root + const baseDir = path.resolve(__dirname, '../../../../../../', outputPath); + + // Ensure directory exists + if (!fs.existsSync(baseDir)) { + fs.mkdirSync(baseDir, { recursive: true }); + } + + const filename = `${item.slug}.mdx`; + const filepath = path.join(baseDir, filename); + const mdxContent = generateMDX(item); + + fs.writeFileSync(filepath, mdxContent, 'utf-8'); + console.log(`✅ Generated MDX file: ${filepath}`); +} + +/** + * Deletes MDX file from the file system + */ +async function deleteMDXFile(slug: string): Promise { + const outputPath = process.env.MDX_OUTPUT_PATH || '../src/content/press'; + // Go up from dist/src/api/press-item/content-types/press-item/ to cms root, then to project root + const baseDir = path.resolve(__dirname, '../../../../../../', outputPath); + const filename = `${slug}.mdx`; + const filepath = path.join(baseDir, filename); + + if (fs.existsSync(filepath)) { + fs.unlinkSync(filepath); + console.log(`🗑️ Deleted MDX file: ${filepath}`); + } +} + +export default { + async afterCreate(event: Event) { + const { result } = event; + if (result && result.publishedAt) { + await writeMDXFile(result); + } + }, + + async afterUpdate(event: Event) { + const { result } = event; + if (result) { + if (result.publishedAt) { + await writeMDXFile(result); + } else { + // If unpublished, delete the MDX file + await deleteMDXFile(result.slug); + } + } + }, + + async afterDelete(event: Event) { + const { result } = event; + if (result) { + await deleteMDXFile(result.slug); + } + }, +}; diff --git a/cms/src/api/press-item/content-types/press-item/schema.json 
b/cms/src/api/press-item/content-types/press-item/schema.json new file mode 100644 index 00000000..cbb363d6 --- /dev/null +++ b/cms/src/api/press-item/content-types/press-item/schema.json @@ -0,0 +1,66 @@ +{ + "kind": "collectionType", + "collectionName": "press_items", + "info": { + "singularName": "press-item", + "pluralName": "press-items", + "displayName": "Press Item", + "description": "Press releases and media mentions" + }, + "options": { + "draftAndPublish": true + }, + "pluginOptions": {}, + "attributes": { + "title": { + "type": "string", + "required": true, + "maxLength": 255 + }, + "description": { + "type": "text", + "required": true + }, + "publishDate": { + "type": "date", + "required": true + }, + "slug": { + "type": "uid", + "targetField": "title", + "required": true + }, + "publication": { + "type": "string", + "required": false, + "maxLength": 255 + }, + "publicationLogo": { + "type": "string", + "required": false + }, + "externalUrl": { + "type": "string", + "required": false + }, + "content": { + "type": "richtext", + "required": false, + "pluginOptions": { + "i18n": { + "localized": false + } + }, + "customField": "plugin::ckeditor.CKEditor" + }, + "featured": { + "type": "boolean", + "default": false + }, + "category": { + "type": "enumeration", + "enum": ["press-release", "media-mention", "announcement"], + "default": "media-mention" + } + } +} diff --git a/cms/src/api/press-item/controllers/press-item.ts b/cms/src/api/press-item/controllers/press-item.ts new file mode 100644 index 00000000..ee055d83 --- /dev/null +++ b/cms/src/api/press-item/controllers/press-item.ts @@ -0,0 +1,7 @@ +/** + * press-item controller + */ + +import { factories } from '@strapi/strapi' + +export default factories.createCoreController('api::press-item.press-item'); diff --git a/cms/src/api/press-item/routes/press-item.ts b/cms/src/api/press-item/routes/press-item.ts new file mode 100644 index 00000000..70d9b5cf --- /dev/null +++ 
b/cms/src/api/press-item/routes/press-item.ts @@ -0,0 +1,7 @@ +/** + * press-item router + */ + +import { factories } from '@strapi/strapi'; + +export default factories.createCoreRouter('api::press-item.press-item'); diff --git a/cms/src/api/press-item/services/press-item.ts b/cms/src/api/press-item/services/press-item.ts new file mode 100644 index 00000000..9e4e43c1 --- /dev/null +++ b/cms/src/api/press-item/services/press-item.ts @@ -0,0 +1,7 @@ +/** + * press-item service + */ + +import { factories } from '@strapi/strapi'; + +export default factories.createCoreService('api::press-item.press-item'); diff --git a/cms/src/index.ts b/cms/src/index.ts new file mode 100644 index 00000000..de7f12bc --- /dev/null +++ b/cms/src/index.ts @@ -0,0 +1,205 @@ +import fs from 'fs'; +import path from 'path'; + +function copySchemas() { + const srcDir = path.join(__dirname, '../../src'); + const destDir = path.join(__dirname); + + function copyDir(src: string, dest: string) { + if (!fs.existsSync(dest)) { + fs.mkdirSync(dest, { recursive: true }); + } + + const entries = fs.readdirSync(src, { withFileTypes: true }); + + for (const entry of entries) { + const srcPath = path.join(src, entry.name); + const destPath = path.join(dest, entry.name); + + if (entry.isDirectory()) { + copyDir(srcPath, destPath); + } else if (entry.name.endsWith('.json')) { + fs.copyFileSync(srcPath, destPath); + } + } + } + + try { + copyDir(srcDir, destDir); + console.log('✅ Schema files copied successfully'); + } catch (error) { + console.error('❌ Error copying schema files:', error); + } +} + +/** + * Configure pretty labels for field names in the admin panel. + * This updates the content-manager metadata stored in the database. 
+ */ +async function configureFieldLabels(strapi: any) { + // Map of content type UIDs to their field label configurations + // All fields get human-readable labels for better UX + const labelConfigs: Record> = { + 'api::blog-post.blog-post': { + title: 'Title', + description: 'Description', + slug: 'URL Slug', + date: 'Publish Date', + lang: 'Language', + featuredImage: 'Featured Image', + ogImageUrl: 'OG Image URL', + content: 'Content', + createdAt: 'Created At', + updatedAt: 'Updated At', + publishedAt: 'Published At', + }, + 'api::press-item.press-item': { + title: 'Title', + description: 'Description', + publishDate: 'Publish Date', + slug: 'URL Slug', + publication: 'Publication Name', + publicationLogo: 'Publication Logo URL', + externalUrl: 'External URL', + content: 'Content', + featured: 'Featured', + category: 'Category', + createdAt: 'Created At', + updatedAt: 'Updated At', + publishedAt: 'Published At', + }, + 'api::grant-track.grant-track': { + name: 'Grant Name', + amount: 'Grant Amount', + description: 'Description', + order: 'Display Order', + createdAt: 'Created At', + updatedAt: 'Updated At', + publishedAt: 'Published At', + }, + 'api::info-item.info-item': { + title: 'Title', + content: 'Content', + order: 'Display Order', + createdAt: 'Created At', + updatedAt: 'Updated At', + publishedAt: 'Published At', + }, + 'api::financial-services-page.financial-services-page': { + heroTitle: 'Hero Title', + heroDescription: 'Hero Description', + introText: 'Introduction Text', + applicationNotice: 'Application Notice', + ctaTitle: 'CTA Title', + ctaDescription: 'CTA Description', + ctaEmailLabel: 'Email Button Label', + ctaSubscribeLabel: 'Subscribe Button Label', + createdAt: 'Created At', + updatedAt: 'Updated At', + publishedAt: 'Published At', + }, + }; + + for (const [uid, labels] of Object.entries(labelConfigs)) { + if (Object.keys(labels).length === 0) continue; + + try { + // Get the content-manager plugin service + const contentManagerService = 
strapi.plugin('content-manager')?.service('content-types'); + if (!contentManagerService) continue; + + // Get current configuration + const configuration = await contentManagerService.findConfiguration({ uid }); + if (!configuration?.metadatas) continue; + + let needsUpdate = false; + const updatedMetadatas = { ...configuration.metadatas }; + + for (const [fieldName, label] of Object.entries(labels)) { + if (updatedMetadatas[fieldName]) { + const currentEditLabel = updatedMetadatas[fieldName]?.edit?.label; + + // Update if label is default (same as field name, case-insensitive), empty, or not set + const isDefaultLabel = !currentEditLabel || + currentEditLabel === fieldName || + currentEditLabel.toLowerCase() === fieldName.toLowerCase(); + + if (isDefaultLabel && currentEditLabel !== label) { + updatedMetadatas[fieldName] = { + ...updatedMetadatas[fieldName], + edit: { + ...updatedMetadatas[fieldName]?.edit, + label, + }, + list: { + ...updatedMetadatas[fieldName]?.list, + label, + }, + }; + needsUpdate = true; + } + } + } + + if (needsUpdate) { + await contentManagerService.updateConfiguration( + { uid }, + { metadatas: updatedMetadatas } + ); + strapi.log.info(`✅ Updated field labels for ${uid}`); + } + } catch (error) { + // Log but don't fail - configuration might not exist yet + strapi.log.debug(`Could not update labels for ${uid}: ${error.message}`); + } + } +} + +export default { + /** + * An asynchronous register function that runs before + * your application is initialized. + * + * This gives you an opportunity to extend code. + */ + register(/* { strapi } */) { + // Copy schema JSON files after TypeScript compilation + copySchemas(); + }, + + /** + * An asynchronous bootstrap function that runs before + * your application gets started. + * + * This gives you an opportunity to set up your data model, + * run jobs, or perform some special logic. 
+ */ + async bootstrap({ strapi }) { + // Ensure database directory exists with proper permissions + // Default database path is .tmp/data.db relative to process.cwd() + const dbDir = path.resolve(process.cwd(), '.tmp'); + if (!fs.existsSync(dbDir)) { + fs.mkdirSync(dbDir, { recursive: true, mode: 0o775 }); + } else { + // Ensure directory has write permissions + try { + fs.chmodSync(dbDir, 0o775); + } catch (error) { + // Ignore permission errors if we can't change them + } + } + + // If database file exists, ensure it has write permissions + const dbPath = path.join(dbDir, 'data.db'); + if (fs.existsSync(dbPath)) { + try { + fs.chmodSync(dbPath, 0o664); + } catch (error) { + // Ignore permission errors if we can't change them + } + } + + // Configure pretty field labels for the admin panel + await configureFieldLabels(strapi); + }, +}; diff --git a/cms/strapi-server.js b/cms/strapi-server.js new file mode 100644 index 00000000..3b485595 --- /dev/null +++ b/cms/strapi-server.js @@ -0,0 +1,39 @@ +const fs = require('fs'); +const path = require('path'); + +function copySchemas() { + const srcDir = path.join(__dirname, 'src'); + const destDir = path.join(__dirname, 'dist', 'src'); + + function copyDir(src, dest) { + if (!fs.existsSync(dest)) { + fs.mkdirSync(dest, { recursive: true }); + } + + const entries = fs.readdirSync(src, { withFileTypes: true }); + + for (const entry of entries) { + const srcPath = path.join(src, entry.name); + const destPath = path.join(dest, entry.name); + + if (entry.isDirectory()) { + copyDir(srcPath, destPath); + } else if (entry.name.endsWith('.json')) { + fs.copyFileSync(srcPath, destPath); + } + } + } + + copyDir(srcDir, destDir); + console.log('✅ Schema files copied successfully'); +} + +module.exports = () => { + return { + register() { + // Copy schemas when Strapi initializes + copySchemas(); + }, + bootstrap() {}, + }; +}; diff --git a/cms/tsconfig.json b/cms/tsconfig.json new file mode 100644 index 00000000..dcd4a620 --- 
/dev/null +++ b/cms/tsconfig.json @@ -0,0 +1,28 @@ +{ + "compilerOptions": { + "target": "ES2020", + "module": "commonjs", + "lib": ["ES2020"], + "outDir": "dist", + "rootDir": ".", + "strict": false, + "esModuleInterop": true, + "skipLibCheck": true, + "forceConsistentCasingInFileNames": true, + "moduleResolution": "node", + "resolveJsonModule": true, + "allowJs": true + }, + "include": [ + "src/**/*.ts", + "src/**/*.json", + "config/**/*.ts" + ], + "exclude": [ + "node_modules/**", + "build/**", + "dist/**", + ".cache/**", + ".tmp/**" + ] +} diff --git a/cms/types/generated/components.d.ts b/cms/types/generated/components.d.ts new file mode 100644 index 00000000..41aa5633 --- /dev/null +++ b/cms/types/generated/components.d.ts @@ -0,0 +1,3 @@ +/* + * The app doesn't have any components yet. + */ diff --git a/cms/types/generated/contentTypes.d.ts b/cms/types/generated/contentTypes.d.ts new file mode 100644 index 00000000..ce52f9f4 --- /dev/null +++ b/cms/types/generated/contentTypes.d.ts @@ -0,0 +1,1023 @@ +import type { Schema, Struct } from '@strapi/strapi' + +export interface AdminApiToken extends Struct.CollectionTypeSchema { + collectionName: 'strapi_api_tokens' + info: { + description: '' + displayName: 'Api Token' + name: 'Api Token' + pluralName: 'api-tokens' + singularName: 'api-token' + } + options: { + draftAndPublish: false + } + pluginOptions: { + 'content-manager': { + visible: false + } + 'content-type-builder': { + visible: false + } + } + attributes: { + accessKey: Schema.Attribute.String & + Schema.Attribute.Required & + Schema.Attribute.SetMinMaxLength<{ + minLength: 1 + }> + createdAt: Schema.Attribute.DateTime + createdBy: Schema.Attribute.Relation<'oneToOne', 'admin::user'> & + Schema.Attribute.Private + description: Schema.Attribute.String & + Schema.Attribute.SetMinMaxLength<{ + minLength: 1 + }> & + Schema.Attribute.DefaultTo<''> + encryptedKey: Schema.Attribute.Text & + Schema.Attribute.SetMinMaxLength<{ + minLength: 1 + }> + expiresAt: 
Schema.Attribute.DateTime + lastUsedAt: Schema.Attribute.DateTime + lifespan: Schema.Attribute.BigInteger + locale: Schema.Attribute.String & Schema.Attribute.Private + localizations: Schema.Attribute.Relation<'oneToMany', 'admin::api-token'> & + Schema.Attribute.Private + name: Schema.Attribute.String & + Schema.Attribute.Required & + Schema.Attribute.Unique & + Schema.Attribute.SetMinMaxLength<{ + minLength: 1 + }> + permissions: Schema.Attribute.Relation< + 'oneToMany', + 'admin::api-token-permission' + > + publishedAt: Schema.Attribute.DateTime + type: Schema.Attribute.Enumeration<['read-only', 'full-access', 'custom']> & + Schema.Attribute.Required & + Schema.Attribute.DefaultTo<'read-only'> + updatedAt: Schema.Attribute.DateTime + updatedBy: Schema.Attribute.Relation<'oneToOne', 'admin::user'> & + Schema.Attribute.Private + } +} + +export interface AdminApiTokenPermission extends Struct.CollectionTypeSchema { + collectionName: 'strapi_api_token_permissions' + info: { + description: '' + displayName: 'API Token Permission' + name: 'API Token Permission' + pluralName: 'api-token-permissions' + singularName: 'api-token-permission' + } + options: { + draftAndPublish: false + } + pluginOptions: { + 'content-manager': { + visible: false + } + 'content-type-builder': { + visible: false + } + } + attributes: { + action: Schema.Attribute.String & + Schema.Attribute.Required & + Schema.Attribute.SetMinMaxLength<{ + minLength: 1 + }> + createdAt: Schema.Attribute.DateTime + createdBy: Schema.Attribute.Relation<'oneToOne', 'admin::user'> & + Schema.Attribute.Private + locale: Schema.Attribute.String & Schema.Attribute.Private + localizations: Schema.Attribute.Relation< + 'oneToMany', + 'admin::api-token-permission' + > & + Schema.Attribute.Private + publishedAt: Schema.Attribute.DateTime + token: Schema.Attribute.Relation<'manyToOne', 'admin::api-token'> + updatedAt: Schema.Attribute.DateTime + updatedBy: Schema.Attribute.Relation<'oneToOne', 'admin::user'> & + 
Schema.Attribute.Private + } +} + +export interface AdminPermission extends Struct.CollectionTypeSchema { + collectionName: 'admin_permissions' + info: { + description: '' + displayName: 'Permission' + name: 'Permission' + pluralName: 'permissions' + singularName: 'permission' + } + options: { + draftAndPublish: false + } + pluginOptions: { + 'content-manager': { + visible: false + } + 'content-type-builder': { + visible: false + } + } + attributes: { + action: Schema.Attribute.String & + Schema.Attribute.Required & + Schema.Attribute.SetMinMaxLength<{ + minLength: 1 + }> + actionParameters: Schema.Attribute.JSON & Schema.Attribute.DefaultTo<{}> + conditions: Schema.Attribute.JSON & Schema.Attribute.DefaultTo<[]> + createdAt: Schema.Attribute.DateTime + createdBy: Schema.Attribute.Relation<'oneToOne', 'admin::user'> & + Schema.Attribute.Private + locale: Schema.Attribute.String & Schema.Attribute.Private + localizations: Schema.Attribute.Relation<'oneToMany', 'admin::permission'> & + Schema.Attribute.Private + properties: Schema.Attribute.JSON & Schema.Attribute.DefaultTo<{}> + publishedAt: Schema.Attribute.DateTime + role: Schema.Attribute.Relation<'manyToOne', 'admin::role'> + subject: Schema.Attribute.String & + Schema.Attribute.SetMinMaxLength<{ + minLength: 1 + }> + updatedAt: Schema.Attribute.DateTime + updatedBy: Schema.Attribute.Relation<'oneToOne', 'admin::user'> & + Schema.Attribute.Private + } +} + +export interface AdminRole extends Struct.CollectionTypeSchema { + collectionName: 'admin_roles' + info: { + description: '' + displayName: 'Role' + name: 'Role' + pluralName: 'roles' + singularName: 'role' + } + options: { + draftAndPublish: false + } + pluginOptions: { + 'content-manager': { + visible: false + } + 'content-type-builder': { + visible: false + } + } + attributes: { + code: Schema.Attribute.String & + Schema.Attribute.Required & + Schema.Attribute.Unique & + Schema.Attribute.SetMinMaxLength<{ + minLength: 1 + }> + createdAt: 
Schema.Attribute.DateTime + createdBy: Schema.Attribute.Relation<'oneToOne', 'admin::user'> & + Schema.Attribute.Private + description: Schema.Attribute.String + locale: Schema.Attribute.String & Schema.Attribute.Private + localizations: Schema.Attribute.Relation<'oneToMany', 'admin::role'> & + Schema.Attribute.Private + name: Schema.Attribute.String & + Schema.Attribute.Required & + Schema.Attribute.Unique & + Schema.Attribute.SetMinMaxLength<{ + minLength: 1 + }> + permissions: Schema.Attribute.Relation<'oneToMany', 'admin::permission'> + publishedAt: Schema.Attribute.DateTime + updatedAt: Schema.Attribute.DateTime + updatedBy: Schema.Attribute.Relation<'oneToOne', 'admin::user'> & + Schema.Attribute.Private + users: Schema.Attribute.Relation<'manyToMany', 'admin::user'> + } +} + +export interface AdminSession extends Struct.CollectionTypeSchema { + collectionName: 'strapi_sessions' + info: { + description: 'Session Manager storage' + displayName: 'Session' + name: 'Session' + pluralName: 'sessions' + singularName: 'session' + } + options: { + draftAndPublish: false + } + pluginOptions: { + 'content-manager': { + visible: false + } + 'content-type-builder': { + visible: false + } + i18n: { + localized: false + } + } + attributes: { + absoluteExpiresAt: Schema.Attribute.DateTime & Schema.Attribute.Private + childId: Schema.Attribute.String & Schema.Attribute.Private + createdAt: Schema.Attribute.DateTime + createdBy: Schema.Attribute.Relation<'oneToOne', 'admin::user'> & + Schema.Attribute.Private + deviceId: Schema.Attribute.String & + Schema.Attribute.Required & + Schema.Attribute.Private + expiresAt: Schema.Attribute.DateTime & + Schema.Attribute.Required & + Schema.Attribute.Private + locale: Schema.Attribute.String & Schema.Attribute.Private + localizations: Schema.Attribute.Relation<'oneToMany', 'admin::session'> & + Schema.Attribute.Private + origin: Schema.Attribute.String & + Schema.Attribute.Required & + Schema.Attribute.Private + publishedAt: 
Schema.Attribute.DateTime + sessionId: Schema.Attribute.String & + Schema.Attribute.Required & + Schema.Attribute.Private & + Schema.Attribute.Unique + status: Schema.Attribute.String & Schema.Attribute.Private + type: Schema.Attribute.String & Schema.Attribute.Private + updatedAt: Schema.Attribute.DateTime + updatedBy: Schema.Attribute.Relation<'oneToOne', 'admin::user'> & + Schema.Attribute.Private + userId: Schema.Attribute.String & + Schema.Attribute.Required & + Schema.Attribute.Private + } +} + +export interface AdminTransferToken extends Struct.CollectionTypeSchema { + collectionName: 'strapi_transfer_tokens' + info: { + description: '' + displayName: 'Transfer Token' + name: 'Transfer Token' + pluralName: 'transfer-tokens' + singularName: 'transfer-token' + } + options: { + draftAndPublish: false + } + pluginOptions: { + 'content-manager': { + visible: false + } + 'content-type-builder': { + visible: false + } + } + attributes: { + accessKey: Schema.Attribute.String & + Schema.Attribute.Required & + Schema.Attribute.SetMinMaxLength<{ + minLength: 1 + }> + createdAt: Schema.Attribute.DateTime + createdBy: Schema.Attribute.Relation<'oneToOne', 'admin::user'> & + Schema.Attribute.Private + description: Schema.Attribute.String & + Schema.Attribute.SetMinMaxLength<{ + minLength: 1 + }> & + Schema.Attribute.DefaultTo<''> + expiresAt: Schema.Attribute.DateTime + lastUsedAt: Schema.Attribute.DateTime + lifespan: Schema.Attribute.BigInteger + locale: Schema.Attribute.String & Schema.Attribute.Private + localizations: Schema.Attribute.Relation< + 'oneToMany', + 'admin::transfer-token' + > & + Schema.Attribute.Private + name: Schema.Attribute.String & + Schema.Attribute.Required & + Schema.Attribute.Unique & + Schema.Attribute.SetMinMaxLength<{ + minLength: 1 + }> + permissions: Schema.Attribute.Relation< + 'oneToMany', + 'admin::transfer-token-permission' + > + publishedAt: Schema.Attribute.DateTime + updatedAt: Schema.Attribute.DateTime + updatedBy: 
Schema.Attribute.Relation<'oneToOne', 'admin::user'> & + Schema.Attribute.Private + } +} + +export interface AdminTransferTokenPermission + extends Struct.CollectionTypeSchema { + collectionName: 'strapi_transfer_token_permissions' + info: { + description: '' + displayName: 'Transfer Token Permission' + name: 'Transfer Token Permission' + pluralName: 'transfer-token-permissions' + singularName: 'transfer-token-permission' + } + options: { + draftAndPublish: false + } + pluginOptions: { + 'content-manager': { + visible: false + } + 'content-type-builder': { + visible: false + } + } + attributes: { + action: Schema.Attribute.String & + Schema.Attribute.Required & + Schema.Attribute.SetMinMaxLength<{ + minLength: 1 + }> + createdAt: Schema.Attribute.DateTime + createdBy: Schema.Attribute.Relation<'oneToOne', 'admin::user'> & + Schema.Attribute.Private + locale: Schema.Attribute.String & Schema.Attribute.Private + localizations: Schema.Attribute.Relation< + 'oneToMany', + 'admin::transfer-token-permission' + > & + Schema.Attribute.Private + publishedAt: Schema.Attribute.DateTime + token: Schema.Attribute.Relation<'manyToOne', 'admin::transfer-token'> + updatedAt: Schema.Attribute.DateTime + updatedBy: Schema.Attribute.Relation<'oneToOne', 'admin::user'> & + Schema.Attribute.Private + } +} + +export interface AdminUser extends Struct.CollectionTypeSchema { + collectionName: 'admin_users' + info: { + description: '' + displayName: 'User' + name: 'User' + pluralName: 'users' + singularName: 'user' + } + options: { + draftAndPublish: false + } + pluginOptions: { + 'content-manager': { + visible: false + } + 'content-type-builder': { + visible: false + } + } + attributes: { + blocked: Schema.Attribute.Boolean & + Schema.Attribute.Private & + Schema.Attribute.DefaultTo + createdAt: Schema.Attribute.DateTime + createdBy: Schema.Attribute.Relation<'oneToOne', 'admin::user'> & + Schema.Attribute.Private + email: Schema.Attribute.Email & + Schema.Attribute.Required & + 
Schema.Attribute.Private & + Schema.Attribute.Unique & + Schema.Attribute.SetMinMaxLength<{ + minLength: 6 + }> + firstname: Schema.Attribute.String & + Schema.Attribute.SetMinMaxLength<{ + minLength: 1 + }> + isActive: Schema.Attribute.Boolean & + Schema.Attribute.Private & + Schema.Attribute.DefaultTo + lastname: Schema.Attribute.String & + Schema.Attribute.SetMinMaxLength<{ + minLength: 1 + }> + locale: Schema.Attribute.String & Schema.Attribute.Private + localizations: Schema.Attribute.Relation<'oneToMany', 'admin::user'> & + Schema.Attribute.Private + password: Schema.Attribute.Password & + Schema.Attribute.Private & + Schema.Attribute.SetMinMaxLength<{ + minLength: 6 + }> + preferedLanguage: Schema.Attribute.String + publishedAt: Schema.Attribute.DateTime + registrationToken: Schema.Attribute.String & Schema.Attribute.Private + resetPasswordToken: Schema.Attribute.String & Schema.Attribute.Private + roles: Schema.Attribute.Relation<'manyToMany', 'admin::role'> & + Schema.Attribute.Private + updatedAt: Schema.Attribute.DateTime + updatedBy: Schema.Attribute.Relation<'oneToOne', 'admin::user'> & + Schema.Attribute.Private + username: Schema.Attribute.String + } +} + +export interface ApiBlogPostBlogPost extends Struct.CollectionTypeSchema { + collectionName: 'blog_posts' + info: { + description: 'Engineering blog posts that sync to MDX' + displayName: 'Blog Post' + pluralName: 'blog-posts' + singularName: 'blog-post' + } + options: { + draftAndPublish: true + } + attributes: { + content: Schema.Attribute.RichText & + Schema.Attribute.CustomField< + 'plugin::ckeditor5.CKEditor', + { + preset: 'defaultMarkdown' + } + > + createdAt: Schema.Attribute.DateTime + createdBy: Schema.Attribute.Relation<'oneToOne', 'admin::user'> & + Schema.Attribute.Private + date: Schema.Attribute.Date & Schema.Attribute.Required + description: Schema.Attribute.Text & Schema.Attribute.Required + featuredImage: Schema.Attribute.Media<'images'> + lang: Schema.Attribute.String & + 
Schema.Attribute.SetMinMaxLength<{ + maxLength: 10 + }> + locale: Schema.Attribute.String & Schema.Attribute.Private + localizations: Schema.Attribute.Relation< + 'oneToMany', + 'api::blog-post.blog-post' + > & + Schema.Attribute.Private + ogImageUrl: Schema.Attribute.String & + Schema.Attribute.SetMinMaxLength<{ + maxLength: 255 + }> + publishedAt: Schema.Attribute.DateTime + slug: Schema.Attribute.UID<'title'> & Schema.Attribute.Required + title: Schema.Attribute.String & + Schema.Attribute.Required & + Schema.Attribute.SetMinMaxLength<{ + maxLength: 255 + }> + updatedAt: Schema.Attribute.DateTime + updatedBy: Schema.Attribute.Relation<'oneToOne', 'admin::user'> & + Schema.Attribute.Private + } +} + +export interface ApiFinancialServicesPageFinancialServicesPage + extends Struct.SingleTypeSchema { + collectionName: 'financial_services_page' + info: { + description: 'Manage the Financial Services landing page content' + displayName: 'Financial Services Page' + pluralName: 'financial-services-pages' + singularName: 'financial-services-page' + } + options: { + draftAndPublish: true + } + attributes: { + applicationNotice: Schema.Attribute.Text + createdAt: Schema.Attribute.DateTime + createdBy: Schema.Attribute.Relation<'oneToOne', 'admin::user'> & + Schema.Attribute.Private + ctaDescription: Schema.Attribute.Text + ctaEmailLabel: Schema.Attribute.String & + Schema.Attribute.DefaultTo<'Contact Us'> + ctaSubscribeLabel: Schema.Attribute.String & + Schema.Attribute.DefaultTo<'Subscribe for Updates'> + ctaTitle: Schema.Attribute.String & + Schema.Attribute.DefaultTo<'Ready to Apply?'> + heroDescription: Schema.Attribute.Text & Schema.Attribute.Required + heroTitle: Schema.Attribute.String & + Schema.Attribute.Required & + Schema.Attribute.DefaultTo<'Unlock payment potential'> + introText: Schema.Attribute.Text & Schema.Attribute.Required + locale: Schema.Attribute.String & Schema.Attribute.Private + localizations: Schema.Attribute.Relation< + 'oneToMany', + 
'api::financial-services-page.financial-services-page' + > & + Schema.Attribute.Private + publishedAt: Schema.Attribute.DateTime + updatedAt: Schema.Attribute.DateTime + updatedBy: Schema.Attribute.Relation<'oneToOne', 'admin::user'> & + Schema.Attribute.Private + } +} + +export interface ApiGrantTrackGrantTrack extends Struct.CollectionTypeSchema { + collectionName: 'grant_tracks' + info: { + description: 'Individual grant programs' + displayName: 'Grant Track' + pluralName: 'grant-tracks' + singularName: 'grant-track' + } + options: { + draftAndPublish: true + } + attributes: { + amount: Schema.Attribute.String & Schema.Attribute.Required + createdAt: Schema.Attribute.DateTime + createdBy: Schema.Attribute.Relation<'oneToOne', 'admin::user'> & + Schema.Attribute.Private + description: Schema.Attribute.RichText & Schema.Attribute.Required + locale: Schema.Attribute.String & Schema.Attribute.Private + localizations: Schema.Attribute.Relation< + 'oneToMany', + 'api::grant-track.grant-track' + > & + Schema.Attribute.Private + name: Schema.Attribute.String & Schema.Attribute.Required + order: Schema.Attribute.Integer & Schema.Attribute.DefaultTo<0> + publishedAt: Schema.Attribute.DateTime + updatedAt: Schema.Attribute.DateTime + updatedBy: Schema.Attribute.Relation<'oneToOne', 'admin::user'> & + Schema.Attribute.Private + } +} + +export interface ApiNewsEventNewsEvent extends Struct.CollectionTypeSchema { + collectionName: 'news_events' + info: { + description: 'Events content that syncs to MDX' + displayName: 'Event' + pluralName: 'news-events' + singularName: 'news-event' + } + options: { + draftAndPublish: true + } + attributes: { + content: Schema.Attribute.RichText & + Schema.Attribute.CustomField< + 'plugin::ckeditor5.CKEditor', + { + preset: 'defaultMarkdown' + } + > + createdAt: Schema.Attribute.DateTime + createdBy: Schema.Attribute.Relation<'oneToOne', 'admin::user'> & + Schema.Attribute.Private + locale: Schema.Attribute.String & Schema.Attribute.Private + 
localizations: Schema.Attribute.Relation< + 'oneToMany', + 'api::news-event.news-event' + > & + Schema.Attribute.Private + order: Schema.Attribute.Integer & Schema.Attribute.DefaultTo<0> + publishedAt: Schema.Attribute.DateTime + slug: Schema.Attribute.UID<'title'> & Schema.Attribute.Required + title: Schema.Attribute.String & Schema.Attribute.Required + updatedAt: Schema.Attribute.DateTime + updatedBy: Schema.Attribute.Relation<'oneToOne', 'admin::user'> & + Schema.Attribute.Private + } +} + +export interface ApiPressItemPressItem extends Struct.CollectionTypeSchema { + collectionName: 'press_items' + info: { + description: 'Press releases and media mentions' + displayName: 'Press Item' + pluralName: 'press-items' + singularName: 'press-item' + } + options: { + draftAndPublish: true + } + attributes: { + category: Schema.Attribute.Enumeration< + ['press-release', 'media-mention', 'announcement'] + > & + Schema.Attribute.DefaultTo<'media-mention'> + content: Schema.Attribute.RichText & + Schema.Attribute.CustomField<'plugin::ckeditor.CKEditor'> & + Schema.Attribute.SetPluginOptions<{ + i18n: { + localized: false + } + }> + createdAt: Schema.Attribute.DateTime + createdBy: Schema.Attribute.Relation<'oneToOne', 'admin::user'> & + Schema.Attribute.Private + description: Schema.Attribute.Text & Schema.Attribute.Required + externalUrl: Schema.Attribute.String + featured: Schema.Attribute.Boolean & Schema.Attribute.DefaultTo + locale: Schema.Attribute.String & Schema.Attribute.Private + localizations: Schema.Attribute.Relation< + 'oneToMany', + 'api::press-item.press-item' + > & + Schema.Attribute.Private + publication: Schema.Attribute.String & + Schema.Attribute.SetMinMaxLength<{ + maxLength: 255 + }> + publicationLogo: Schema.Attribute.String + publishDate: Schema.Attribute.Date & Schema.Attribute.Required + publishedAt: Schema.Attribute.DateTime + slug: Schema.Attribute.UID<'title'> & Schema.Attribute.Required + title: Schema.Attribute.String & + 
Schema.Attribute.Required & + Schema.Attribute.SetMinMaxLength<{ + maxLength: 255 + }> + updatedAt: Schema.Attribute.DateTime + updatedBy: Schema.Attribute.Relation<'oneToOne', 'admin::user'> & + Schema.Attribute.Private + } +} + +export interface PluginContentReleasesRelease + extends Struct.CollectionTypeSchema { + collectionName: 'strapi_releases' + info: { + displayName: 'Release' + pluralName: 'releases' + singularName: 'release' + } + options: { + draftAndPublish: false + } + pluginOptions: { + 'content-manager': { + visible: false + } + 'content-type-builder': { + visible: false + } + } + attributes: { + actions: Schema.Attribute.Relation< + 'oneToMany', + 'plugin::content-releases.release-action' + > + createdAt: Schema.Attribute.DateTime + createdBy: Schema.Attribute.Relation<'oneToOne', 'admin::user'> & + Schema.Attribute.Private + locale: Schema.Attribute.String & Schema.Attribute.Private + localizations: Schema.Attribute.Relation< + 'oneToMany', + 'plugin::content-releases.release' + > & + Schema.Attribute.Private + name: Schema.Attribute.String & Schema.Attribute.Required + publishedAt: Schema.Attribute.DateTime + releasedAt: Schema.Attribute.DateTime + scheduledAt: Schema.Attribute.DateTime + status: Schema.Attribute.Enumeration< + ['ready', 'blocked', 'failed', 'done', 'empty'] + > & + Schema.Attribute.Required + timezone: Schema.Attribute.String + updatedAt: Schema.Attribute.DateTime + updatedBy: Schema.Attribute.Relation<'oneToOne', 'admin::user'> & + Schema.Attribute.Private + } +} + +export interface PluginContentReleasesReleaseAction + extends Struct.CollectionTypeSchema { + collectionName: 'strapi_release_actions' + info: { + displayName: 'Release Action' + pluralName: 'release-actions' + singularName: 'release-action' + } + options: { + draftAndPublish: false + } + pluginOptions: { + 'content-manager': { + visible: false + } + 'content-type-builder': { + visible: false + } + } + attributes: { + contentType: Schema.Attribute.String & 
Schema.Attribute.Required + createdAt: Schema.Attribute.DateTime + createdBy: Schema.Attribute.Relation<'oneToOne', 'admin::user'> & + Schema.Attribute.Private + entryDocumentId: Schema.Attribute.String + isEntryValid: Schema.Attribute.Boolean + locale: Schema.Attribute.String & Schema.Attribute.Private + localizations: Schema.Attribute.Relation< + 'oneToMany', + 'plugin::content-releases.release-action' + > & + Schema.Attribute.Private + publishedAt: Schema.Attribute.DateTime + release: Schema.Attribute.Relation< + 'manyToOne', + 'plugin::content-releases.release' + > + type: Schema.Attribute.Enumeration<['publish', 'unpublish']> & + Schema.Attribute.Required + updatedAt: Schema.Attribute.DateTime + updatedBy: Schema.Attribute.Relation<'oneToOne', 'admin::user'> & + Schema.Attribute.Private + } +} + +export interface PluginI18NLocale extends Struct.CollectionTypeSchema { + collectionName: 'i18n_locale' + info: { + collectionName: 'locales' + description: '' + displayName: 'Locale' + pluralName: 'locales' + singularName: 'locale' + } + options: { + draftAndPublish: false + } + pluginOptions: { + 'content-manager': { + visible: false + } + 'content-type-builder': { + visible: false + } + } + attributes: { + code: Schema.Attribute.String & Schema.Attribute.Unique + createdAt: Schema.Attribute.DateTime + createdBy: Schema.Attribute.Relation<'oneToOne', 'admin::user'> & + Schema.Attribute.Private + locale: Schema.Attribute.String & Schema.Attribute.Private + localizations: Schema.Attribute.Relation< + 'oneToMany', + 'plugin::i18n.locale' + > & + Schema.Attribute.Private + name: Schema.Attribute.String & + Schema.Attribute.SetMinMax< + { + max: 50 + min: 1 + }, + number + > + publishedAt: Schema.Attribute.DateTime + updatedAt: Schema.Attribute.DateTime + updatedBy: Schema.Attribute.Relation<'oneToOne', 'admin::user'> & + Schema.Attribute.Private + } +} + +export interface PluginReviewWorkflowsWorkflow + extends Struct.CollectionTypeSchema { + collectionName: 
'strapi_workflows' + info: { + description: '' + displayName: 'Workflow' + name: 'Workflow' + pluralName: 'workflows' + singularName: 'workflow' + } + options: { + draftAndPublish: false + } + pluginOptions: { + 'content-manager': { + visible: false + } + 'content-type-builder': { + visible: false + } + } + attributes: { + contentTypes: Schema.Attribute.JSON & + Schema.Attribute.Required & + Schema.Attribute.DefaultTo<'[]'> + createdAt: Schema.Attribute.DateTime + createdBy: Schema.Attribute.Relation<'oneToOne', 'admin::user'> & + Schema.Attribute.Private + locale: Schema.Attribute.String & Schema.Attribute.Private + localizations: Schema.Attribute.Relation< + 'oneToMany', + 'plugin::review-workflows.workflow' + > & + Schema.Attribute.Private + name: Schema.Attribute.String & + Schema.Attribute.Required & + Schema.Attribute.Unique + publishedAt: Schema.Attribute.DateTime + stageRequiredToPublish: Schema.Attribute.Relation< + 'oneToOne', + 'plugin::review-workflows.workflow-stage' + > + stages: Schema.Attribute.Relation< + 'oneToMany', + 'plugin::review-workflows.workflow-stage' + > + updatedAt: Schema.Attribute.DateTime + updatedBy: Schema.Attribute.Relation<'oneToOne', 'admin::user'> & + Schema.Attribute.Private + } +} + +export interface PluginReviewWorkflowsWorkflowStage + extends Struct.CollectionTypeSchema { + collectionName: 'strapi_workflows_stages' + info: { + description: '' + displayName: 'Stages' + name: 'Workflow Stage' + pluralName: 'workflow-stages' + singularName: 'workflow-stage' + } + options: { + draftAndPublish: false + version: '1.1.0' + } + pluginOptions: { + 'content-manager': { + visible: false + } + 'content-type-builder': { + visible: false + } + } + attributes: { + color: Schema.Attribute.String & Schema.Attribute.DefaultTo<'#4945FF'> + createdAt: Schema.Attribute.DateTime + createdBy: Schema.Attribute.Relation<'oneToOne', 'admin::user'> & + Schema.Attribute.Private + locale: Schema.Attribute.String & Schema.Attribute.Private + 
localizations: Schema.Attribute.Relation< + 'oneToMany', + 'plugin::review-workflows.workflow-stage' + > & + Schema.Attribute.Private + name: Schema.Attribute.String + permissions: Schema.Attribute.Relation<'manyToMany', 'admin::permission'> + publishedAt: Schema.Attribute.DateTime + updatedAt: Schema.Attribute.DateTime + updatedBy: Schema.Attribute.Relation<'oneToOne', 'admin::user'> & + Schema.Attribute.Private + workflow: Schema.Attribute.Relation< + 'manyToOne', + 'plugin::review-workflows.workflow' + > + } +} + +export interface PluginUploadFile extends Struct.CollectionTypeSchema { + collectionName: 'files' + info: { + description: '' + displayName: 'File' + pluralName: 'files' + singularName: 'file' + } + options: { + draftAndPublish: false + } + pluginOptions: { + 'content-manager': { + visible: false + } + 'content-type-builder': { + visible: false + } + } + attributes: { + alternativeText: Schema.Attribute.String + caption: Schema.Attribute.String + createdAt: Schema.Attribute.DateTime + createdBy: Schema.Attribute.Relation<'oneToOne', 'admin::user'> & + Schema.Attribute.Private + ext: Schema.Attribute.String + folder: Schema.Attribute.Relation<'manyToOne', 'plugin::upload.folder'> & + Schema.Attribute.Private + folderPath: Schema.Attribute.String & + Schema.Attribute.Required & + Schema.Attribute.Private & + Schema.Attribute.SetMinMaxLength<{ + minLength: 1 + }> + formats: Schema.Attribute.JSON + hash: Schema.Attribute.String & Schema.Attribute.Required + height: Schema.Attribute.Integer + locale: Schema.Attribute.String & Schema.Attribute.Private + localizations: Schema.Attribute.Relation< + 'oneToMany', + 'plugin::upload.file' + > & + Schema.Attribute.Private + mime: Schema.Attribute.String & Schema.Attribute.Required + name: Schema.Attribute.String & Schema.Attribute.Required + previewUrl: Schema.Attribute.String + provider: Schema.Attribute.String & Schema.Attribute.Required + provider_metadata: Schema.Attribute.JSON + publishedAt: 
Schema.Attribute.DateTime + related: Schema.Attribute.Relation<'morphToMany'> + size: Schema.Attribute.Decimal & Schema.Attribute.Required + updatedAt: Schema.Attribute.DateTime + updatedBy: Schema.Attribute.Relation<'oneToOne', 'admin::user'> & + Schema.Attribute.Private + url: Schema.Attribute.String & Schema.Attribute.Required + width: Schema.Attribute.Integer + } +} + +export interface PluginUploadFolder extends Struct.CollectionTypeSchema { + collectionName: 'upload_folders' + info: { + displayName: 'Folder' + pluralName: 'folders' + singularName: 'folder' + } + options: { + draftAndPublish: false + } + pluginOptions: { + 'content-manager': { + visible: false + } + 'content-type-builder': { + visible: false + } + } + attributes: { + children: Schema.Attribute.Relation<'oneToMany', 'plugin::upload.folder'> + createdAt: Schema.Attribute.DateTime + createdBy: Schema.Attribute.Relation<'oneToOne', 'admin::user'> & + Schema.Attribute.Private + files: Schema.Attribute.Relation<'oneToMany', 'plugin::upload.file'> + locale: Schema.Attribute.String & Schema.Attribute.Private + localizations: Schema.Attribute.Relation< + 'oneToMany', + 'plugin::upload.folder' + > & + Schema.Attribute.Private + name: Schema.Attribute.String & + Schema.Attribute.Required & + Schema.Attribute.SetMinMaxLength<{ + minLength: 1 + }> + parent: Schema.Attribute.Relation<'manyToOne', 'plugin::upload.folder'> + path: Schema.Attribute.String & + Schema.Attribute.Required & + Schema.Attribute.SetMinMaxLength<{ + minLength: 1 + }> + pathId: Schema.Attribute.Integer & + Schema.Attribute.Required & + Schema.Attribute.Unique + publishedAt: Schema.Attribute.DateTime + updatedAt: Schema.Attribute.DateTime + updatedBy: Schema.Attribute.Relation<'oneToOne', 'admin::user'> & + Schema.Attribute.Private + } +} + +declare module '@strapi/strapi' { + export module Public { + export interface ContentTypeSchemas { + 'admin::api-token': AdminApiToken + 'admin::api-token-permission': AdminApiTokenPermission + 
'admin::permission': AdminPermission + 'admin::role': AdminRole + 'admin::session': AdminSession + 'admin::transfer-token': AdminTransferToken + 'admin::transfer-token-permission': AdminTransferTokenPermission + 'admin::user': AdminUser + 'api::blog-post.blog-post': ApiBlogPostBlogPost + 'api::financial-services-page.financial-services-page': ApiFinancialServicesPageFinancialServicesPage + 'api::grant-track.grant-track': ApiGrantTrackGrantTrack + 'api::news-event.news-event': ApiNewsEventNewsEvent + 'api::press-item.press-item': ApiPressItemPressItem + 'plugin::content-releases.release': PluginContentReleasesRelease + 'plugin::content-releases.release-action': PluginContentReleasesReleaseAction + 'plugin::i18n.locale': PluginI18NLocale + 'plugin::review-workflows.workflow': PluginReviewWorkflowsWorkflow + 'plugin::review-workflows.workflow-stage': PluginReviewWorkflowsWorkflowStage + 'plugin::upload.file': PluginUploadFile + 'plugin::upload.folder': PluginUploadFolder + } + } +} diff --git a/src/components/Header.astro b/src/components/Header.astro index 9cc06609..0741c758 100644 --- a/src/components/Header.astro +++ b/src/components/Header.astro @@ -6,7 +6,7 @@ import DevelopersLogo from './logos/DevelopersLogo.astro' ---
- + diff --git a/src/components/blog/Pagination.astro b/src/components/blog/Pagination.astro index 7682204c..9ef5c4b2 100644 --- a/src/components/blog/Pagination.astro +++ b/src/components/blog/Pagination.astro @@ -28,7 +28,7 @@ const paginationList = Array.from({ length }, (_, i) => i + 1) { paginationList.map((num) => ( {num} diff --git a/src/components/pages/Footer.astro b/src/components/pages/Footer.astro index 856d137d..31a1dfed 100644 --- a/src/components/pages/Footer.astro +++ b/src/components/pages/Footer.astro @@ -7,31 +7,31 @@ const currentYear = new Date().getFullYear()
  • Mastodon
  • Twitter
  • Slack
  • Github
  • Linkedin diff --git a/src/components/pages/FoundationHeader.astro b/src/components/pages/FoundationHeader.astro index b9d95098..c0ab124f 100644 --- a/src/components/pages/FoundationHeader.astro +++ b/src/components/pages/FoundationHeader.astro @@ -32,21 +32,6 @@ import FoundationLogo from '../logos/FoundationLogo.astro' >Foundation @@ -586,7 +498,7 @@ import FoundationLogo from '../logos/FoundationLogo.astro' currentPath = currentPath.slice(0, -1) } - const activeSections = ['/developers/blog'] + const activeSections = ['/blog'] devLinks.forEach((devLink) => { if (devLink instanceof HTMLAnchorElement) { const linkPath = devLink.pathname diff --git a/src/components/pages/LanderHeader.astro b/src/components/pages/LanderHeader.astro index 80cca758..b1094324 100644 --- a/src/components/pages/LanderHeader.astro +++ b/src/components/pages/LanderHeader.astro @@ -4,7 +4,7 @@ import DevelopersLogo from '../logos/DevelopersLogo.astro'
    - + @@ -19,7 +19,7 @@ import DevelopersLogo from '../logos/DevelopersLogo.astro' Interledger Github diff --git a/src/content.config.ts b/src/content.config.ts index 20afe03f..75dabf7e 100644 --- a/src/content.config.ts +++ b/src/content.config.ts @@ -12,14 +12,65 @@ const blogCollection = defineCollection({ date: z.date(), lang: z.string().optional(), image: z.string().optional(), - tags: z.array(z.string()), - authors: z.array(z.string()), - author_urls: z.array(z.string()) + ogImageUrl: z.string().optional() + }) +}) + +const pressCollection = defineCollection({ + loader: glob({ pattern: '**/[^_]*.{md,mdx}', base: './src/content/press' }), + schema: z.object({ + title: z.string(), + description: z.string(), + publishDate: z.string(), + slug: z.string(), + publication: z.string().optional(), + publicationLogo: z.string().optional(), + externalUrl: z.string().optional(), + featured: z.boolean().default(false), + category: z.enum(['press-release', 'media-mention', 'announcement']).default('media-mention') + }) +}) + +const grantTrackCollection = defineCollection({ + loader: glob({ pattern: '**/[^_]*.{md,mdx}', base: './src/content/grant-tracks' }), + schema: z.object({ + name: z.string(), + amount: z.string(), + description: z.string(), + order: z.number().default(0) + }) +}) + +const financialServicesPageCollection = defineCollection({ + loader: glob({ + pattern: '**/[^_]*.{md,mdx}', + base: './src/content/financial-services' + }), + schema: z.object({ + heroTitle: z.string(), + heroDescription: z.string(), + introText: z.string(), + ctaTitle: z.string(), + ctaDescription: z.string().optional(), + ctaEmailLabel: z.string(), + ctaSubscribeLabel: z.string() + }) +}) + +const eventsCollection = defineCollection({ + loader: glob({ pattern: '**/[^_]*.{md,mdx}', base: './src/content/events' }), + schema: z.object({ + title: z.string(), + order: z.number().default(0) }) }) export const collections = { docs: defineCollection({ loader: docsLoader(), schema: docsSchema() 
}), i18n: defineCollection({ loader: i18nLoader(), schema: i18nSchema() }), - blog: blogCollection + blog: blogCollection, + press: pressCollection, + events: eventsCollection, + 'grant-tracks': grantTrackCollection, + 'financial-services': financialServicesPageCollection } diff --git a/src/content/blog/2018-01-29-simplifying-interledger-the-graveyard-of-possible-protocol-features.md b/src/content/blog/2018-01-29-simplifying-interledger-the-graveyard-of-possible-protocol-features.md deleted file mode 100644 index 1f551ce4..00000000 --- a/src/content/blog/2018-01-29-simplifying-interledger-the-graveyard-of-possible-protocol-features.md +++ /dev/null @@ -1,139 +0,0 @@ ---- -title: 'Simplifying Interledger: The Graveyard of Possible Protocol Features' -description: As the development of the Interledger Protocol (ILP) nears completion, I thought we should take a moment to remember some of the many core protocol features we’ve killed off along the way. -date: 2018-01-29 -slug: simplifying-interledger-the-graveyard-of-possible-protocol-features -authors: - - Evan Schwartz -author_urls: - - https://www.linkedin.com/in/evanmarkschwartz/ -external_url: https://medium.com/interledger-blog/simplifying-interledger-the-graveyard-of-possible-protocol-features-b35bf67439be -tags: - - Interledger - - Interoperability ---- - -As the development of the [Interledger Protocol](https://interledger.org/) (ILP) nears completion, I thought we should take a moment to remember some of the many core protocol features we’ve killed off along the way. - -
    - Lone tree on graveyard -
    Photo by Ashim D’Silva on Unsplash
    -
    - -These were 12 promising features, and countless hours were spent perfecting them. But they were sacrificed for simplicity’s sake. Rest in peace, old friends. - -Getting people to agree on any standard is notoriously difficult, so we have worked to make Interledger [as simple as possible](https://medium.com/@justmoon/blockchain-advocates-must-learn-the-law-of-standards-8f3116ccdc5f). We have often repeated the mantra that the core protocol would only be finished when there was nothing more to take out — and little left to debate. Today, we celebrate the life and death of these features for bringing us closer to payments interoperability. - -_(Curious which features are left? Look for upcoming posts on Interledger V4!)_ - -## 1\. The King: One Ledger to Rule Them All - -Died: *November, 2014*. Age: *Timeless.* -Cause of Death: *The world will never agree on a single ledger.* - -Every payment network wants to be king. From traditional networks like Visa and SWIFT to blockchains like Bitcoin, Ripple, Stellar, and Cosmos, many have tried to enable payments “interoperability” by convincing others to connect through their network. But as long as each provider wants to own the network, we’ll end up with the fragmented payment landscape we see today. - -The Interledger project began with the realization that the world will never agree to use a single payment network — whether centralized or decentralized, blockchain or traditional bank ledger. There will be a proliferation of networks. What we know now is there is a need to connect all of these payment networks with an [_internetworking_](https://en.wikipedia.org/wiki/Internetworking) protocol that is not tied to any one company, currency, or network. - -## 2\. The Notary: Fully Atomic Payments - -Died: [_June, 2016_](https://github.com/interledger/rfcs/issues/28). Age: *2 years*. 
-Cause of Death: *Trust isn’t universal.* - -If we could not agree to use a single ledger, maybe we could replicate the benefits of having all transactions within one system, but across multiple ledgers. The idea of “Atomic Mode,” as described in the [Interledger whitepaper](https://interledger.org/interledger.pdf), was to use a group of “notaries” or validators to ensure that transfers on multiple systems would be atomic, meaning they would be executed or rolled back together. Senders and intermediary connectors would first put funds on hold in the first part of a two-phase commit. Notaries would then decide whether the payment succeeded or failed, similar to blockchain validators or miners, but chosen on a per-transaction basis. - -Atomic mode provides some important benefits, but it only works if all parties in a payment share a commonly-trusted set of notaries or blockchain. Unfortunately, finding overlapping trust amongst groups using different ledgers and spread across the entire internet just isn’t likely. Atomic Mode would work best in pre-defined groups with fixed notaries, rather than as a solution for general interoperability. We prioritized the other mode, called Universal. Intermediaries take some [manageable risk](https://github.com/interledger/rfcs/blob/master/0018-connector-risk-mitigations/0018-connector-risk-mitigations.md#fulfillment-failure) but, the protocol does not require agreement on who to trust, making it more… universal. - -## 3\. The Cashier: Ledger-Generated Receipts - -Died: *December 2014*. Age: *6 months*. -Cause of Death: *Ledgers aren’t meant to understand one another.* - -Without notaries, how would ledgers know when to execute or roll back their transfers? We wanted the connectors to get paid only once the receiver was paid. One idea was to make the transfers dependent on a receipt from the last ledger proving that the receiver was paid. 
However, this would mean that all ledgers would need to understand that receipt from the receiver’s ledger. - -This idea died quickly. Building one ledger to understand another, as with [Sidechains](https://blockstream.com/sidechains.pdf) or [BTC Relay](http://btcrelay.org/), could work, or even building a group of ledgers to interoperate, such as with [Cosmos](http://cosmos.network/) and [Polkadot](https://polkadot.io/). However, we could not expect every ledger in the world to understand all other ledgers. Agreeing on how different systems would verify each other’s state would be a challenge and we could never expect all existing ledgers to be upgraded. The subtle realization was that a statement from the receiver that they were paid would be just as good as a proof from their ledger. For Interledger to achieve general interoperability it would be easier to standardize receiver behavior and we would need to minimize the requirements to integrate ledgers. - -## 4\. The Director: Source Routing - -Died: [_May, 2016_](https://github.com/interledgerjs/ilp-connector/pull/150). Age: *1 year* -Cause of Death: *Decisions should be made where the knowledge is.* - -A central question for an internetworking protocol like Interledger is how the paths for multi-hop payments are determined. Early versions of Interledger used source routing, in which senders would know the entire topology of the network and choose the payment path themselves. Connectors would broadcast routes and exchange rates and every participant would store a map of the network. Source routing may work for a limited number of nodes or for a single currency, but it does not scale for millions of nodes with fluctuating exchange rates. - -Inspired by the Internet’s decision to separate [addressing from routing](https://www.rfc-editor.org/ien/ien19.txt), we came up with an [Interledger address](https://interledger.org/developers/rfcs/ilp-addresses/) format similar to IP addresses. 
Senders would specify *where* they wanted their money to go, but connectors would determine *how* to route payments. Connectors would use their local knowledge of routes and rates, rather than needing everyone to keep an up-to-date map of all connectors. (Note: one of the powerful benefits of the Internet’s separation of addressing and routing was that it enabled the routing protocol to be [upgraded numerous times](https://medium.com/@datapath_io/the-history-of-border-gateway-protocol-a212b7ee6208) without most users noticing, because the IP address stayed the same.) - -## 5\. The Magician: Abstract Packet - -Died: [_January, 2017_](https://github.com/interledger/rfcs/issues/146). Reborn and Died Again: [_December, 2017_](https://github.com/interledger/rfcs/pull/347). -Cause of Death: *Having options isn’t always better.* - -One of the most often debated aspects of Interledger has been the ILP packet format — not just the fields in the packet but the encoding as well. Should the packet be sent as text, JSON, Protocol Buffers, or a custom binary format? Having an abstract packet definition that could be encoded in different formats and translated by connectors seemed attractive, because it meant we would not need to agree on one encoding. - -Leaving the encoding up to implementations would mean that every ledger protocol would need to send each of the fields from the packet individually. They would need to be careful to avoid subtle incompatibilities with other implementations. Furthermore, it would be nearly impossible to extend the ILP packet in the future, because it would be unlikely that every intermediary would correctly forward all extensions, including those they do not understand. Ultimately, we decided that picking a single format would provide greater consistency and extensibility than having multiple formats. - -## 6\. The Cryptographer: Crypto Conditions - -Died\*: [_February, 2017_](https://github.com/interledger/rfcs/issues/153). Age: *2 years*. 
-Cause of Death: *The features supported will be the least common denominator.\* Crypto Conditions continue to be* [_developed at the IETF_](https://github.com/rfcs/crypto-conditions) *and used outside of ILP* - -One of the most thoroughly designed features that was ultimately left out of ILP was the [Crypto Condition](https://github.com/rfcs/crypto-conditions): a standard for encoding different signature algorithms and ways to combine them. A central primitive in the original Interledger design were the conditions used to hold and execute payments. We spent months developing a standard for this more flexible type of multisig, [submitted it to the IETF](https://tools.ietf.org/html/draft-thomas-crypto-conditions-03), and then realized we did not need it. - -The problem with having many condition types is that all intermediaries in a certain path would need to support the same algorithms in order for them to be usable. That meant that the only algorithms you could rely upon having support for would be the least common denominator. Most likely, this would come down to [simple SHA256 hash-locks](https://github.com/interledger/rfcs/issues/153). Less than 20% of the functionality could serve more than 80% of the use cases, and so the Interledger standard parted ways with Crypto Conditions. - -## 7\. The Optimist: Condition-Less Transfers - -Died: [_June, 2017_](https://lists.w3.org/Archives/Public/public-interledger/2017Jun/0033.html). Age: *2 years*. -Cause of Death: *Anything that can be done at the edge of the network, should be.* - -While most payments would likely use conditions for security, we always thought there could be some micropayment use cases where the sender would not care about securing their payments with a condition. Originally, conditions were considered optional and the reference ILP connector would forward “optimistic” payments in addition to those with conditions. 
- -The idea of optimistic payments as a fully separate mode of ILP died when we realized this could be implemented [on top of ILP payments with conditions](https://lists.w3.org/Archives/Public/public-interledger/2017Jun/0033.html). Instead of requiring *all* connectors to have special functionality for forwarding condition-less payments, it could be made optional by using a well-known hash as the condition (such as the hash of 32 zero-bytes). Connectors that recognize optimistic payments could skip the hold step, while those that do not would get a valid fulfillment just like any other payment. The old [end-to-end principle](http://web.mit.edu/Saltzer/www/publications/endtoend/endtoend.pdf) won again, and the core ILP became one step simpler. - -## 8\. The Geologist: Accommodating Slow Ledgers - -Died: [_June, 2017_](https://www.coindesk.com/interoperability-boost-ripple-sends-blockchain-transaction-across-7-different-ledgers/). Age: *2 years*. -Cause of Death: *Only the speediest survive.* - -In June 2017, we excitedly sent a [single payment across seven different types of ledger integrations](https://www.coindesk.com/interoperability-boost-ripple-sends-blockchain-transaction-across-7-different-ledgers/), including payment channels, trustlines, and on-ledger escrow. We wrote up a spec for [Hashed Time-Lock Agreements (HTLAs)](https://interledger.org/developers/rfcs/hashed-timelock-agreements/) that described the array of options for integrating with ledgers. Having such a wide variety of integrations made Interledger more open and flexible. However, the “Seven Ledger Demo” showed that some integrations are superior to others. We spent minutes waiting for an on-ledger Ethereum transfer to execute, whereas the transfers via payment channels over Bitcoin and XRP went through instantaneously. 
- -After the demo, we turned our attention to sending Interledger payments with real money and started focusing on the integration methods that would provide fast, cheap, and good user experiences. For cryptocurrencies, this would mean payment channels instead of on-ledger escrow. The shift to payment channels and sending smaller, fast payments would also mark the beginning of the end for the next couple of features. - -## 9\. The Negotiator: Interledger Quoting Protocol - -Died: [_September, 2017_](https://github.com/interledger/rfcs/pull/309). Age: *14 months*. -Cause of Death: *The end-to-end principle struck again.* - -Since the shift to non-source routing, there had been two protocols that all senders and connectors needed to support: Interledger payments and the Interledger Quoting Protocol (ILQP). ILQP allowed you to ask connectors up front how much a given payment would cost to send. You could specify a fixed source, or destination amount, or ask for a Liquidity Curve (see feature 11). It was non-binding, but we thought that surely you needed a way to determine the cost before sending a payment. - -Similar to Optimistic Mode, ILQP was brought down by the realization that it could be implemented on top of normal Interledger payments. If payments were fast and cheap, what if you could just send a test payment and ask the receiver how much arrived? Using “end-to-end quoting,” senders could determine the rates without special functionality being built into the every connector. The end-to-end principle was victorious once again, and ILQP was removed from the core Interledger stack. - -## 10\. The Postman: Destination Amount Delivery - -Died: [_October, 2017_](https://github.com/interledger/rfcs/issues/312). Age: *14 months*. -Cause of Death: *One behavior is better than two.* - -Another feature that was introduced with the switch to non-source routing was the destination amount in the ILP packet. 
We thought that senders would want to indicate to the connectors exactly how much money should be delivered to the receiver. We debated what number format to use for the amounts, considered [various floating point encodings](https://github.com/interledger/rfcs/commit/9716fb7aa68a8770aee96413916e12edd69787fe#diff-42c9b615e907424c7cae3feb333d8b6fR36), and ultimately settled on unsigned 64-bit integers. Connectors would parse the ILP address to determine whether they should [“forward” the packet or “deliver” it](https://github.com/interledger/rfcs/issues/77) locally. This was considered so crucial that the destination amount was one of just three fields in the ILPv1 packet. - -Once we started implementing [“end-to-end quoting”](https://github.com/interledger/rfcs/pull/309) to replace ILQP, it became clear that we needed a way to have connectors forward packets without trying to deliver a specific amount. How could senders use a test payment to determine the exchange rate if they needed to know the rate first to determine the destination amount to put in the packet? “Forwarded payments” became the norm in ILP versions 2–4. Connectors would simply look at the amount in the incoming transfer, apply their local rate, and pass on the payments. This simplified the connector behavior, as they no longer needed the “delivery” functionality, nor up-to-date exchange rate information for the entire network to determine the proper amount. This change also enabled us to build multiple types of functionality on top of one simple ILP primitive, from end-to-end quoting to streaming payments. - -## 11\. The Surfer: Liquidity Curves and Large Packets - -Died: [_September, 2017_](https://github.com/interledger/rfcs/pull/309). Age: *9 months*. -Cause of Death: *Small is beautiful.* - -Should Interledger be built for small packet amounts, large amounts, or both? 
Since the goal was to support nearly all use cases, and thus all possible amounts, we needed a way to express how the exchange rate would depend on the payment size. We designed the [Liquidity Curve](https://github.com/interledger/rfcs/blob/51e2ec229085ddef1606b2848953901c61d5ae2f/asn1/InterledgerTypes.asn#L44-L68), which used a series of points to represent the input and output amounts. Liquidity Curves were used in both the routing and quoting protocols to represent the potentially complex exchange rates. - -For some time, there were discussions about killing Liquidity Curves because they were the most complicated feature in the core ILP. But they would ultimately be killed off by a more fundamental realization: that all Interledger payments would be small. Splitting larger payments down into smaller ones would make packets going over the network more homogenous. Exchange rates could be expressed as a single number and Interledger would actually use Internet-style packet switching. - -## 12\. The Escrow Agent: Conditional Ledger Transfers - -Died: [_December, 2017_](https://github.com/interledger/rfcs/issues/359). Age: *2 years, 6 months*. -Cause of Death: *When it seems simple, there’s one thing left to take out.* - -The most recent and surprising feature to pass away was the conditional transfer or “on-ledger escrow”, which had been with us since the white paper was written. Interledger payments were comprised of transfers on multiple ledgers and the ledgers would act as a kind of escrow agent. Certain varieties of [Hashed Time-Lock Agreements (HTLAs)](https://interledger.org/developers/rfcs/hashed-timelock-agreements/#htlas-without-ledger-supoprt) allowed for this behavior to be modeled by connectors for cases where ledgers did not natively support holds. 
- -However, the shift to smaller, faster payments ultimately led to the [realization](https://github.com/interledger/rfcs/issues/359) that it would be *connectors*, rather than *ledgers,* that would implement the conditions. Connectors would forward Interledger packets, creating payment obligations, and users would settle with unconditional ledger transfers *out of the flow of the ILP payment.* The only requirement for ledgers to be used with ILP would be the ability to make simple transfers. The faster and cheaper a ledger is — or if it supports [simple payment channels](https://interledger.org/developers/rfcs/hashed-timelock-agreements/#simple-payment-channels) — the faster a connector and its users (or peers) can settle. But, the system works even with the slowest ledgers. The condition was moved from the ledger transfer into the ILP packet and ILPv4 was born. - -Wondering what’s left in Interledger V4 if all of this has been taken out? Keep an eye out for upcoming posts that will explain it in detail! - -Check out [Interledger.org](https://interledger.org/) and join the [community](https://community.interledger.org/) to learn more and get involved in the project! diff --git a/src/content/blog/2018-10-03-interledger-how-to-interconnect-all-blockchains-and-value-networks.md b/src/content/blog/2018-10-03-interledger-how-to-interconnect-all-blockchains-and-value-networks.md deleted file mode 100644 index 27988d98..00000000 --- a/src/content/blog/2018-10-03-interledger-how-to-interconnect-all-blockchains-and-value-networks.md +++ /dev/null @@ -1,109 +0,0 @@ ---- -title: 'Interledger: How to Interconnect All Blockchains and Value Networks' -description: 'Interledger was born out of a project to build a blockchain-agnostic smart contracts platform. A key challenge was neutrality: how could a decentralized app buy resources like storage and computing, without being tied to a specific blockchain?' 
-date: 2018-10-03 -slug: interledger-how-to-interconnect-all-blockchains-and-value-networks -authors: - - Evan Schwartz -author_urls: - - https://www.linkedin.com/in/evanmarkschwartz/ -external_url: https://medium.com/xpring/interledger-how-to-interconnect-all-blockchains-and-value-networks-74f432e64543 -tags: - - Interledger - - Connector - - Streaming Payments ---- - -*By* [_Evan Schwartz_](https://www.linkedin.com/in/evanmarkschwartz/) *and* [_Vanessa Pestritto_](https://www.linkedin.com/in/vanessaalexandra/) - -Interledger was born out of a project to build a [blockchain-agnostic smart contracts](https://medium.com/coil/codius-smart-contracts-made-from-containers-b3b16c3e3890) platform. A key challenge was neutrality: how could a decentralized app buy resources like storage and computing, without being tied to a specific blockchain? Across the internet, apps and services face a similar issue of how to directly monetize without relying on a single cryptocurrency, a proprietary network like Visa or PayPal, or a monolithic platform like Apple. Interledger was designed to answer the question: - -> What would a universal network for sending value, independent of any company or currency, look like? - -Interledger is now live and the core protocol was finalized in late 2017. The network’s early use cases include trustlessly exchanging cryptocurrencies and enabling new business models with streaming micropayments. And that’s just the beginning. This post gives an overview of the Interledger network and highlights key features of the protocol that help connect vastly different blockchains and value systems. - -## A Network of Decentralized Exchanges - -Interledger is made up of a network of [connectors](https://interledger.org/developers/rfcs/interledger-architecture/#sender-receiver-connectors), independent operators that act as decentralized exchanges or market makers for cryptocurrencies, fiat currencies, and other tokenized assets. 
The protocol allows users to transact natively on the network of their choice, without needing to move assets to a centralized exchange or to a specific blockchain for trading. - -The Interledger network has no central authority or company and the protocol is not tied to any currency, token or blockchain. - -### Paying from One Currency to Another - -With Interledger, a user can send BTC and the recipient will automatically receive ETH, or whatever their preferred currency happens to be. The assets are exchanged in the flow of the transfer without either party needing to think about how this happens. - -Behind the scenes, Interledger routes packets of money across value networks like the internet routes packets of data between Internet Service Providers (ISPs). When the user sends BTC, the user’s wallet sends Interledger packets denominated in BTC to a connector. The connector applies their exchange rate and forwards ETH-denominated packets on to the receiver. - -![Diagram showing how the connector forwards packets to the receiver](/developers/img/blog/2018-10-03/connector.webp) - -For more obscure assets, Interledger packets are automatically routed across multiple connectors and each one is incentivized to help find the best paths through the network. Importantly, all of this happens without the sender needing to trust the connectors, as the protocol guarantees that the sender’s money cannot be lost or stolen in transit (see Trustless Sending below). - -## Key Features of the Open Protocol - -Interledger is a pure protocol and simplicity was one of the primary design principles. The simpler the protocol, the more networks it can connect. In this way, we drew much of our inspiration from the Internet. 
**An open network of networks is more resilient, scalable, and feature-rich than any independent network on its own.** - -The key features of the Interledger Protocol are: - -- Simple Packet Format -- Trustless Sending -- Packetizing Value - -### Simple Packet Format - -The core of the Interledger Protocol (ILP) is the ILP packet, the messaging standard used between senders, connectors, and receivers. The packet is inspired by Internet Protocol (IP) packets and addresses, which are the core of the Internet. - -[ILPv4](https://interledger.org/developers/rfcs/interledger-protocol/) has three packet types: Prepare, Fulfill, and Reject; which correspond to request, response, and error messages. Connectors forward Prepare packets from senders to receivers and the connectors relay the Fulfill or Reject packets back from the receivers to the senders. - -Prepare packets have only five fields: a destination address, amount, end-to-end data, and a “condition” and expiration that enable the trustless sending. The packet format is network-agnostic and the universal [ILP address](https://interledger.org/developers/rfcs/ilp-addresses/) scheme helps connectors route packets to the correct receiver. - -### Trustless Sending - -The second key feature of Interledger is that it enables users to send money through the network of connectors without needing to trust them. ILP guarantees that the sender’s money cannot be lost or stolen in flight, which is critical for creating an open and competitive network. - -Interledger uses a [“forward-and-backward” packet flow](https://interledger.org/developers/rfcs/interledger-protocol/#ilp-packet-lifecycle), or incentivized two-phase commit, in which the recipient gets paid before the money ever leaves the sender’s account. 
- -![Diagram showing the “forward-and-backward” packet flow](/developers/img/blog/2018-10-03/packet-flow.webp) - -- Prepare packets travel from the sender to the receiver (the “forward” part) and represent a commitment to pay, *if and only if* the connector presents proof that the receiver was paid. -- Fulfill packets include proof that the receiver was paid and are relayed by connectors back to the sender (the “backward” part). Only the receiver could generate the correct proof, which is a simple preimage of a hash. The sender knows with certainty when the money has arrived, no matter what path the packet has taken through the network of connectors. If a packet is misrouted or dropped, the sender will never get the Fulfill and the money will never leave their account. -- Reject packets are returned by the receiver if they do not want the Prepare packet or the packet does not pass one of the receiver’s checks. Connectors may also return Reject packets if the Prepare expires before the Fulfill is returned. Note that the sender can retry rejected packets, because they haven’t sent the money yet, and [higher-level protocols](https://medium.com/interledger-blog/streaming-money-and-data-over-ilp-fabd76fc991e) built on top of Interledger handle retries automatically. - -### Packetizing Value - -Interledger’s third key feature (and the [biggest difference between ILPv1 and ILPv4](https://interledger.org/developers/rfcs/interledger-protocol/#differences-from-previous-versions-of-ilp)) is packetizing value, or splitting up larger transfers into many lower-value packets. This is very similar to how big files sent over the internet are sent as many small packets. The benefits are surprisingly analogous to the internet itself, as homogeneous packets increase the network’s efficiency, security, and interoperability - -Connectors process Interledger packets using limited pools of capital or liquidity, and using this efficiently is central to keeping costs low. 
Each Prepare packet requires connectors to hold the specified amount of money until the transaction is fulfilled or rejected. Smaller packet amounts help connectors avoid reserving large amounts of money for each transaction before knowing if it will be fulfilled. Connectors can operate with smaller pools of liquidity and increase the velocity and utilization of their money. - -Packetized payments also increase the security and resilience of the network. Connectors can allocate their liquidity like Internet bandwidth (“payment bandwidth”) to prevent users from interfering with others’ connections. Additionally, smaller packets enable the use of shorter Prepare packet timeouts, which is critical for mitigating the [“free option problem”](https://altheamesh.com/blog/the-free-option-problem/) (locking in an exchange rate that attackers could exploit). At the same time, lower-value packets reduce the [risk](https://github.com/interledger/rfcs/blob/main/0018-connector-risk-mitigations/0018-connector-risk-mitigations.md) to a connector posed by failing to deliver the Fulfill packet in time. - -Finally, packetized payments help Interledger connect more disparate types of ledgers and facilitate a broader array of use cases. Smaller packets can be cleared through ILP without ledger-provided escrow, which was needed for [ILPv1](https://github.com/interledger/rfcs/blob/main/deprecated/0003-interledger-protocol/0003-interledger-protocol.md#model-of-operation). This reduces the requirements for integrating a ledger down to just having the ability to transfer value (though simple payment channels are nice to have to increase speed and lower costs). 
- -Connectors can optimize for speed and throughput, because every transaction— from large purchases to streams of micropayments — turns into similarly-sized ILP packets.\` - -## The Interledger Network Today - -The early Interledger network is optimized for micropayment use cases and trustlessly trading cryptocurrencies natively across blockchains. Here are some of the infrastructure and application companies building with ILP (and yes, they’re hiring!): - -- [Coil](https://coil.com/) is a subscription service for supporting web content creators underpinned by Interledger micropayments and the proposed [Web Monetization standard](https://webmonetization.org/). Link to [demo](https://www.youtube.com/watch?v=q6sXGdQ_knE). -- [StrataLabs](https://web.archive.org/web/20201113153526/https://www.stratalabs.io/) is the first commercial Interledger connector company, enabling micropayment services like Coil. -- [Kava](https://kava.io/) runs an Interledger connector and is developing technology for the ILP ecosystem including new cross-currency integrations and a [Cosmos](https://cosmos.network/)\-based blockchain optimized for ILP. -- [The Bill & Melinda Gates Foundation](https://www.gatesfoundation.org/What-We-Do/Global-Growth-and-Opportunity/Financial-Services-for-the-Poor) developed [Mojaloop](http://mojaloop.io/), an open source payment system for emerging markets, using Interledger to increase financial inclusion through interoperability. -- Ilp.ix, an [mlab](https://mlab.company/) project, is an XRP peering exchange that helps connectors find and connect to one another. -- [XRP Tip Bot](https://www.xrptipbot.com/) enables Twitter, Reddit, and Discord users to tip one another using XRP and ILP. -- More are in stealth mode and coming soon! - -## Build on Interledger - -- Interested in running an Interledger connector? Check out [this guide](https://medium.com/interledger-blog/running-your-own-ilp-connector-c296a6dcf39a). 
-- Want to start building apps with Interledger? Download [moneyd](https://medium.com/interledger-blog/using-moneyd-to-join-the-ilp-testnet-ba64bd42bb14) and check out the [tutorials](https://medium.com/interledger-blog) to connect to Interledger in just a few minutes. -- If you’re interested in getting involved in the project, join the bi-weekly [community calls](https://interledger.org/events) and come chat with us on [Gitter](https://gitter.im/interledger/Lobby). - -At Xpring, Ripple’s ecosystem initiative, we’re focused on supporting developers and projects building both infrastructure and applications in the Interledger ecosystem. We’re specifically interested in new connectors and wallets as well as micropayment services and decentralized exchange applications. If you’re building in these areas, please get in touch at [xpring@ripple.com](mailto:xpring@ripple.com). - -Disclosure: Xpring is an investor in Coil and StrataLabs. - -## About Interledger - -Interledger is an open source protocol developed by the [W3C Interledger Community Group](https://www.w3.org/community/interledger/). There is no Interledger company, currency, or blockchain. 
- -_Thanks to Danny Aranda, Brandon Curtis, Kevin Davis, Meredith Finkelstein, Jamie Goldstein, Brian Kerr, Akash Khosla, Austin King, Zaki Manian, Cristina Nita-Rotaru, Teemu Paivinen, Sid Ramesh, Rome Reginelli, Dan Robinson, Dino Rodriguez, and Scott Stuart for their feedback on this post!_ diff --git a/src/content/blog/2019-01-23-thoughts-on-scaling-interledger-connectors.md b/src/content/blog/2019-01-23-thoughts-on-scaling-interledger-connectors.md deleted file mode 100644 index 869db5de..00000000 --- a/src/content/blog/2019-01-23-thoughts-on-scaling-interledger-connectors.md +++ /dev/null @@ -1,81 +0,0 @@ ---- -title: Thoughts on Scaling Interledger Connectors -description: Streaming payments mean that Interledger connectors need to process huge volumes of Interledger packets, but the current reference implementation is hard to run at scale. -date: 2019-01-23 -slug: thoughts-on-scaling-interledger-connectors -authors: - - Evan Schwartz -author_urls: - - https://www.linkedin.com/in/evanmarkschwartz/ -external_url: https://medium.com/interledger-blog/thoughts-on-scaling-interledger-connectors-7e3cad0dab7f -tags: - - Interledger - - Connector - - Scaling - - Streaming Payments - - Internet Of Value ---- - -Streaming payments mean that Interledger connectors need to process huge volumes of Interledger packets, but the current reference implementation is hard to run at scale. My hypothesis is that we should make the connector completely stateless using an HTTP-based bilateral communication protocol. - -This post describes ongoing work and neither the design nor the protocol are settled. Comments and alternative suggestions are welcome! You can find a basic prototype of the proposed design implemented [here](https://github.com/emschwartz/interledger-rs/commit/af795bc03a236ee39798e6dc76524afd49cef876). 
- -## Bilateral Communication in Interledger - -[Interledger.js](https://github.com/interledgerjs) uses a [plugin architecture](https://github.com/interledger/rfcs/blob/main/deprecated/0024-ledger-plugin-interface-2/0024-ledger-plugin-interface-2.md) to abstract away different possible bilateral messaging protocols, but today all of the plugins are built on the [Bilateral Transfer Protocol (BTP)](https://interledger.org/developers/rfcs/bilateral-transfer-protocol/). BTP is a binary request/response protocol implemented over [WebSockets](https://en.wikipedia.org/wiki/WebSocket). It originally included message types for Prepare, Fulfill, Reject and Transfer, but [BTP 2.0](https://github.com/interledger/rfcs/pull/383), which is used today, stripped out nearly everything except request/response semantics, authentication, and “sub-protocol” naming. - -Why did we use WebSockets? Over the years of working on Interledger, we have had [countless](https://github.com/interledger/rfcs/pull/125) [discussions](https://github.com/interledger/rfcs/pull/251) about the optimal “ledger layer” or bilateral messaging protocol to use for communicating Interledger details. At various points, we considered HTTP, GRPC, WebRTC, MQTT, and UDP, among others. - -WebSockets were chosen for their mix of: - -- Available implementations in nearly all programming languages -- Relatively low overhead on the wire -- Bidirectional messaging (without requiring both parties to have publicly accessible endpoints) -- Server and browser support - -This set of features meant that we could support a single protocol for both client-to-server relationships and peering relationships between connectors. However, as the Interledger network has grown, we are seeing the limits of using BTP for server-to-server communication. - -## Challenges Scaling WebSocket Peering - -Interledger Service Providers (ILSPs) need to run multiple instances of the connector in order to process large volumes of Interledger packets. 
Unfortunately, this is difficult to do today, arguably because of BTP and its use of WebSockets. - -Current ILSPs that run multiple connectors configure different WebSocket URLs and ILP addresses for each instance. As a result, peering with another ILSP requires configuring each connector with *all* of the URLs and ILP addresses of the peer’s connectors (n² connections). This complicates configuring connectors, exposes an ILSP’s internal network configuration to their peers (a potential security concern), and prevents “autoscaling” connectors (having a cloud provider automatically deploy new instances to handle additional demand). Furthermore, if a single connector instance crashes, any applications that are in the process of sending packets to or from that ILP address (for example, using [STREAM](https://medium.com/interledger-blog/streaming-money-and-data-over-ilp-fabd76fc991e)) will need to re-establish their connection with a new ILP address. - -## Stateless, HTTP-Based Connectors - -An alternative to our current architecture is to switch the bilateral communication to HTTP(S) and make the connector completely stateless. - -As illustrated below, each incoming ILP Prepare packet would be its own HTTP POST request, and the ILP Fulfill or Reject would come back on the HTTP response. An ILSP would run a standard HTTP load balancer in front of an autoscaling cluster of connectors. Connectors would look up the next hop in their routing table and send an outgoing HTTP POST request to the peer’s endpoint, which would likely correspond to their load balancer. As a result, each ILSP could use a single ILP address and HTTPS URL for peering and the internal configuration of their network would be kept private. - -
    - Proposed ILSP architecture -
    Proposed ILSP architecture
    -
    - -## Balance Logic In the Database - -In order to keep the connector stateless, all balance logic would need to be performed in a database. This could be done using a fast, in-memory system like [Redis](https://redis.io/) while the authoritative ledger could be persisted by writing all packets to an on-disk SQL or NoSQL store. The advantage of offloading balance-related transactions to a database is that that is exactly what databases are designed for. If there is even a slight possibility that multiple connectors could process transactions for the same account, a single system will need to ensure that transactions are applied atomically. Since we need the database to be the ultimate arbiter, we should offload all of the related logic to it. (Note that we may eventually surpass the performance of available databases, but we are many orders of magnitude of transactions away from that.) - -## Connection Overhead? - -What about the overhead of establishing HTTPS connections? Each client or peer could keep a TLS/TCP socket open with the load balancer using HTTP2 or HTTP 1.1 with Keep-Alive. The load balancer would similarly maintain open sockets with the internal connector instances. At worst, each connector might have an open socket with each peer’s load balancer for outgoing packets. - -Even if outgoing connections are a concern or performance issue, most load balancers’ strategies can be configured to prioritize sending certain traffic to particular instances. For example, Google Cloud’s load balancer supports [Session Affinity](https://cloud.google.com/load-balancing/docs/backend-service#session_affinity) that can route requests based on parameters such as client IP address. This would boost the performance in cases where the same HTTP client is sending many packets to the same destinations. 
Alternatively, the sender of the HTTP-based protocol could include the destination ILP address in an HTTP header (or even the URL path) so that the load balancer could route based on the final destination, which is a reasonable proxy for the next hop the packet will be forwarded to. (Note that because of HTTP2 header compression, adding the destination ILP address in a header would incur [1–2 bytes](https://blog.cloudflare.com/hpack-the-silent-killer-feature-of-http-2/) per request on the wire if many packets are sent to the same destination.) - -## Separating Clearing and Settlement - -Another change that would help scale (and simplify) the connector would be to completely separate the forwarding of Interledger packets from settlement-related behavior. The Interledger.js plugin architecture assumes that a plugin will handle both. However, as soon as companies started running connectors and working to scale their throughput, they immediately switched to an out-of-stream settlement process. If a connector has enough volume, performing and verifying a cryptographic signature for each packet quickly becomes an issue for CPU usage and latency. Furthermore, if you use payment channels for settlement, you need a process that is constantly monitoring the blockchain for close transactions anyway, so that same system might as well handle updating the channels too. - -Part of the original motivation for combining clearing and settlement in the plugin was due to the use of WebSockets for bilateral communication. If you already have a communication channel with a peer for sending ILP packets, you might as well use the same channel to send settlement-related messages. Switching to HTTP can help us decouple this. - -A separate settlement engine could trivially make outgoing HTTP requests to send payment channel updates or other details to its peers. 
Incoming requests could be routed by the same externally-facing load balancer using URL path-based routing (for example, XRP Payment Channel updates would be POSTed to provider.example/xrp). The settlement engine would also connect to the same database as the connectors to atomically adjust the balance based on incoming settlements. A final benefit of this separation is that the settlement engine would be a standalone service, so it would not need to be reimplemented in every programming language as the plugins must be today. - -## HTTP(S) Everywhere - -One of the earlier arguments for WebSockets was being able to support clients that do not have publicly accessible endpoints, such as browser clients or mobile apps. However, the most widely used Interledger Application Layer Protocol, the [Simple Payment Setup Protocol (SPSP)](https://interledger.org/developers/rfcs/simple-payment-setup-protocol/), requires a public HTTPS endpoint. Thus, in order to receive money via Interledger right now, an application already needs a public endpoint. - -Until the end of last year, I had thought that we would eventually replace SPSP with another protocol that would not require a public endpoint to receive payments. It felt a bit silly to pull in heavy dependencies including HTTP, TLS, DNS, and the whole (flawed) Certificate Authority system for the [single HTTPS request](https://interledger.org/developers/rfcs/simple-payment-setup-protocol/#query-get-spsp-endpoint) SPSP uses. However, the alternatives leave much to be desired. Few people outside of the cryptocurrency world want cryptographic keys as identifiers and, as of today, there is no alternative for establishing an encrypted connection from a human-readable identifier that is anywhere nearly as widely supported as DNS and TLS. - -This was how I learned to stop worrying and love HTTP(S). 
If receiving money already requires a publicly accessible HTTPS server, then why not just use another HTTPS endpoint to receive ILP packets? Making connectors stateless should help us scale to handle greater volumes of ILP packets and enable groundbreaking use cases with efficient streaming payments. - -If you’re interested in helping to scale Interledger infrastructure, get in touch because companies in the Interledger community are hiring and looking for people with your skills! You can also join the [Interledger Community Group calls](https://interledger.org/events), where we’ll be discussing this topic and many others. diff --git a/src/content/blog/2024-04-10-the-telemetry-tale.md b/src/content/blog/2024-04-10-the-telemetry-tale.md deleted file mode 100644 index b46c74dc..00000000 --- a/src/content/blog/2024-04-10-the-telemetry-tale.md +++ /dev/null @@ -1,152 +0,0 @@ ---- -title: 'The Telemetry Tale: A Journey into the Metrics of Interledger' -description: When simple metrics are paired with complex cloud solutions and important privacy considerations, the implementation process becomes significantly more complicated. -date: 2024-04-10 -slug: the-telemetry-tale -authors: - - Sarah Jones -author_urls: - - https://www.linkedin.com/in/sarah-jones-ba6bb6b9 -tags: - - Interledger - - Telemetry ---- - -## Charting the Course - -[Rafiki](https://rafiki.dev/) is open-source software that enables Account Servicing Entities (ASEs), like digital wallet providers, to leverage [Interledger](https://interledger.org/interledger) functionality. Interledger creates interoperability between different payment systems and currencies, making payments easier, faster, and cheaper. We hope that by removing the friction of this integration, we can expand the adoption of Interledger and grow the network. - -This is where our tale begins, on a balmy August in 2023, with the Rafiki team united for our work week in Cluj-Napoca, Romania. 
- -In order to measure the growth of the network, we needed to capture its state over time: - -- how many transactions occurred on the network over the last week / month / year -- how much value flowed over the network in the last week / month / year - -Since telemetry was marked as a high-priority item, promises were made about completing the feature in the course of the week. We envisioned a streamlined process using managed services. With a few configuration and integration tweaks, we’d be up and running in no time, right? Not quite, as we soon accepted. - -It is a truth universally acknowledged that software development timelines are more aspirational than absolute. Telemetry was merged into main 6 months later. This article provides an account of our journey and an explanation of our final solution. - -The initial plan was to use [OpenTelemetry](https://opentelemetry.io/) collectors alongside AWS-managed [Prometheus](https://prometheus.io/) and [Grafana](https://grafana.com/). Our decision was driven by a commitment to flexibility, compatibility, and open standards. OpenTelemetry (Otel) is a standardized observability framework that is designed to provide a uniform way to capture and export telemetry data across various environments. We chose to use Prometheus and Grafana, but with Otel collectors, integrating ASEs could send these metrics to different backends for their own use as well. Prometheus is a monitoring toolkit that stores the incoming metrics in its time-series database. It includes its own query language, [PromQL](https://prometheus.io/docs/prometheus/latest/querying/basics/), that enables detailed querying and analysis of the data. Grafana can query Prometheus directly using PromQL, allowing us to create visualizations of the collected metrics in dashboards. 
- -The open-source nature of these components aligns with Rafiki's principles, ensuring our telemetry stack not only meets our technical requirements but also reflects our commitment to open, accessible technology. - -We donned headsets, turned the music up, and set off to work spinning up our required services and diving into the Rafiki code base. - -## Navigating Cloud Complexities - -The allure of managed services began to fade as we grappled with their constraints. Managed services add complexities to troubleshooting, where the confluence of permissions, network access, and external service configurations can interplay in unexpected ways. Debugging became one of our biggest challenges. - -Throughout this process, Grafana was our only source of truth, serving as our sole beacon of verification for whether metrics were successfully being captured. Prometheus, acting as a managed-service intermediary, was an inscrutable black box. This was despite having activated every available logging feature within AWS. - -This meant that when we encountered issues it was not clear whether the problem lay in a breakdown in communication between Prometheus and Grafana or whether it was between the Otel SDK and Prometheus. To diagnose problems we had to redirect the Otel collector's Prometheus write URL to a [BusyBox](https://busybox.net/) setup where we could perform a tcpdump to capture and inspect packets and verify if data was indeed being sent. - -We ran into a roadblock when we realized that AWS-managed Grafana does not allow for public dashboards. To resolve this we had to actually abandon AWS-managed Grafana and move over to Grafana Cloud. They offered a workaround by editing the configuration to enable public dashboard access upon request--a process they handled with commendable responsiveness. Unfortunately, it still fell short by imposing limitations on embedding these dashboards on our own site. 
- -Despite Grafana Cloud’s responsive support team, we also encountered issues adding the AWS-Managed Prometheus as a data source. Our [sigv4 authentication](https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_aws-signing.html) failed with a 403 Forbidden response, despite having the appropriate IAM permissions. The issue resolved spontaneously without clear intervention, implying an external factor (potentially AWS-side changes or maintenance) was at play. - -You can view our public dashboard for test data telemetry [here](https://rafikitelemetry.grafana.net/public-dashboards/f70c8a6033b14da5a9f1cb974def602a). - -Here is an example of how it looks: - -![A screenshot of Testnet's telemetry dashboard](/developers/img/blog/2024-04-10/testnet-telemetry-dashboard-screenshot.png) - -Each column of our dashboard represents one of our metrics (number of transactions, value sent through the network, and average transaction value) measured over the last hour, day, week, and month respectively. - -## Refining Our Course - -As we moved past our infrastructure hurdles, our attention shifted towards decisions surrounding our approach to data collection. Initially, the idea of capturing transaction values in their original currencies seemed appealing. It promised insights into the diversity of currencies traversing the Interledger network and their respective volumes. - -Upon deeper reflection, we recognized the potential privacy implications of such granularity. Given the predominance of USD transactions, data on less common currencies could inadvertently reveal specifics about the activities of certain entities. - -To mitigate this risk and enhance data anonymity, we opted to standardize on USD as the base currency for all transactions. This decision introduced a new challenge: not all ASEs might have the means to convert their transactions to USD. To address this, we incorporated a local rates conversion service. 
- -Unfortunately, we couldn’t find a free service that provides all the conversions we might want. So we've implemented an AWS Lambda function which runs daily, pulling data from our configured sources and then storing it in an Amazon S3 bucket. - -This setup positions us to seamlessly integrate additional data sources as needed by adding the new data source to our Lambda function. Currency conversions are processed internally within the Rafiki instance itself, maintaining confidentiality from public exposure. - -While we were extracting transaction counts at the level of individual payments, we recognized that tracking transaction values at this level compromised privacy by revealing the full transaction value amount. Finding an optimal location to extract the value metric was not straightforward. - -We were initially unsure of collecting our metrics at the ILP packet level since this layer did not readily expose the asset’s scale and currency information that we needed to get all value metrics into a comparable form. So we shifted our focus to the accounting layer. This layer processes the outcomes of packet layer interactions (such as successfully delivered packets and failed or rejected transfers) and converts these activities into financial records. - -We considered embedding our telemetry logic directly within this layer, but ultimately decided to maintain a better separation of concerns by giving telemetry its own middleware. Using a middleware layer also gave us greater flexibility in exposing the full context we needed which opened up the possibility of using the ILP connector core to extract our value metrics at packet level. - -Connectors apply currency exchange as needed and forward ILP packets from the sender to the receiver through the Interledger network. 
Positioning our data extraction at a packet level ensured that data from a single transaction, potentially spread across multiple packets, or data from various transactions occurring simultaneously, could not be traced back to a specific single payment. - -A significant issue emerged when considering scenarios where the maximum packet value could accommodate an entire transaction amount within a single packet. Such a case threatened to undermine our privacy safeguards. This underscored the complexity of our task and highlighted the necessity for formalized, privacy-preserving methodologies in our telemetry framework. - -## Deep Dive into Privacy - -Privacy is always important when it comes to financial transactions. It is not only about individual and institutional confidentiality, but it is also a protective mechanism against market manipulation. Concealing transaction volume and pattern specifics allows us to prevent market participants from using the information to influence prices or liquidity. With this in mind, we re-worked our strategy using a layered, privacy-centric approach. We also ensure all data anonymization occurs locally. - -### Packetization - -The Interledger Protocol (ILP) naturally anonymizes transactions by splitting larger transactions across multiple payment packets. This only applies when the maximum value per ILP packet is smaller than the total transaction value. - -### Currency Conversion - -Converting all transactions to a standardized base currency ensures a uniform metric for comparison and introduces a degree of data obfuscation through the daily sampling of conversion rates, which adds an approximation layer to the transaction details. Our [AWS Lambda function](https://github.com/interledger/rafiki/blob/main/aws/lambdas/exchange-rates/main.go) retrieves and stores daily exchange rates in a publicly accessible Amazon S3 bucket, deliberately avoiding versioning to further enhance privacy. 
Of course, this is only relevant for non-USD transactions. - -### Bucketing and Rounding Technique - -This method categorizes transaction values into "buckets" to protect privacy, with the bucket size dictating the rounding precision. For common, lower-value transactions, we use a linear scale to create closely spaced buckets, enabling detailed granularity. For less common, higher-value transactions that necessitate greater privacy, we switch to logarithmic scaling, resulting in wider bucket intervals. - -Extreme values are managed by "clipping," where values beyond set thresholds are grouped into maximum or minimum buckets, ensuring all values fall within predefined limits. Values are then rounded to a precision point that is determined by the nearest bucket edge. - -| Raw Value | Bucket Size | Rounded Value | -| ----------- | ----------- | ------------- | -| 8,300 | 10,000 | 10,000 | -| 13,200 | 15,000 | 15,000 | -| 147,700 | 160,000 | 160,000 | -| 1,426,100 | 2,560,000 | 2,560,000 | -| 1,788,200 | 2,560,000 | 2,560,000 | -| 90,422,400 | 10,000,000 | 90,000,000 | -| 112,400,400 | 10,000,000 | 100,000,000 | -| 222,290,500 | 10,000,000 | 100,000,000 | - -### Local Differential Privacy - -Fully activating nerd mode, we read up on Local Differential Privacy (LDP). LDP is a variant of differential privacy where noise is added to each individual's data point locally before it is made available for collection. Once the data has been rounded, we apply a random amount of noise to each data point. - -This noise is derived from the Laplacian distribution which is generated based on a privacy parameter that is relative to the size of the rounded value. This distribution is characterized by its sharp peak and exponential decay, which means that most of the noise added to the data will be relatively small, thus minimally impacting the overall utility of the data, while providing a strong guarantee of privacy. 
- -Central to our privacy framework is empowering ASEs with the informed ability to opt-in or opt-out of the telemetry service. We also only collect data on outgoing payments, thus honoring the preferences of those who may choose to disable telemetry. In other words if one ASE enables telemetry and receives payments from an entity who has not enabled telemetry we won’t collect metrics on those incoming payments. - -For more information please have a look at our privacy [docs](https://github.com/interledger/rafiki/blob/main/packages/documentation/src/content/docs/telemetry/privacy.md) and our [implementation](https://github.com/interledger/rafiki/blob/main/packages/backend/src/telemetry/privacy.ts). - -## Architecture and Instrumentation - -![The Rafiki telemetry architecture](/developers/img/blog/2024-04-10/telemetry-architecture.png) - -The [number of transactions](https://github.com/interledger/rafiki/blob/d3be6b8d151d8cebc32b862e52a7bb678674d48e/packages/backend/src/open_payments/payment/outgoing/lifecycle.ts#L84-L90) is extracted from the Open Payments outgoing payment lifecycle and the value metric is handled by a [telemetry middleware](https://github.com/interledger/rafiki/blob/d3be6b8d151d8cebc32b862e52a7bb678674d48e/packages/backend/src/payment-method/ilp/connector/core/middleware/telemetry.ts) layer inside the ILP connector core. 
- -In summary, before these metrics are sent to the Otel collectors, we’ve implemented local privacy-preserving measures, including: - -- packetization which is inherent in the Interledger Protocol (ILP) -- a currency conversion service that uses a custom AWS Lambda function that pulls daily exchange rates from configured sources and stores them in an Amazon S3 bucket -- rounding data to a precision point determined by bucketing -- local differential privacy in the form of adding Laplacian noise to anonymize transaction data - -Finally, we grappled with the dilemma of disentangling playground data from our test network and live production data. The [Test Network](https://github.com/interledger/testnet) (Testnet) is an open Interledger network using test money within example wallet and e-commerce applications. Rafiki also provides a [local playground](https://github.com/interledger/rafiki/tree/main/localenv) to experiment with. We’d like to monitor activity within the testing environments to gauge interest in the Interledger Protocol (ILP) and identify usage patterns by new participants. - -We’ve opted to provide users the choice between the test network (testnet) and the live network (livenet). To support this dual-environment approach, the infrastructure for telemetry was effectively expanded to include two separate services: one dedicated to collecting and managing telemetry data from the test network and another for the live network. This setup ensures that data from each environment is handled independently. - -Our telemetry clusters are deployed on AWS, utilizing the ECS Fargate service to support two distinct operational environments: the live network environment (livenet) and the test network environment (testnet). Since the two environments are separate, it is the Rafiki client that determines where to send the data and the load balancers themselves do not need to decrypt GRPC/HTTPS messages to direct traffic between environments. 
This way each environment has its own load balancer to simply load-balance over their set of ECS task replicas. This configuration maintains good separation between testnet and livenet data as well as optimizing our resource utilization, leading to a cleaner, more efficient, and cost-effective telemetry infrastructure. - -When integrating ASEs opt-in for telemetry, metrics are sent to our Telemetry Services using gRPC. The collectors capture and export our data by periodically pushing it to an Amazon-managed Prometheus (AMP) instance for storage. Finally, Grafana Cloud is used to query Prometheus in order to visualize our data in dashboards. - -In order for ASEs to build their own telemetry solution, Rafiki can send data to multiple endpoints. This allows for integrating a local Otel collector container that acts as a sidecar and can support custom requirements. - -Thus ASEs can choose to opt-in and send telemetry metrics to us, optionally collect metrics for their own personal use, or opt-out of telemetry entirely. - -Please check out our telemetry [docs](https://github.com/interledger/rafiki/blob/main/packages/documentation/src/content/docs/telemetry/overview.md), [integration guide](https://github.com/interledger/rafiki/blob/main/packages/documentation/src/content/docs/telemetry/integrating.md), and our telemetry [code](https://github.com/interledger/rafiki/tree/main/packages/backend/src/telemetry) on GitHub. - -### Reflecting on Our Journey - -A lot of what we’ve covered in this article could be construed as scope creep. What initially seemed like a straightforward goal evolved into a more complex and considered endeavor. I like to think that this is not a bad thing, though. We are a team who have a proactive approach to understanding and resolving challenges. This journey is a testament to the iterative nature and necessary time that this process requires. 
- -That said, we could have benefited from more time upfront for understanding and planning instead of simply diving straight in. Perhaps, our work week eagerness may have led us to jump in too quickly. It is with a great sense of accomplishment that we now have telemetry running in our development environment where we are seeing how it holds up against our test data and hope it will be used in a production environment shortly. - -We look forward to having a real handle on the pulse of the ILP network soon. Of course, this journey is far from its conclusion. Telemetry, by its nature, is an ever-evolving domain, requiring adaptation to meet the network's growing needs and challenges. Having laid a solid foundation, future developments should be smoother. - -As we reflect on our path thus far, the question is: "Given our current knowledge and experiences, would we approach this project differently?" In hindsight, we would have used self-hosted Prometheus and Grafana instances from the start and avoided many of the problems we faced. This is a goal which remains on our roadmap, in order to provide us with the flexibility we seek. Some of the back and forth on our decision-making would have been smoother had we spent some more time in discussions at the start about what privacy factors to keep in mind and how public our results were intended to be. diff --git a/src/content/blog/2024-07-09-simple-open-payments-guide.md b/src/content/blog/2024-07-09-simple-open-payments-guide.md deleted file mode 100644 index 85f6d77d..00000000 --- a/src/content/blog/2024-07-09-simple-open-payments-guide.md +++ /dev/null @@ -1,101 +0,0 @@ ---- -title: 'A Simple Guide to the Open Payments Standard' -description: Learn how the Open Payments standard makes online payments easier and more accessible for everyone. 
-date: 2024-07-09 -slug: simple-open-payments-guide -authors: - - Sarah Jones -author_urls: - - https://www.linkedin.com/in/sarah-jones-ba6bb6b9 -tags: - - Interledger - - Open Payments ---- - -## The Current Digital Payments Landscape - -Handling payments is a crucial part of many online applications. Whether it's an eCommerce site selling products, a fundraising platform accepting donations, a streaming service charging for content, or a subscription service with monthly fees, digital payments are central to their operations. - -Many application developers rely on third-party payment gateways to handle these transactions. Companies like PayPal, Stripe, and Square offer services to process credit card payments on behalf of the application. In return for these services, applications incur various fees, which can include monthly fees, flat rates per transaction, or a percentage of the transaction amounts. This approach, while convenient, introduces additional expenses and makes the application reliant on a third-party provider. This reliance can limit control over the user experience, such as forcing users to trust third-party providers with sensitive information like credit card details. - -![Most applications' payment implementation today](/developers/img/blog/2024-06-20/credit-cards.png) - -An alternative is for applications to integrate directly with their bank's payment processing services. This method can offer lower transaction fees and increase control over the payment process. However, it requires significant development effort and is not always possible. Additionally, switching to a different bank becomes very challenging due to the extensive integration work that was already completed. - -This problem gets even more complicated when we consider situations where either the sender or the recipient of funds does not have a bank account. What if a payment has to take place between a bank and a mobile money provider? 
Now the application would have to also integrate with the mobile money provider. Custom integration becomes an expansive problem. - -![Custom integrations are not scaleable](/developers/img/blog/2024-06-20/custom-integration.png) - -What if there was a way for an application to access your account directly, and securely? Currently, even if you provide an application with your account details, it likely wouldn't be able to do much with them, even with your permission. As an account owner, shouldn't you have the power to decide who can access your account and what they can do with it? - -## The Promise of the Open Payments Standard - -Open Payments aims to change that by enabling applications to access users' accounts directly without needing multiple custom integrations. Standardization is key to eliminating these custom integrations. With a single integration point, applications can access any account that implements the standard, whether it's a bank account, a digital wallet, or a mobile money account. The only requirement is that both the sender's and recipient's account providers support the Open Payments standard. - -![Open Payments allow applications to talk to accounting service entities directly](/developers/img/blog/2024-06-20/open-payments.png) - -Consider how email works: if you have a Gmail account and someone else has an Outlook account, it doesn't matter because our email providers communicate using a shared, standardized language, allowing us to send emails seamlessly across providers. To send an email, you don't need to know the other person's email provider; you just need their email address. - -This is how the Open Payments standard operates. Similar to an email address, if an account provider implements the Open Payments standard, they will provide you with a wallet address. This wallet address is human-readable (consisting of words instead of a long string of numbers) and publicly shareable (unlike your credit card number). 
- -When you want to make a payment online, you simply provide the application with your wallet address. This wallet address is also a URL, allowing the application to make queries directly to the wallet address. The application can then access information about where to contact your account provider. Using the Open Payments standard, the application can send a request directly to your account, asking for the specified amount to be transferred from your account to the recipient's account. - -As an open standard, Open Payments is free for anyone to use, ensuring seamless communication between applications and their user’s account providers, regardless of which financial institutions are involved. This direct communication means you're not incurring additional fees or delays. - -The Open Payments protocol also aims to make transactions more transparent. Before committing to a payment, you understand how much the recipient is receiving and how much this payment costs you in transaction fees. - -The Open Payments standard addresses key questions such as: - -- Where should the money be sent, exactly? It provides a standardized way to identify and locate user accounts across different financial institutions. -- Do all parties agree on the final amount after any fees are added to a transaction? -- What is the best underlying method to use for the transaction? - -With the Open Payments standard, applications do not need to be registered financial service providers (FSPs) to facilitate transactions. Instead of moving or holding money, applications send payment obligations. This means they are not transferring money directly but are sending instructions to your financial institution to transfer the funds on your behalf. This setup eliminates the need for the application to be a financial service provider itself, even though it has direct communication access to your account. 
This is beneficial for applications because they don’t need to contend with the legal and compliance hurdles required to be a financial services provider. But it’s also advantageous for you because you don't need to share sensitive information with online applications. Applications storing information like credit card numbers and CVV codes always pose a risk of data leaks or hacking, even with the best intentions. - -You might think, this sounds convenient, but is it secure? I don’t want just anyone pulling money out of my account! - -## You Remain in Control - -With the Open Payments standard, you remain in full control of your financial transactions. When an application uses Open Payments, it securely and cryptographically shares important information about itself with the financial institutions it interacts with. This verification ensures that the account provider knows the application is legitimate when making a payment request on your behalf. - -Importantly, any withdrawal of money from your account requires your explicit consent. When you grant an application access to your account, you are not giving it unrestricted access. Instead, you control the access rights: the exact amount, the time frame, and whether it can access your transaction history or move funds. Open Payments also supports recurring payments, allowing you to define how often, for how long, and up to what amount an application can access your account. This granular control ensures that you are always aware of and consenting to the transactions made from your account. - -## There Are Some Catches - -For this system to work, both the sender and the recipient must have Open Payments-enabled accounts. This requirement poses an adoption hurdle, which the Interledger Foundation is actively addressing. We are dedicated to making adoption as seamless as possible for financial service providers (FSPs) and applications by providing extensive support and resources. 
- -It's important to note that integrating Open Payments does not guarantee a shared settlement layer. A settlement layer in a payment system refers to the infrastructure and processes used to actually transfer the funds between parties to finalize transactions. This includes the mechanisms that ensure that money moves from the sender to the recipient and that all parties' accounts are accurately updated. It is the actual movement of your money out of your account and into someone else's. Open Payments is designed to enable different payment systems to communicate and transact with each other. However, at the end of the day, those systems also need to be connected in such a way that they can settle the movement of funds between accounts as well. - -Many financial institutions already have established pathways and processes for moving money between accounts. The [Interledger Protocol](https://interledger.org/interledger) automates and optimizes how these institutions navigate this network. If you think of Open Payments as a way of sending payment obligations directly to the relevant account provider, then Interledger is about finding the best route through that existing network that allows funds to be transferred and payments to be completed. While Open Payments simplifies the initiation of payments, it doesn't guarantee that a route for completing those payments will always be found, potentially leading to failed transactions. However, as adoption of the standard grows, more people will gain access to an expanding and interconnected payments network, reducing the likelihood of payment failures. - -While some fees, such as currency conversion and bank transaction fees, may still apply, Open Payments aims to eliminate unnecessary middleman fees, reducing overall transaction costs. However, it’s important to recognize that not all intermediaries are unnecessary. For example, currency conversion services are essential for transactions involving different currencies. 
Suppose your account provider only deals in Euros and mine only in Dollars. In that case, we might rely on an intermediary that handles both currencies to facilitate the transaction effectively. - -Another scenario arises when the sender's and recipient's account providers are not directly connected by a shared settlement layer. In such cases, as long as there are one or more intermediaries that share a common settlement system with each institution, settlement can still occur via these intermediaries. For instance, if my bank deals in Dollars and cannot settle directly with your cryptocurrency wallet, there might be a digital wallet intermediary capable of settling between fiat and cryptocurrency. Thus, a shared settlement layer exists indirectly through intermediaries. The goal of the Interledger Protocol is to navigate the network of financial institutions and settlement systems to find the most efficient and cost-effective settlement paths. By leveraging these interconnected networks, Interledger aims to minimize costs and enhance reliability. - -## What's Happening in the Open Payments Space Today? - -Currently, the adoption of the Open Payments standard is still in progress. Some innovative institutions and services have begun to integrate this standard, but widespread use is still developing. - -[GateHub](https://gatehub.net/) is a digital wallet provider working with the Open Payments standard globally. They facilitate some cross-currency transactions, although regulatory limitations may apply depending on the user's country of residence. [Chimoney](https://chimoney.io/) and [Fynbos](https://wallet.fynbos.app) digital wallets have also implemented Open Payments capabilities. Chimoney enables Open Payments transfers between Chimoney accounts, and Fynbos supports payments between Fynbos accounts. Fynbos is operational in America, Europe, and South Africa. 
However, their transactions are currently limited to wallets in the same region due to regulatory and technical constraints. Plans are underway to establish payment channels between Fynbos and GateHub users, beginning with Europe soon. - -As well as having the first digital wallet providers that are Open Payments-enabled, we also have an application that runs Open Payments. [Interledger Pay](https://interledgerpay.com/) is a simplified payment platform that allows you to easily send or request money using your wallet address. - -For most people, accessing an Open Payments-enabled account depends on their financial institutions adopting the standard. As more banks, digital wallets, and mobile money providers incorporate Open Payments, the benefits will become more widely accessible. - -## What Are the Possibilities for Tomorrow? - -A world with direct access to accounts through the Open Payments standard fosters innovation and inclusion by reducing barriers to entry for developers, who can create direct payment solutions more quickly and at a lower cost without the need for custom integrations. This democratization of development enables smaller companies and startups to compete and innovate. - -Users are empowered to give applications direct access to their accounts. They can give access to who they want, to do what they want within set limits. Users also benefit from seamless transactions, transparency with regards to what fees they incur, and enhanced control over their financial data, improving trust and security. Financial inclusion is significantly increased by enabling access for those without traditional bank accounts, as well as cutting down on the costs of making payments by removing unnecessary intermediaries, and reducing the development effort of payment integrations. 
- -## Try Open Payments Yourself - -If you're curious about how the Open Payments standard works in practice, you can explore it by creating an account on the [Interledger Test Wallet](https://rafiki.money/). Upon account creation you'll be given a wallet address. This enables you to get hands on experience of simulating transactions using Open Payments. Once you're set up with your account, and have given yourself a generous amount of fictional money, you can embark on a shopping spree at the [Interledger Boutique](https://rafiki.boutique) and buy some delightful products ranging from luck to kindness. - -## Key Takeaways - -Standardization enhances interoperability by reducing the development effort required for each integration. This alone brings down costs while providing a scalable and efficient solution for handling digital payments. When interoperability is easy, there is no need for unnecessary middlemen and the fees they add to the cost of transactions. - -With Open Payments, users are empowered to give applications direct access to their accounts, deciding who can access their money, how much, and how often. Users only share public information (in the form of wallet addresses) with applications when making payments, keeping their sensitive financial data private. - -Applications can handle payments without needing to be registered financial service providers or navigating the risks involved in handling sensitive information like credit card numbers. diff --git a/src/content/blog/2024-07-30-open-payments-cinderella-story.mdx b/src/content/blog/2024-07-30-open-payments-cinderella-story.mdx deleted file mode 100644 index 782ef680..00000000 --- a/src/content/blog/2024-07-30-open-payments-cinderella-story.mdx +++ /dev/null @@ -1,184 +0,0 @@ ---- -title: 'Open Payments: The Cinderella Story of Finding a Fitting Authorization Method' -description: A breakdown of the unique needs that an authorization method for Open Payments needs to be able to fulfill. 
-date: 2024-07-30 -slug: open-payments-cinderella-story -authors: - - Nathan Lie -author_urls: - - https://www.linkedin.com/in/nathan-lie-138a73121 -tags: - - Interledger - - Open Payments - - GNAP ---- - -import LargeImg from '/src/components/blog/LargeImg.astro' - -## The Internet Runs on OAuth 2.0 - -If you’ve ever signed into a website with your Google account, Apple ID, or perhaps an account on a social media website, then you’ve participated in a grand tradition of access delegation that began back in 2007, when the core protocol for the first iteration of OAuth was released. Since then, the protocol has expanded into what we see throughout the web today in the form of OAuth 2.0. - -At a certain point in [Rafiki’s](https://rafiki.dev/) development it became necessary to implement a standard that described how third parties could initiate payments on behalf of someone else. This standard came to be known as Open Payments, which not only would have to provide a framework to describe those payments, but also incorporate an access delegation method for those third parties to use. Having such a well-established access delegation method like OAuth 2.0 made it seem like a clear choice as an authorization method for this standard. - -However, as [Open Payment’s](https://openpayments.dev/introduction/overview/) methods for describing and managing payments became fleshed out, the shortcomings of OAuth 2.0 for that use case revealed themselves. To understand what they are, let’s first go over the features of OAuth 2.0 that helped propel it into mainstream popularity. - -As simple of a process as it is for a user, access delegation through OAuth 2.0 is achieved through a lot of moving parts by different parties. These roles are as follows: - -- Third Party Client (or just client) - - The party that is requesting access to a resource that it does not own. 
- - In a “Sign in with Google”-type example, this is the party that is sending the user to Google to give the client access to information on the user’s Google account. -- Resource Owner - - The entity that owns one or more resources that may be accessible through an authorization flow. - - In the “Sign in with Google” example, this would be the user that owns the Google account. -- Resource Server - - The place where a resource owner’s resources (e.g. an account’s email address or username) are stored. - - In the “Sign in with Google” example, this would be the Google server where the account is stored. -- Identity Provider - - The entity that determines the identity of the resource owner, so that they may access their resources. - - In the “Sign in with Google” example, this would be the Google login page the resource owner is directed to when selecting the option to sign in with Google on the client site. - - The party that controls this often has overlap with the party that controls the resource server, but not always. Note how in the “Sign in with Google” example Google owns both the login page and the resource server that account resources are stored on. -- Authorization Server - - The server that delegates access to the resources on a given resource server. - - The client makes a request to the authorization server to receive an access token for resources owned by a resource owner on a given resource server. - - The resource server makes requests to the authorization server to ensure access tokens presented by a client are valid for the resource the client is requesting access for. - - In the “Sign in with Google” example, this would be a Google server that manages authorization. The authorization server isn’t always run by the same entity as the resource server, and may be outsourced. 
- -When the client needs to perform an action using a resource hosted externally (like signing into their app with a user’s Google account), the client requests access by making a request to the corresponding authorization server for that resource. The authorization server responds with a redirect URL that goes to an identity provider used by the resource server. This redirect also contains information that identifies the client and specifies what resources it wants access to. - -The client then redirects the user to a page on the identity provider with that URL. Typically this page will verify the user’s identity by requiring them to provide login credentials in some fashion, then present them with a consent screen asking if they would like to approve or deny the client’s request for access. - -Once the user completes the flow on the identity provider, the authorization server gives the client a token which is used to communicate with an API on the resource server to retrieve the resource in question. - -Let’s collect this all into a nice sequence diagram. - - - -While extremely useful, OAuth 2.0 is best suited for delegating access to _information_. Sadly, when one is interested in delegating _control_, rather than access, the tradition of OAuth 2.0 begins to fall short. A typical OAuth 2.0 authorization is initialized when a third party client generates a link for a resource owner that contains, among other things, a “scope” value that contains a list of the items the authorization should grant access to. For example: - -```js -scope = 'email,username,channels:read' -``` - -In this example lifted from Slack’s OAuth API, this scope will grant access to a resource owner’s email address and username. In addition, it will allow the channels of a Slack instance to be read by the third party client. - -Now, the issue with expressing access only as a string, is that it’s cumbersome to express any specifics on the access that’s being granted. 
Slack attempts to solve this by concatenating parts together, but for a payment, there are enough parts where it becomes really clumsy. Imagine what the scope for a payment might look like with this model, accounting for things like the transaction amount and a billing frequency of once a month: - -```js -scope = 'outgoing-payment:100:USD:P1M' -``` - -This approach is starting to push the boundaries of convenience and ability to parse the scope. Do we need to enforce an order in which information is added to a scope, so that it can be parsed properly? How would we handle optional parts? Should we just stringify a JSON object and call that a scope? From a development standpoint, things are starting to get out of hand. - -## Trying to Make it Work with OAuth 2.0 - -An early attempt to add this context to an authorized payment was through something called “mandates”. These were objects that a third party client would create on an Open Payments resource server that contained the aforementioned payment information. 
That mandate would then be referenced inside of a Authorization Details object, which would then be stringified and passed as a query parameter in an OAuth authorization URL: - -```bash wrap -GET /authorize?response_type=code&client_id=s6BhdRkqt3&state=af0ifjsldkj - &redirect_uri=https%3A%2F%2Fclient%2Eexample%2Ecom%2Fcb - &authorization_details=%7B%0A%20%20%22open_payments%22%3A%7B%0A%20%20%20%20%20%22mandate%22%3A%7B%0A%20%20%20%20%20%20%20%20%22name%22%3A%20%22%2F%2Fissuer.wallet%2Fmandates%2F2fad69d0-7997-4543-8346-69b418c479a6%22%0A%20%20%20%20%20%7D%0A%20%20%7D%0A%7D HTTP/1.1 -Host: wallet.example - -// URL contains stringified copy of the following object: -// { -// "id": "https://wallet.example/mandates/2fad69d0-7997-4543-8346-69b418c479a6", -// "account": "https://wallet.example/bob", -// "amount": 200, -// "assetCode" : "USD", -// "assetScale": 2, -// "interval": "P1M", -// "startAt": "2020-01-22T00:00:00Z", -// "balance": 200 -// } -``` - -Now we’re getting messy again. It’s immediately clear how much noise is in the “authorization_details” query parameter, and in a more practical sense, there’s the added step of creating the mandate before a client can request authorization from a resource owner. We haven’t even gotten into the fact that a whole other object, a “charge”, needs to be created on the resource server in order to make use of the mandate. **It’s additional overhead for the resource server to maintain all of those mandates for a process that ideally should be handled entirely by the authorization server delegating access.** - -Compare this sequence diagram with the previous sequence diagram and the increased complexity. There’s more interaction with the resource server just to set up the authorization flow and more work after the flow in order to initiate the payment. - - - -## A New Approach - -Enter the Grant Negotiation and Authorization Protocol (GNAP), the heir apparent to the OAuth lineage. 
While maintaining the standard of security that OAuth established, GNAP is capable of authorizing a broader range of actions. Consider the authorization of a payment. Not only does the _ability_ to make a payment need to be specified when authorizing it, but also the _recipient_ and the _amount_ of the payment. Those complications are difficult to account for in OAuth, but much easier to handle in GNAP. - -Like mandates, a client will make a request to the Open Payments Auth Server specifying what permissions it would like to have on a resource. The difference here is that this request is part of the spec, so it doesn’t have to live on and be maintained by the server. Additionally, it’s capable of expressing the limitations and caveats on the permissions it’s requesting that we need to properly describe a payment. GNAP achieves this through the grant request. This example describes how a client making this request can create or read outgoing payments for a particular incoming payment for 5 dollars: - -```json wrap -{ - "access_token": { - "access": [ - { - "type": "outgoing-payment", - "actions": ["create", "read"], - "identifier": "https://ilp.rafiki.money/alice", - "limits": { - "receiver": "https://ilp.rafiki.money/incoming-payments/45a0d0ee-26dc-4c66-89e0-01fbf93156f7", - "interval": "R12/2019-08-24T14:15:22Z/P1M", - "debitAmount": { - "value": "500", - "assetCode": "USD", - "assetScale": 2 - } - } - } - ] - }, - "client": "https://webmonize.com/.well-known/pay", - "interact": { - "start": ["redirect"], - "finish": { - "method": "redirect", - "uri": "https://webmonize.com/return/876FGRD8VC", - "nonce": "4edb2194-dbdf-46bb-9397-d5fd57b7c8a7" - } - } -} -``` - -After the grant request is made, the Open Payments authorization server responds with a URL that kicks off an authorization flow, similar to when a client makes a request to an OAuth 2.0 authorization server. 
There are many components to the grant request, but the most important to describing a payment is the “access” field: - -```json wrap -"access": [ - { - "type": "outgoing-payment", - "actions": [ - "create", - "read" - ], - "identifier": "https://ilp.rafiki.money/alice", - "limits": { - "receiver": "https://ilp.rafiki.money/incoming-payments/45a0d0ee-26dc-4c66-89e0-01fbf93156f7", - "interval": "R12/2019-08-24T14:15:22Z/P1M", - "debitAmount": { - "value": "500", - "assetCode": "USD", - "assetScale": 2 - } - } - } -] -``` - -Note how much more specific and readable a grant request can be with its intentions for a payment. The “actions” field can hold what actions can be performed on the resource. Importantly, the “limits” field is open-ended enough that it can specify the amount of the payment, the currency it is in, and its frequency (if applicable) in an understandable way. Essentially, it is able to use two important features of OAuth 2.0 and the “mandates” system without creating significant overhead: - -All access delegation is kept within an authorization server, making accounting the resource server’s sole responsibility. -There is a clear way to describe the parameters for a payment. - -For reference, we have a more tame sequence diagram with GNAP. It’s closer to the baseline set by the OAuth 2.0 sequence diagram and keeps the responsibilities of the resource server properly separated from the authorization flow at large. - - - -With this in mind, it’s clear that GNAP is best suited for sending payments via Open Payments. Though the spec is not officially final, progress is steady and the future looks promising - the specifications are well on their way to becoming a proper RFC. [The core protocol](https://datatracker.ietf.org/doc/draft-ietf-gnap-core-protocol/) was recently approved by the IESG and has entered the IESG editor’s queue. 
[The specification for resource servers](https://datatracker.ietf.org/doc/draft-ietf-gnap-resource-servers/) is also on the cusp of being submitted to the IESG for publication. - -For more information on Open Payments as a whole, consider perusing the [documentation](https://openpayments.dev/introduction/overview/). diff --git a/src/content/blog/2024-08-13-interledger-universe.mdx b/src/content/blog/2024-08-13-interledger-universe.mdx deleted file mode 100644 index 1c166a87..00000000 --- a/src/content/blog/2024-08-13-interledger-universe.mdx +++ /dev/null @@ -1,202 +0,0 @@ ---- -title: 'The Interledger Universe' -description: 'Or: “What the heck are all those products and protocols?”' -ogImageUrl: /developers/img/blog/2024-08-13/og-image.png -date: 2024-08-13 -slug: interledger-universe -authors: - - Sabine Schaller -author_urls: - - https://www.linkedin.com/in/sabineschaller -tags: - - Interledger - - Interledger Protocol - - Interledger Stack - - Interledger Foundation - - Open Payments - - Rafiki - - Dassie - - Web Monetization - - STREAM - - SPSP ---- - -If you have stumbled across terms like _Interledger Protocol, Interledger Stack, Interledger Foundation, Open Payments, Rafiki, Rafiki.money, Dassie, Web Monetization (extension), STREAM, SPSP, packets_, … and you're like, “Wait, what?!” Say no more! We are here to sort through this cloud of terms and finally shed some light on the foggiest depths of the Interledger Universe. Let’s start with the obvious first term: - -## Interledger - -The term Interledger can be split into the prefix _inter_, meaning “between” and _ledger_, which the [Merriam-Webster Dictionary](https://www.merriam-webster.com/dictionary/ledger) defines as “a book containing accounts to which debits and credits are posted from books of original entry”. Hence, Interledger aims to be the means by which payments can be made between multiple accounting books, a.k.a. ledgers. - -What does that really mean? 
Let’s say I have a German bank account and want to transfer money to my friend Allan in South Africa. Which options do I have? I can initiate an international transfer from my bank account to Allan’s bank account in South Africa, which will use the SWIFT network to exchange payment messages. The transfer is probably going to take at least 3 business days for Allan to see the money in his account and it is going to cost me a relatively huge fee. I could also use a service like [Wise](https://wise.com/), but that is closed-loop and requires me to sign up and Allan to either sign up or complete a form with the South African Reserve bank before he can receive the funds. I may not ever use the service again afterwards and while I know they are regulated, their software is proprietary, so there is no way for me to check how they process my data. I need to trust them. It gets even more complicated if we assume that Allan does not have a traditional bank account but only a mobile money provider. How would I transfer money to him then? - -Interledger is designed to be a network of nodes that forward payment messages while also taking care of any “currency” conversion, where “currency” could be anything of value including fiat currencies, crypto currencies, or mobile money. - -![Interledger network diagram](/developers/img/blog/2024-08-13/network.png) - -My bank account holds Euros and Allan’s mobile money account holds South African Rands. When looking at the above graph, there are multiple ways my money could be sent–or routed–to Allan. Interledger ensures that the packets take the fastest and cheapest route from the Interledger node servicing me to the Interledger node servicing Allan. Hence, Interledger is designed to be a network on top of existing payment networks that serves as the interoperability layer between them all. - -### Interledger Foundation - -Before we dive into the tech, let me quickly introduce the Interledger Foundation. 
We are a US non-profit organization whose vision is to make sending a payment as easy as sending an email. We are the custodians of the Interledger Protocol and its adjacent protocols, and we are dedicated to developing digital financial inclusion in systems around the world. -Our global strategy is to support research and development of digital financial systems in vulnerable areas, fund innovative solutions for underrepresented populations, and foster an ecosystem that drives a paradigm shift in payment systems. We aim to create a robust and active Interledger community that grows together, enriching talent pipelines to bring new voices and perspectives into the fintech space. - -And with that, what are these protocols that we are developing and maintaining? - -## The Interledger Stack - -The Interledger stack, much like the Internet stack, consists of multiple layers. This is not a coincidence, since the Interledger stack was modeled after the Internet stack. Hence, for every layer in the Internet stack, there is an equivalent in the Interledger stack. Each layer serves a specific function and interacts with the layers above and below it. Let's explore each layer of the Interledger stack, from the bottom up. - -![Interledger stack vs the Internet stack](/developers/img/blog/2024-08-13/stack.png) - -If you prefer a video version of this, please check out my [Interledger Stack presentation](https://youtu.be/sqGjkZKFjgo) on Youtube. - -### Settlement Layer - -The settlement layer is not technically part of the stack but is essential for the other protocols to function. It defines how actual value is exchanged between parties. Settlement can occur with fiat currencies, cryptocurrencies, mobile money, or any agreed-upon asset of value, like Starbucks credits or even physical coffee beans. This layer ensures that once a payment has been cleared, the actual transfer of value is settled between the involved parties. 
Usually, peered nodes, also called connectors, enter a legally binding peering agreement to define the line of credit they extend to each other and to enforce that settlement happens. Settlement can either occur at a predefined point in time or whenever this line of credit, also called peer liquidity, is fully consumed. - -Note that in the case that a cryptocurrency is used for settlement between two peers, settlement can happen automatically and without a peering agreement because blockchains enforce settlement due to their cryptographic capabilities and their binding execution. - -### Link Layer - -The link layer defines how two peered connectors communicate. There are currently two main protocols used in this layer: - -- [Bilateral Transport Protocol](https://interledger.org/developers/rfcs/bilateral-transfer-protocol/) (BTP): Uses WebSocket-based communication between connectors. -- [ILPoverHTTP](https://interledger.org/developers/rfcs/ilp-over-http/): Utilizes HTTPS for communication between connectors. - -These protocols establish the connection needed for the higher layers to function. - -### Protocol Layer - The Interledger Protocol (ILP) - -The core of the Interledger stack is the [Interledger Protocol](https://interledger.org/developers/rfcs/interledger-protocol/) (ILP). This protocol splits larger payments into smaller packets, whose content it prescribes, and defines a two-phase transfer protocol. - -Why are we using a two-phase rather than a single phase transfer protocol? Let us have a look at an example of a single-phase transfer. - -![Single-phase transfer diagram](/developers/img/blog/2024-08-13/transfer.png) - -Alice on the left is a customer of an Account Servicing Entity (ASE) that runs an Interledger node or connector (A). - -Side note: An Account Servicing Entity provides and maintains a payment account for a payer and payee, and is a regulated entity in the country/countries it operates (e.g. banks, mobile money providers etc.). 
- -Bob on the right is a customer of the Account Servicing Entity that runs the Interledger connector (D). In order for Alice to send a payment to Bob, connector (A) needs to forward packets to connector (B), that needs to forward packets to connector (C), that needs to forward packets to connector (D). In the optimistic scenario, ASE A would debit Alice’s account and then forward the packets on. But what happens if, for whatever reason, connector (C) cannot forward the packets to connector (D)? Alice’s account has already been debited but Bob did not receive the funds. - -In order to move that risk of transfer failure from the end users, Alice and Bob, to the connector nodes, the Interledger Protocol defines a two-phase transfer. - -![Two-phase transfer diagram](/developers/img/blog/2024-08-13/2p-transfer.png) - -The ILP packet transfer begins with the sending connector (A) constructing an ILP Prepare packet, containing the receiver's ILP address, an execution condition, amount, and expiry time. The sending connector may also include additional data, the format of which is determined by the higher-level protocol in use. This packet is sent to connector (B) over an authenticated channel, set up using a link layer protocol. Connector (B) verifies connector (A)’s liquidity balance and, if sufficient, debits the amount from the connector’s liquidity account. The connector then uses routing tables to determine the next hop, adjusts the packet's amount and expiry for its exchange rate, and forwards the packet. - -Subsequent connectors repeat these steps until the packet reaches the receiving connector (D). The receiver validates the packet based on higher-level protocol requirements and either accepts it by returning an ILP Fulfill packet with the [preimage]() of the condition or rejects it with an ILP Reject packet. If accepted, each connector in the chain verifies the fulfillment and credits the next connector until the original sender is reached. 
- -The sending connector then checks the fulfillment against the original condition, records the transaction, and may repeat the process to complete the desired total transfer amount. This lifecycle ensures secure, efficient, and multi-currency transactions across a network of connectors, maintaining the integrity and timing of each packet transfer. - -Note that the protocol is specifically designed for very low value packets. If connector (A) and (B) peer using packets of let’s say 1 cent, losing a couple of them due to network failures can quickly add up. However, if (A) and (B) peer based on low value packets of a billionth of 1 cent (1/1,000,000,000), then losing a very small amount of them will be inconsequential when rounding during settlement. - -### Interlude: ILP addresses and Payment Pointers - -[ILP addresses](https://interledger.org/developers/rfcs/ilp-addresses/) are a fundamental part of the Interledger Protocol, serving as unique identifiers for accounts within the Interledger network. These addresses follow a hierarchical format, similar to IP addresses on the internet, enabling efficient routing of payment packets across different ledgers. - -The structure of an ILP address consists of several components: - -- Allocation: This is the first part of the address and indicates the type of network. For example, `g` is used for global live networks, and `test` is used for test networks. -- Neighborhood: Following the allocation scheme, the neighborhood specifies a group of connectors, ledgers, or institutions. For instance, `sepa` could represent the ledgers in the [Single Euro Payments Area](https://en.wikipedia.org/wiki/Single_Euro_Payments_Area) or `us-fed` could represent the Federal Reserve of the United States. The goal of neighborhoods is to group connectors and ledgers that know about each other, so that routing is more efficient. -- Account Identifier: This part identifies the specific account within the ledger.
It is unique to each account holder and ensures that funds are routed to the correct recipient. -- Interaction (optional): Finally, the interaction encodes business logic and varies for each transaction, allowing multiple requests to be distinguished. - -An example ILP address might look like this: `g.us-fed.ach.acmebank.acmecorp.~ipr.73WakrfVbNJBaAmhQtEeDv.2`. Here, `g` indicates a global live network, `us-fed.ach` represents the neighborhood (the US Fed on the ACH network), `acmebank.acmecorp` is the account identifier, and `~ipr.73WakrfVbNJBaAmhQtEeDv.2` is the interaction. - -#### Payment Pointers - -[Payment pointers](https://paymentpointers.org/) are a user-friendly way to represent ILP addresses, similar to how URLs are used to represent IP addresses. They make it easier for users to handle and share their payment information. - -A payment pointer always begins with a dollar sign (`$`) followed by a URL-like structure. For example: `$wallet.com/alice`. This payment pointer resolves to a URL `https://wallet.com/alice` and points to an ILP address, e.g. `test.wallet.alice` (no optional interaction part). - -Payment pointers can also be hosted on the root domain. In that case, a payment pointer like `$mymarketplace.com` resolves to `https://mymarketplace.com/.well-known/pay` and points to an ILP address like `g.wallet.mymarketplace`. - -We will come back to payment pointers in the section on the Application Layer, specifically the section on the Simple Payment Setup Protocol (SPSP). If you can’t wait, feel free to [skip ahead](#application-layer). - -### Transport Layer - The STREAM Protocol - -The transport layer builds on ILP by providing additional functionalities for managing value transfer. The only supported protocol at the moment is the [STREAM Protocol](https://interledger.org/developers/rfcs/stream-protocol/) (Streaming Transport for Real-time Exchange of Assets and Messages).
- -![STREAM protocol animation](/developers/img/blog/2024-08-13/stream.gif) - -STREAM is a versatile and secure transport protocol for ILP, facilitating efficient and scalable transmission of money and data. It offers a range of features designed to optimize ILP-based transactions: - -- **Money and Data Transmission**: Allows for the simultaneous transfer of money and data. -- **Packet Segmentation and Reassembly**: Segments larger payments or messages into smaller packets for easier transmission and reassembles them at the destination. -- **Bi-directional Communication**: Supports two-way communication, facilitating the exchange of money and data in both directions. -- **Stream Multiplexing**: Multiple logical streams can be sent over one ILP connection, with unique numerical IDs assigned to avoid collisions. -- **Flow and Congestion Control**: Adjusts the rate of money and data transfer based on network conditions to maintain efficiency. -- **Authentication and Encryption**: Ensures security through authenticated and encrypted packet data. -- **Condition Generation and Fulfillment**: Manages the generation of conditions for ILP packets and their fulfillment, ensuring transaction integrity. -- **Connection Migration**: Supports uninterrupted streams even if the underlying connection changes. - -STREAM also manages path exchange rates effectively. It includes a minimum acceptable amount in ILP Prepare packets and the received amount in Fulfill or Reject packets. By doing so, it allows senders to judge amounts and prices in their own units using the calculated path exchange rate. To estimate the path exchange rate, an unfulfillable test packet may be used at the start of the connection. The protocol ensures that incoming Prepare packets with amounts below the specified minimum are not fulfilled. - -Note that a STREAM packet is included in the data field of an ILP packet. 
- -### Application Layer - -The application layer is the final layer of the Interledger stack, defining developer-facing functionalities and enabling various applications. The two supported protocols at this layer are SPSP (Simple Payment Setup Protocol) and Open Payments. - -[SPSP](https://interledger.org/developers/rfcs/simple-payment-setup-protocol/) simplifies the process of setting up payments. When a GET request is made to a URL associated with a payment pointer using the SPSP request headers, SPSP defines what needs to be returned. - -```http wrap -HTTP/1.1 200 OK -Content-Type: application/spsp4+json -{ - "destination_account": "example.ilpdemo.red.bob", - "shared_secret": "6jR5iNIVRvqeasJeCty6C+YB5X9FhSOUPCL/5nha5Vs=", - "receipts_enabled": true -} -``` - -These include the `destination_account`, which is the ILP address of the recipient, a `shared_secret` for encrypting STREAM packets, and may include a flag called `receipts_enabled`, indicating whether [STREAM receipts](https://interledger.org/developers/rfcs/stream-receipts/) have been requested. SPSP ensures a secure and straightforward payment setup for entities or individuals with direct ILP access, meaning entities or individuals that are able to create, send, and receive ILP packets directly without the help of another entity. - -[Open Payments](https://openpayments.dev/introduction/overview/) is an API standard for account servicing entities, allowing third parties to securely access digital accounts for viewing account information and initiating payments. Open Payments supports complex payment scenarios, such as e-commerce or recurring payments, by providing a robust framework for authorizing and initiating digital payments. It employs the [Grant Negotiation and Authorization Protocol (GNAP)](https://datatracker.ietf.org/doc/html/draft-ietf-gnap-core-protocol-20) for fine-grained access control and secure authorization.
- -For an in-depth introduction to Open Payments, check out [Sarah’s fantastic blog post](https://interledger.org/developers/blog/simple-open-payments-guide/). If you would like a more high level overview of GNAP and why we are using it with Open Payments, check out [Nathan's Cinderella Story of Finding a Fitting Authorization Method](https://interledger.org/developers/blog/open-payments-cinderella-story/). - -## Where is Web Monetization in this stack? - -[Web Monetization](https://webmonetization.org/) is not part of the Interledger stack but is a user-facing application that sits on top of the ILP stack. - -![Web Monetization in the Interledger stack](/developers/img/blog/2024-08-13/wm.png) - -Web Monetization is a proposed W3C standard that facilitates seamless payments directly within the web browsing experience. It allows website visitors to pay an amount of their choosing to websites with minimal to no user interaction. As a proposed standard, the goal is for Web Monetization to be natively built into browsers; however, no browsers currently support the functionality. Hence, the Interledger Foundation is working on a browser extension to enable Web Monetization functionality in the meantime. - -When a web browser (or the Web Monetization extension) encounters a web-monetized site, the site can automatically signal its ability to accept payments. Once the browser or extension has obtained authorization from the Web Monetization user during the setup phase, it gathers necessary payment details, and issues instructions to move the money utilizing the Open Payments APIs. The browser then creates a payment session and communicates payment events back to the site. In response, the site can provide benefits to paying visitors, such as removing ads or granting access to exclusive content. 
This approach aims to create a smooth, integrated experience for both users and content providers, promoting a new model for web monetization that is efficient, privacy preserving, and user-centric. - -## What is Rafiki? - -The answer to this question is very short: it is a [reference implementation](https://github.com/interledger/rafiki) of the Interledger stack. It is **not** a wallet, **not** a platform, and **not** a service! **It is software.** - -![Components of the Interledger stack included in Rafiki](/developers/img/blog/2024-08-13/rafiki.png) - -[Rafiki](https://rafiki.dev/) is open-source software that is freely available for any licensed entity. The purpose of Rafiki is to minimize the effort for entities to incorporate Interledger on their users’ accounts and run as a connector on the ILP network. Rafiki uses ILPoverHTTP rather than BTP because we assume that packet sizes for these transactions will be a bit bigger, maybe as big as a cent. Hence, payments are split into fewer packets, making the establishment of a socket connection excessive. - -### Rafiki.money, testnet, and test network - -We have to admit, we chose poorly when picking names for our testing and demonstration tech. We have created a test wallet that, as of today, has no name but we host it on [rafiki.money](https://rafiki.money/). It is simulating an Account Servicing Entity that users can sign up for, go through a fake KYC flow, and then have the possibility to hold a play-money balance and send and receive Interledger Payments. The test wallet is integrated with [Rapyd](https://www.rapyd.net/)’s sandbox environment for holding balances and with Rafiki for facilitating payments. However, since Rapyd’s sandbox environment is very limiting due to its API request restrictions, we are exploring alternatives. - -We are currently also in the process of finding a name for the test wallet so that people don’t confuse it with Rafiki, the ILP reference implementation, anymore. 
Additionally, we are changing the look and feel of the test wallet to set it apart even further. Stay tuned for exciting new updates! - -Test wallet deploys one instance of Rafiki, meaning that it is one node in the Interledger test network running an Interledger connector. We encourage future integrators of Rafiki, i.e. licensed Account Servicing Entities, to peer with at least the test wallet instance to try out its functionality and to form a larger test network. - -We have also used the term “testnet” to loosely describe all the tooling that we have developed around the test wallet, e.g. the [Boutique](https://rafiki.boutique/products) to experience what eCommerce would be like with Open Payments. However, we have decided to not use this term anymore to reduce its confusion with the test network. - -## What is Dassie? - -[Dassie](https://dassie.land/) is a second reference implementation of the ILP stack, but it’s targeted towards cryptocurrency users and developers rather than regulated Account Servicing Entities. It is not developed by the Interledger Foundation but is a personal project by Stefan Thomas, one of the creators of the Interledger Protocol. - -While servicing two different worlds, a Dassie node could peer with a Rafiki node, for example if such a Rafiki node is run by a cryptocurrency exchange. - -## Final Words - -Navigating the Interledger Universe can initially seem overwhelming with its array of terms and concepts. However, at its core, Interledger aims to facilitate seamless, efficient, and secure value transfer across diverse ledgers and currencies. From the structured Interledger Stack to the reference implementation Rafiki or application-specific use cases like Web Monetization, each component plays a crucial role in realizing a unified, interoperable financial network.
- -Whether it's enabling payments through Web Monetization, simplifying account servicing with Open Payments, or testing out functionalities with our test wallet, the Interledger ecosystem is designed to promote innovation and accessibility in the world of digital finance. By breaking down these elements and understanding their interactions, we can appreciate the vast potential of the Interledger Protocol to revolutionize global payments and value exchange. - -Stay tuned as we continue to refine and expand these tools, making the Interledger vision a reality. The Interledger Foundation’s mission is to make sending a payment as easy as sending an email, by fostering an inclusive, innovative ecosystem that bridges existing financial systems. The future of interconnected, inclusive financial systems is here, and we’re excited to see where this journey takes us. - -_Thank you to Sarah, Radu, Melissa, Tseli, Mohammed, Max, and Chris for reviewing this blog post and helping me make it the best version it could possibly be._ diff --git a/src/content/blog/2024-09-06-integration-tests.mdx b/src/content/blog/2024-09-06-integration-tests.mdx deleted file mode 100644 index edb67adb..00000000 --- a/src/content/blog/2024-09-06-integration-tests.mdx +++ /dev/null @@ -1,139 +0,0 @@ ---- -title: 'Leveling Up Rafiki Testing: Shifting from Manual to Automated' -description: 'How we automated our manual payment flow tests.' -date: 2024-09-06 -slug: integration-tests -authors: - - Blair Currey -author_urls: - - https://www.linkedin.com/in/blair-currey/ -tags: - - Rafiki - - Open Payments - - Testing ---- - -import LargeImg from '/src/components/blog/LargeImg.astro' - -[Rafiki](https://rafiki.dev/) is open source software that allows an [Account Servicing Entity](https://rafiki.dev/overview/overview#more-about-account-servicing-entities) to enable Interledger functionality on its users’ accounts, so testing is really important to us.
Given the critical nature of handling payments, it's essential that our tests not only validate correctness but also build confidence for our integrators. - -Historically, Rafiki was tested via a combination of unit tests and manually running sequences of [Bruno](https://www.usebruno.com/) requests constituting different payment flows using two local Mock Account Servicing Entities (MASE). The MASEs are part of our local development playground and encapsulate the necessary components for integrating with Rafiki, including accounting, webhook handling, and consent interactions to facilitate payment flows. These MASEs run in [Docker](https://www.docker.com/) alongside their own Rafiki instances. [Visit our documentation](https://rafiki.dev/integration/playground/overview) to learn more about MASEs and our local playground. - -One such instance of these payment flows is our ["Open Payments" example](https://github.com/interledger/rafiki/tree/main/bruno/collections/Rafiki/Examples/Open%20Payments), demonstrating payment creation in an e-commerce context. This example consists of a series of requests to our Open Payments API and a short browser interaction running on our local MASE to authorize the payment. For any changes made to Rafiki, one would need to manually perform these Bruno requests against our local environment to ensure these flows still worked as expected. With [several different flows](https://github.com/interledger/rafiki/tree/main/bruno/collections/Rafiki/Examples) to validate, this manual process was time-consuming and error-prone, making it unrealistic to thoroughly test every variation for each change. This blog post covers how we automated these manual tests and the principles that guided us. - -## Our Testing Philosophy - -At Interledger, we believe in maintaining a balanced approach to testing that upholds both thoroughness and agility. 
As Rafiki transitions from Alpha to Beta, our focus remains on safeguarding our core business logic while quickly adapting to changes. To this end, the new integration tests focus on high-impact scenarios and critical edge cases, while existing unit tests offer more comprehensive coverage of individual components. The integration tests will run in our continuous integration (CI) pipeline on Github, as our unit tests do now. This approach allows us to rigorously validate our system while preserving the flexibility needed for rapid development. As Rafiki continues to mature, we will iterate and refine our testing strategies. - -### Testing Requirements - -Before diving into the implementation details, let’s outline our requirements for the new tests: - -- **Automation and CI Integration**: The tests should automate our Bruno example flows and run in our CI pipeline. -- **MASE Implementation**: The tests need to implement two MASEs to complete the payment flows and validate the received webhook events. -- **Backend and Auth Services**: The tests should run against our fully operational backend and auth services to ensure proper integration and functionality. -- **Efficiency**: The tests should be mindful of speed, avoiding any unnecessary delays while ensuring comprehensive coverage. - -#### Out of Scope - -- **Rafiki Admin:** Testing for our Rafiki Admin frontend is still to come and will be addressed in a separate test suite. -- **Non-Functional Requirements:** While performance, security, and other non-functional aspects are not the focus of these integration tests, they are being addressed through other ongoing initiatives. We are in the process of adding tracing and [performance tests](https://github.com/interledger/rafiki/tree/main/test/performance) with [k6](https://k6.io/), and we are conducting external security audits to ensure robust protection across our systems. 
- -## Solution overview - -After evaluating several options, we decided to run our services in Docker with a [Jest](https://jestjs.io/) test runner and MASE on the host machine. [A shell script](https://github.com/interledger/rafiki/blob/main/test/integration/scripts/run-tests.sh) launches the Docker environment, runs the tests, then spins down the environment. - -### Alternatives Considered - -Before arriving at this solution we considered a few variations: - -- **Bruno CLI and existing local environment**: Given that we wanted to automate our manual tests consisting of Bruno requests, the first option we explored was simply using the [Bruno CLI](https://docs.usebruno.com/bru-cli/overview) to run our existing example flows. However, our “Open Payments” flow requires a short UI interaction on the MASE which would have been difficult to mock from a Bruno request. Additionally, we would not have access to the webhooks and it would generally limit our testing capabilities. Reusing the same Docker environment would also prevent us from configuring the test and local environments independently. -- **Testcontainers**: We considered using [Testcontainers](https://testcontainers.com/) in our Jest tests instead of launching native Docker containers from a shell script. While Testcontainers is a powerful tool for managing Docker containers in code, we do not currently need to set up and tear down the environment between tests because our tests do not require a fresh state. However, if we extend our tests and find they cause side effects between tests, we could utilize Testcontainers to rebuild the environment, ensuring proper isolation. Additionally, managing containers outside of Jest allows us to maintain a more persistent environment that is not tied to the lifecycle of the tests. This flexibility makes it easier to reuse the environment for additional testing purposes. 
In the event that we need to leverage the power of Testcontainers in the future we can [leverage our existing Docker Compose files](https://node.testcontainers.org/features/compose/) to transition. -- **Dockerizing Test Runner**: We also considered putting the tests in a Docker image and running them inside the Docker network instead of from the host machine. This would solve [some networking issues](#url-handling) but introduces additional complexity with creating a new Docker image for the tests and retrieving test exit codes and displaying test output. Additionally, running tests in Docker would require a full environment restart for each test run, making the development process more cumbersome. - -### Key Benefits - -- **Efficiency**: We spin up containers once and run all tests. Starting and stopping the Docker environment consumes a significant amount of time compared to the actual test execution, so avoiding unnecessary restarts keeps the tests fast. -- **Ease of Configuration**: Keeping the test and local development environments separate makes them independently configurable. -- **Flexibility**: Running tests from the host machine and directly managing the Docker environment outside of our test runner makes the test environment more flexible. We can develop new tests or change existing tests and rerun without restarting the containers. Additionally, we can reuse the environment for other ad-hoc development or testing purposes. -- **Comprehensive Testing Capabilities**: A full Jest test suite with an independent test environment and MASEs ensures complete access to all necessary components for making detailed assertions and manipulating flows. - -### Implementation Details - -Let's take a closer look at the structure of our test code and the key components involved. - -#### Test Environment - -Our test environment resembles our local development environment with some key variations. 
We implemented a Cloud Nine Wallet and Happy Life Bank MASE in the Jest test suite so that they can be controlled and inspected as needed. To facilitate these new MASE implementations we extracted `mock-account-servicing-entity`’s core logic into a new [mock-account-service-lib](https://github.com/interledger/rafiki/tree/main/packages/mock-account-service-lib). Each of these MASEs integrates with a pared down version of Rafiki consisting of the `auth` and `backend` services and their requisite data stores. These Rafiki instances are defined and configured in Docker Compose files for each MASE. - -![Integration Test Architecture Diagram](/developers/img/blog/2024-09-06/architecture.png) - -#### Launching the Test Environment and Running Tests - -The environment and tests are launched from a shell script that does the following: - -- Starts the test environment consisting of our two Rafiki instances from our Docker Compose files. -- Maps hosts to our host machine’s hosts file. -- Runs the Jest test suite against our Rafiki instances in Docker and saves the test exit code. -- Tears down the Docker environment and returns the test exit code. - -#### Test Platform Components - -Using Jest as our testing framework, we structured our test code around a few key components: - -1. **Mock Account Servicing Entity (MASE)** - - **Integration Server**: Includes all endpoints needed for integrating Account Servicing Entities with Rafiki. This includes the [rates endpoint](https://rafiki.dev/integration/requirements/exchange-rates) for supplying currency exchange rates and an endpoint for handling Rafiki’s [webhook events](https://rafiki.dev/integration/requirements/webhook-events/) throughout the payment lifecycle, such as depositing liquidity on `outgoing_payment.created`. - - **Open Payments Client**: Communicates with our Open Payments API to perform the payment flows.
This API is a reference implementation of the [Open Payments standard](https://rafiki.dev/overview/concepts/open-payments) that enables third parties to directly access users’ accounts. - - **Admin (GraphQL) Client**: Communicates with our GraphQL admin API to set up tests and complete some of the flows, such as “Peer-to-Peer” payments. - - **Account Provider**: A simple accounting service to handle basic accounting functions and facilitate payment flows. - -2. **Test Actions:** These are functions analogous to our Bruno requests but designed to be repeatable across tests. These actions abstract away some baseline assumptions about the sending/receiving MASE relationship, assertions, and how each endpoint is called. - -On test start, we create MASEs for `cloud-nine-wallet` and `happy-life-bank` and seed their respective Rafiki instances. Then these are designated as `sendingASE` and `receivingASE` in our test actions and we run our test flows which include: - -**Open Payments:** - -This is our primary payment flow and would be used in contexts such as e-commerce. It consists of creating an incoming payment, quote, and outgoing payment along with their requisite grants. For more details on this flow visit our [Open Payments Flow documentation](https://openpayments.dev/introduction/op-flow/). The outgoing payment requires a grant interaction which needs to be implemented by an ASE and is mocked for these tests. For detailed information on these grants and how we handle authorization in general, see our [Open Payments Grants guide](https://openpayments.dev/introduction/grants/). - -We run this flow with the following variations: - -- With continuation via polling -- With continuation via finish method -- Without explicit quote step - -**Peer-to-Peer:** - -A simple form of payment that consists of creating a receiver (incoming payment), quote, and outgoing payment without any grant requests.
- -We run this flow with the following variations: - -- Single currency -- Cross currency - -To ensure functionality of these critical payment flows as our codebase evolves, we've integrated these tests into a GitHub Action. [This action](https://github.com/interledger/rafiki/blob/main/.github/workflows/node-build.yml) runs automatically against all pull requests, safeguarding our main branch from potential regressions. - -#### URL Handling - -Running tests from the host machine against services in Docker posed a problem with respect to URLs. We needed to use URLs that worked from both the host machine and from within our Docker services. From the host machine, we could reach a Docker container by referencing the exposed port on `localhost`, while in the Docker network, we needed to use the hostname derived from the Docker container name. For example, from the host machine we would get the `gfranklin` wallet address via `localhost:3100/accounts/gfranklin`. But from within Docker the URL should be `http://cloud-nine-wallet-test-backend/accounts/gfranklin` instead. To resolve this we used [hostile](https://github.com/feross/hostile) to map `127.0.0.1` (`localhost`) to our Docker service hostnames (`cloud-nine-wallet-test-backend`, `happy-life-bank-test-backend`, etc.) in the start script. This allows us to use the same URL pattern everywhere. - -This sequence diagram illustrates how a request from the host machine resolves using the mapped hostnames. - - - -## Conclusion - -Manually validating payment flows with series of Bruno requests against our local environment was tedious and error-prone, leading to less thorough testing and a slow developer experience. By automating these tests and integrating them into our CI pipeline, we have significantly sped up our development workflow and ensured the integrity of our payment flows across all code changes. - -### Further Development Ideas - -Testing is an evolving process with constant opportunities for improvement. 
Further areas of enhancement could include: - -- Expanding tests to cover more variations, such as a cross-currency variation of “Open Payments” and failure scenarios. -- Incorporating real GraphQL authentication instead of bypassing it. -- Extend test environment to use either [TigerBeetle](https://tigerbeetle.com/) or Postgres accounting services (currently only uses Postgres). -- In the future, if we require more programmatic control over the container lifecycle, we can leverage Testcontainers with our existing Docker Compose files. - -We encourage developers to add tests and contribute to our continuous improvement. Check out our [GitHub issues](https://github.com/interledger/rafiki/issues) to get involved. diff --git a/src/content/blog/2024-09-23-rafiki-code-architecture.mdx b/src/content/blog/2024-09-23-rafiki-code-architecture.mdx deleted file mode 100644 index f85b2ae5..00000000 --- a/src/content/blog/2024-09-23-rafiki-code-architecture.mdx +++ /dev/null @@ -1,230 +0,0 @@ ---- -title: 'Breaking Down Rafiki: What Makes Our Friend Tick' -description: 'A low-level introduction to the software packages that comprise Rafiki.' -date: 2024-09-23 -slug: rafiki-low-level-intro -authors: - - Nathan Lie -author_urls: - - https://www.linkedin.com/in/nathan-lie-138a73121 -tags: - - Rafiki ---- - -import LargeImg from '/src/components/blog/LargeImg.astro' - -## Introduction - -It has been said that the Interledger Foundation upholds open technology as one of its core values. It has also been said that [Rafiki](https://github.com/interledger/rafiki), a white-label application that enables Interledger usage, upholds this value by being open-source and allowing anyone to contribute. - -It has _not_ been said, however, how one comes to intimately understand Rafiki. It's not impossible, to be sure, but the Interledger endeavor has been around for long enough that a write-up at a lower level is warranted. - -So, what makes Rafiki tick? 
What do we see when we venture beyond _what_ Rafiki does and into _how_ it does those things? - -### Disclaimer - -This article assumes the reader has high-level knowledge of the following concepts: - -- [Open Payments](https://openpayments.dev/introduction/overview/) -- [Interledger](https://interledger.org/developers/get-started/#how-does-interledger-work) - -This article also assumes that its readers already have high-level knowledge of [Rafiki](https://rafiki.dev/overview/overview/) itself, and that they understand on that level how it accomplishes being an Interledger node and an Open Payments server. - -## Components - - - -Rafiki is comprised primarily of three packages: - -- A [`backend`](https://github.com/interledger/rafiki/tree/main/packages/backend) package that extends an API for managing Open Payments resources like Incoming or Outgoing Payments, an Admin API, and an API from the Interledger connector to accept ILP packets. - -- An [`auth`](https://github.com/interledger/rafiki/tree/main/packages/auth) package that provides third parties with a method of acquiring authorization to manage Open Payments resources on the Rafiki instance's associated `backend` package. It also extends an Admin API. -- A [`frontend`](https://github.com/interledger/rafiki/tree/main/packages/frontend) that serves as an Admin-level UI for that Rafiki instance. It allows the manager of that Rafiki instance to directly manage items such as other peers on the Interledger network or what currencies it supports. - -Rafiki also maintains a few other utility packages: - -- A [`documentation`](https://github.com/interledger/rafiki/tree/main/packages/documentation) package that the documentation website (https://rafiki.dev/) is maintained from.
-- A [`mock-account-service-lib`](https://github.com/interledger/rafiki/tree/main/packages/mock-account-service-lib) that provides a useful library to mock the utilities used by the mock Account Service Providers in the test local environment. -- A [`token-introspection`](https://github.com/interledger/rafiki/tree/main/packages/token-introspection) package that creates a client to easily manage a GNAP token with an Open Payments Authorization server. - -All of these packages are managed together as a monorepo using [pnpm](https://pnpm.io/motivation). - -### The `backend` and `auth` stacks - -Both the `backend` and `auth` packages are largely built in the same way. They both leverage the same three Node.js frameworks: - -- [KoaJS](https://koajs.com/) -- [AdonisJS](https://docs.adonisjs.com/guides/preface/introduction) -- [KnexJS](https://knexjs.org/guide/) - -KoaJS is used as the framework for setting up the API routes, assigning functions to handle business logic for those routes, and wrapping those in middlewares for additional functionality. It's a lot like an [Express server](https://expressjs.com/) if that helps with familiarity. - -AdonisJS is used to perform [dependency injection](https://docs.adonisjs.com/guides/concepts/dependency-injection) on the services used by each package to perform business logic. - -KnexJS is an ORM that manages calls made to Rafiki's Postgres database. - -Additionally, both packages extend a [GraphQL API](https://github.com/interledger/rafiki/blob/main/packages/backend/src/graphql/schema.graphql) that serves as an Admin API. - -Finally, [ObjectionJS](https://vincit.github.io/objection.js/) is used as an ORM to perform database operations. It leverages the query building syntax of KnexJS to express its database operations. - -Familiarity with these frameworks will be valuable in understanding how Rafiki works, and with understanding the rest of this post.
- -### The Rafiki Backend - -As mentioned before, the Rafiki `backend` manages Open Payments resources. These resources are managed by services attached to an inversion-of-control (IoC) container via dependency injection with AdonisJS, as also mentioned previously. To see this in action, the [`/src/index.ts`](https://github.com/interledger/rafiki/blob/main/packages/backend/src/index.ts) is the best place to look. Take this example of how the service for handling incoming payment routes gets injected into the IoC container: - -```ts wrap -import { Ioc, IocContract } from '@adonisjs/fold' -import { createIncomingPaymentRoutes } from './open_payments/payment/incoming/routes' - -... - -const container: IocContract = new Ioc() - -... - -container.singleton('incomingPaymentRoutes', async (deps) => { - return createIncomingPaymentRoutes({ - config: await deps.use('config'), - logger: await deps.use('logger'), - incomingPaymentService: await deps.use('incomingPaymentService'), - streamCredentialsService: await deps.use('streamCredentialsService') - }) -}) -``` - -In this call, a name for the service is passed to the IoC container, and a factory for that service. In turn, that factory takes the other services that it depends on. In this case, the `incomingPaymentRoutes` service depends on services that parse configuration and output logs for the `backend`. The `index.ts` file is thus good for seeing all of the services the backend uses, and how the services depend on one another. - -These services are then attached to routes in the [`/src/app.ts`](https://github.com/interledger/rafiki/blob/main/packages/backend/src/app.ts) file. The `app.ts` file is a great place to work backwards from and see which routes exist on the backend server and what services are used to complete the operations presented by the routes.
Take [this route](https://github.com/interledger/rafiki/blob/main/packages/backend/src/app.ts#L432-L454) for example: - -```ts wrap -const incomingPaymentRoutes = await this.container.use( - 'incomingPaymentRoutes' -) - -... - -// POST /incoming-payments -// Create incoming payment -router.post>( - '/incoming-payments', - createValidatorMiddleware< - ContextType> - >( - resourceServerSpec, - { - path: '/incoming-payments', - method: HttpMethod.POST - }, - validatorMiddlewareOptions - ), - getWalletAddressUrlFromRequestBody, - createTokenIntrospectionMiddleware({ - requestType: AccessType.IncomingPayment, - requestAction: RequestAction.Create - }), - httpsigMiddleware, - getWalletAddressForSubresource, - incomingPaymentRoutes.create -) -``` - -In this mounting of the `POST /incoming-payments` route, all of the middleware as well as the main handler (`incomingPaymentRoutes.create`) can be seen being attached to that mounting. Note how the `incomingPaymentRoutes` service is acquired from the container using the name that was passed into the function that attached that service to the IoC container. This pattern can be followed for other routes to see which services handle fulfilling requests made to the routes on the Rafiki `backend`. - -Finally, the `backend` also launches a GraphQL server that extends an API defined by [this schema](https://github.com/interledger/rafiki/blob/main/packages/backend/src/graphql/schema.graphql). - -In order to fulfill the payments described in Open Payments resources and maintain peering relationships, Rafiki runs an instance of an Interledger Connector, acting as its node on the Interledger network. More on this in a future blog post. - -### The Rafiki Authorization Server - -The Authorization Server on Rafiki is structured similarly to the backend, in that it injects services into an IoC container which are retrieved by routes on the Koa server to perform the business logic.
- -[index.ts](https://github.com/interledger/rafiki/blob/main/packages/auth/src/index.ts) - -```ts wrap -import { Ioc, IocContract } from '@adonisjs/fold' -import { createGrantRoutes } from './grant/routes' - -... - -const container: IocContract = new Ioc() - -... - -container.singleton('grantRoutes', async (deps: IocContract) => { - return createGrantRoutes({ - grantService: await deps.use('grantService'), - clientService: await deps.use('clientService'), - accessTokenService: await deps.use('accessTokenService'), - accessService: await deps.use('accessService'), - interactionService: await deps.use('interactionService'), - logger: await deps.use('logger'), - config: await deps.use('config') - }) -}) -``` - -Again, note the route service for grants is mounted to the container, and then subsequently referenced when attaching service functions to the relevant routes. - -[app.ts](https://github.com/interledger/rafiki/blob/main/packages/auth/src/app.ts) - -```ts wrap -const grantRoutes = await this.container.use('grantRoutes') - -... - -/* Back-channel GNAP Routes */ -// Grant Initiation -router.post( - '/', - createValidatorMiddleware(openApi.authServerSpec, { - path: '/', - method: HttpMethod.POST - }), - grantInitiationHttpsigMiddleware, - grantRoutes.create -) -``` - -Like the `backend`, the `auth` server also extends a GraphQL API to perform admin functions. The API is modeled by this [GraphQL schema](https://github.com/interledger/rafiki/blob/main/packages/auth/src/graphql/schema.graphql). - -### The Rafiki Admin Frontend - -The Rafiki Admin `frontend` has a React-based (specifically [Remix](https://remix.run/docs/en/main)) frontend with server-side rendering that consumes the GraphQL API extended by the `backend` server. -In general, the [name of a file](https://github.com/interledger/rafiki/tree/main/packages/frontend/app/routes) dictates how the app constructs its routes. 
For example, the file [assets.create.tsx](https://github.com/interledger/rafiki/blob/main/packages/frontend/app/routes/assets.create.tsx) is expressed as `/assets/create` in the frontend app. Path parameters are denoted by a `$` sign, so the file [assets.$assetId.tsx](https://github.com/interledger/rafiki/blob/main/packages/frontend/app/routes/assets.%24assetId.tsx) might be expressed as `/assets/1234-1234-12345678`, where the `$assetId` portion of the file name stands in for an identifier in the final route. - -![A screenshot of the Rafiki Admin UI](/developers/img/blog/2024-09-23/rafiki-admin-screen.png) - -Pages on the frontend acquire data to populate the view and send data in requests using [_loaders_](https://remix.run/docs/en/main/route/loader) and [_actions_](https://remix.run/docs/en/main/route/action). The [page for an individual asset](https://github.com/interledger/rafiki/blob/main/packages/frontend/app/routes/assets.%24assetId.tsx) is an example of a file containing this `loader` and `action` pattern. - -## Seeing Rafiki In Action - -If Docker is installed, the whole environment can be started locally with a single command: - -```ts wrap -pnpm localenv:compose up -``` - -With this environment live, the Admin GraphQL Endpoints can be demoed using [Bruno](https://www.usebruno.com/). [The Rafiki repository contains a collection](https://github.com/interledger/rafiki/tree/main/bruno/collections/Rafiki) which contains example calls for all of the GraphQL endpoints on both the `backend` and `auth` servers. It also contains calls for every Open Payments action and examples for certain flows in Open Payments. - - - -To help provide an idea of what integrating with Rafiki would be like, the local Docker environment also starts up two [Mock Account Servicing Entities (Mock ASEs)](https://github.com/interledger/rafiki/tree/main/localenv/mock-account-servicing-entity) to represent integrators of Rafiki.
-These Mock ASEs have [pages that display information for individual accounts](https://github.com/interledger/rafiki/blob/main/localenv/mock-account-servicing-entity/app/routes/accounts.%24accountId.tsx) on their respective Rafiki instances. Crucially, they each also host [pages that collect authorization from account owners](https://github.com/interledger/rafiki/blob/main/localenv/mock-account-servicing-entity/app/routes/consent-screen.tsx) for payments made using grants from the `auth` server. - -![A screenshot of the Mock ASE's consent screen](/developers/img/blog/2024-09-23/mock-ase-consent-screen.png) - -The flow for creating an outgoing payment can also be demoed with a combination of Bruno API calls and the Mock ASE consent screen. - -## Conclusion - -With any luck, this article should bridge the gap between loftier concepts like Open Payments and the code that implements it. The files showcased in this article generally serve as good starting points for figuring out how the rest of a given package works, and the patterns that are used throughout them. - -With even more luck, this article will be relevant for quite some time after publication, but if it isn't, it can serve as a recent entry in the [Interledger graveyard](https://interledger.org/developers/blog/simplifying-interledger-the-graveyard-of-possible-protocol-features). - -For even more information, please peruse the [Rafiki documentation](https://rafiki.dev/overview/overview/). diff --git a/src/content/blog/2024-10-11-where-did-rafiki-money-go.mdx b/src/content/blog/2024-10-11-where-did-rafiki-money-go.mdx deleted file mode 100644 index e895cc2c..00000000 --- a/src/content/blog/2024-10-11-where-did-rafiki-money-go.mdx +++ /dev/null @@ -1,104 +0,0 @@ ---- -title: 'Where did rafiki.money go?' 
-description: 'Or “The need for rebranding when something confuses people.”' -date: 2024-10-11 -slug: where-did-rafiki-money-go -authors: - - Timea Nagy -author_urls: - - https://www.linkedin.com/in/nagy-timea-35483024 -tags: - - Test Network ---- - -In the past few months we have come to know and love rafiki.money. So why did it disappear? Where did it go? - -Fear not, rafiki.money is not gone. It just needed a facelift, a proper rebranding. Why, you may ask? We will answer everything, but first, let’s just do a quick recap: - -[Rafiki](https://github.com/interledger/rafiki) is open source software that allows an [Account Servicing Entity](https://rafiki.dev/resources/glossary/#account-servicing-entity-ase) to enable Interledger functionality on its users’ accounts. This includes sending and receiving money via [SPSP](https://interledger.org/developers/rfcs/simple-payment-setup-protocol/) and [Open Payments](https://openpayments.dev/) and allowing third-party access to initiate payments and view transactions. - -Rafiki.money was intended to be the Test Wallet application for our Test Network. It simulates an Account Servicing Entity that has integrated Rafiki. Users can sign up for an account, navigate a KYC flow (newly powered by our partner [GateHub](https://gatehub.net/)) and then hold a play-money balance to send and receive Interledger Payments. In short, it is basically a Rafiki playground. - -All of the above still stands, but the need for a rebranding became obvious when some community members confused Rafiki with rafiki.money: - -“I am using Rafiki, so what now?” - -“I have integrated Rafiki locally. What are the next steps?” - -These users were in fact **using rafiki.money** and they had **not integrated Rafiki locally**, but were using rafiki.money on their machine. - -In order to help differentiate Rafiki from rafiki.money we have begun discussing ways in which we could rebrand our Test Wallet. We decided on two major changes: domain name and design. 
And while we are at it, we also squeezed in the same rebranding for Test E-Commerce, our own rafiki.boutique. - -## Introducing wallet.interledger-test.dev and boutique.interledger-test.dev - -We sensed that the name rafiki.money was contributing to the confusion, so we decided to change the domain name to interledger-test.dev. You can still enjoy the same fantastic Rafiki playground, Test Wallet, at _wallet.interledger-test.dev_ and our new boutique can be found at _boutique.interledger-test.dev_. - -![Old to new URL mapping](/developers/img/blog/2024-10-11/url-change.png) - -## New design - -For the new design, we wanted to distinguish the look and feel of Testnet from any of the other Interledger projects. By going with a more angular and flat style, reminiscent of 8-bit or pixel-based visuals, we are hoping to express the idea that Test Network showcases products or nodes that are not fully evolved yet. - -We decided to go with a dark background and neon highlights, and a monospace font with pixel-style decorative elements for the development code. We also wanted to include a light-theme version for presentation in more formal settings, or for people who prefer a light theme. For now, this will be our production enabled theme. - -Upon finalizing the visuals with our in-house illustrator, [Madalina Tantareanu](https://www.madalinatantareanu.com/), our handover was done via HTML mock-ups built with Astro instead of a static design file. You can even see the frontend mockups at https://testnet-mockup.surge.sh/wallet/. This allowed us to effectively check that the new design would be completely viable on a web page. - -We were also able to account for designing the interactions, like hover states, which commonly get missed out if a static design file is used. 
- - - -## Challenges we faced, interesting stuff we used - -One of the more personal challenges we faced was coming to terms with letting go of the old colorful design, which we had grown fond of and associated closely with rafiki.money and rafiki.boutique. The color scheme was too similar to Interledgers look and feel. Our aim with the new design was to entirely separate Test Wallet from the idea of Rafiki. This led to the introduction of the two new color schemes for light and dark mode. We were pleased to see that the images remained in the design, but with different color variations. - -Have you seen anything like the new design before? We kind of love it! - -## Tailwind CSS - -Now, let’s go into details on the more techy side of the design. - -[Tailwind CSS](https://tailwindcss.com/) is a utility-first CSS framework, packed with useful classes that can be used to build any design. Test Network has already been using Tailwind CSS, as we have found that this framework meets our needs the best. We didn’t want it to be one of those websites that blinds people when they open it on their device in the middle of the night, so we fixed this by having a dark mode option to be enabled in the application. Currently, this is available only for development mode, you can check out the dark theme in our [public Github repo](https://github.com/interledger/testnet). - -Codewise it is quite easy, just throw a dark: in front of any color utility to apply it when dark mode is active. Works for background colors, text colors, borders and gradients. - -The production/light mode, and development/dark mode will look like this: - -
    -![Login page for redesigned Test Wallet in light mode](/developers/img/blog/2024-10-11/test-wallet-light.png) - -![Login page for redesigned Test Wallet in dark mode](/developers/img/blog/2024-10-11/test-wallet-dark.png) - -
    - -
    -![Login page for redesigned Test Boutique in light mode](/developers/img/blog/2024-10-11/test-boutique-light.png) - -![Login page for redesigned Test Boutique in dark mode](/developers/img/blog/2024-10-11/test-boutique-dark.png) - -
    - -Another great thing about Tailwind is that it’s tiny. It automatically removes unused CSS when building for production, so the final shipped CSS bundle is the smallest it could possibly be. It is, of course, responsive on all devices. The utility classes help us work within the constraints of a system instead of adding arbitrary values to the stylesheets. They make it easy to be consistent with color choices, spacing, typography, shadows, and everything else that make up a well-engineered design-system. - -Seriously, if you haven’t tried it yet, you should! - -## One more major change for Test Wallet: GateHub Sandbox integration - -We mentioned briefly above that we integrated the GateHub Sandbox into Test Wallet. This is a big change, as we were previously using a payments and KYC API called [Rapyd](https://www.rapyd.net/). The problem with Rapyd was that we had some limitations when multiple people were using Test Wallet at the same time, as it has a rate limit. And you know we like to organize hackathons, so we always had to use workarounds to solve this problem. - -In order to have many people using Test Wallet at the same time, with a much higher limitation of transactions per minute, we decided with our partner, [GateHub](https://gatehub.net/) from Ljubljana, Slovenia, that we should integrate their backend API services to handle KYC, for depositing and withdrawing play money. For our long term goals, integrating the GateHub Sandbox API is a much better solution. - -The integration was a collaborative effort between the teams at the Interledger Foundation and GateHub. The GateHub team adapted their onboarding styles to ensure a seamless fit with our new design for the Test Wallet. We also implemented iFrames to incorporate GateHub's onboarding and KYC processes, as well as the ability to deposit and withdraw play money. 
While the onboarding process may appear complex, it follows the standard protocol for wallet applications, especially as we aim to surprise you with Interledger cards at events. - -Our initial aim was to retain all active users with registered emails on rafiki.money; however, the new GateHub integration requires existing users to complete the KYC process again and recreate their wallets along with their respective payment pointers. Due to these changes, we now require all users to have a strong password, again standard protocol for security reasons. Therefore, those who registered with rafiki.money are kindly asked to register again and create a strong password. _As a result, previous wallets, [payment pointers](https://rafiki.dev/resources/glossary/#payment-pointer), and play money will not be accessible anymore_. - -## Final Words - -We truly hope you enjoy this new design as much as we do! We look forward to your [feedback and opinion](https://github.com/interledger/testnet/discussions)! Don’t be shy! - -We put our heart and soul into making it both fun and user-friendly. So continue exploring wallet.interledger-test.dev and boutique.interledger-test.dev, and remember, we value and welcome all your feedback! Reach out to us either on [Slack community channels](https://communityinviter.com/apps/interledger/interledger-working-groups-slack), or open a discussion on [Github Test Wallet](https://github.com/interledger/testnet/discussions). diff --git a/src/content/blog/2024-10-25-rafikis-first-security-audit.mdx b/src/content/blog/2024-10-25-rafikis-first-security-audit.mdx deleted file mode 100644 index e43f12a9..00000000 --- a/src/content/blog/2024-10-25-rafikis-first-security-audit.mdx +++ /dev/null @@ -1,106 +0,0 @@ ---- -title: "Rafiki's First Security Audit" -description: "Takeaways from Rafiki's 2024 security audit." 
-date: 2024-10-25 -slug: rafikis-first-security-audit -authors: - - Max Kurapov -author_urls: - - https://www.linkedin.com/in/mkurapov -tags: - - Rafiki - - security - - audit ---- - -At the beginning of the year, we were in contact with a security and penetration testing company to do an audit of Rafiki. Even though the software is still in its early stages, it is essential to gather feedback early to build a strong foundation for the software's security. The primary goals of the assessment were to evaluate several Rafiki components: the GraphQL Admin APIs, the frontend Admin UI component, as well as our underlying [ILPv4 protocol](https://interledger.org/developers/get-started/). The assessment was done using Rafiki’s local playground, based on the [Open Source Security Testing Methodology Manual (OSSTMM)](https://www.isecom.org/research.html) and [Open Source Web Application Security Project (OWASP)](https://owasp.org/) methodologies. - -The audit presented eight vulnerabilities in total, with different risk levels: - -![Results of the assessment](/developers/img/blog/2024-10-25/results.png) - -Two items were not applicable to us: - -- Lack of Transport Layer Protection: given this is our local playground environment, we run HTTP for ease of use. -- Hardcoded Secret Credentials: automatic code review flagged hardcoded credentials, but this was only in our test files where we use mock data. - -The remaining six vulnerabilities were addressed as follows: - -## Admin APIs - -### HMAC signing - -While the assessment mentioned adding a security mechanism to our GraphQL Admin APIs in both the backend and auth packages as part of the report, this was a known priority even before the audit. 
- -Although these APIs should not be exposed to the wider internet in a Rafiki deployment, we want to provide our [Account Servicing Entities (ASEs)](https://rafiki.dev/overview/overview/#more-about-account-servicing-entities) running the software with as many security safeguards as possible. Given that the typical usage of the Admin APIs is through service-to-service requests, we chose to go with a simple and effective solution similar to what we had already integrated for our webhooks: adding HMAC signature support. This is done by having a shared key between the Rafiki services, and the integrator’s service. When calling the Admin API, the integrator generates a signature of the payload (containing a timestamp and request body) using HMAC with sha256 algorithm and the secret key. Upon receiving the request in the Admin API, backend and auth verifies this signature to validate the request’s authenticity & integrity. - -### Disabling GraphQL Introspection Query - -The assessment outlined a few recommendations against common GraphQL attacks, one of which was the disabling of the introspection query in production. Introspection can provide access into the whole structure of a GraphQL schema (types, queries, mutations, deprecated fields) which is useful during development, but can provide a lot of knowledge to bad actors looking to exploit the API. Based on the recommendation, we turned off introspection in production. - -### Preventing Denial of Service GraphQL Attacks - -Due to the structure of GraphQL APIs, it is sometimes not possible to avoid circular relationships between the models. As a result, bad actors can create requests containing circular queries, causing nested fields to be resolved recursively: - -```gql -query { - __schema { - types { - name - fields { - name - type { - name - fields { <- recursing into the fields object - name - type { - name - ... 
- } - } - } - } - } - } -} -``` - -Without proper handling, this can cause the server to grind to a halt trying to process these kinds of requests. The recommendation from the audit was to limit the depth of valid queries, such that no requests past a certain level of nesting can be resolved. - -In addition, there is a threat of field duplication attacks in GraphQL APIs: an attacker tries to overload the API by excessively requesting fields many times over in a single query. Even though standard GraphQL implementations end up correctly calling the underlying resolver (handler) only once per field, the GraphQL parser needs to do a lot of work to actually process the request, potentially leading to a denial of service (DoS). - -```gql -query GetAsset($id: String!) { - asset(id: $id) { - id - code - scale - liquidity - liquidity - liquidity - ... - } -} -``` - -This can be prevented by limiting queries that go above a certain complexity threshold, or by limiting the number of "tokens" (such as fields) allowed in a request. -We ended up [using](https://github.com/interledger/rafiki/pull/2537) a library called [`@escape.tech/graphql-armor`](https://escape.tech/graphql-armor/docs/getting-started/) to set different kinds of constraints to protect us from these kinds of attacks. - -## Rafiki Admin UI (frontend package) - -As part of the Rafiki software stack, we also publish a frontend package. This is a Remix application that enables managing a Rafiki instance using a web-interface. There are few common vulnerabilities that are common across web applications. One of them is clickjacking: a bad actor embeds a legitimate site in an iframe, and then uses several techniques to trick users into entering information meant for the original site in order to gather (or "highjack") clicks and keystrokes ultimately performing unintended actions on behalf of the user, such as submitting forms, making transactions, or altering account settings. 
In order to prevent this, the testers recommended adding an `X-Frame-Options` header in the responses from the web application. In the `X-Frame-Options` header, we now return the value of `SAMEORIGIN`, which allows framing only by pages with the same domain as in the response itself, preventing bad actors from framing the Rafiki Admin UI on malicious sites. - -## Library vulnerabilities - -Given we use a lot of different published libraries in Rafiki, we are vulnerable to security exploits found in them, and of course, their dependencies. During the assessment, the testers used the code scanning tool Snyk to find several vulnerable dependencies in the Rafiki repository. Even though we have renovate-bot to automatically open PRs to manage any outdated libraries, it wasn’t directly incorporated into our continuous integration (CI) pipeline. After the assessment, we added a few code scanners into our CI workflow: Trivy and Grype. Both of these tools have large databases of known exploits for libraries (similar to Snyk), and with each commit into a PR, Trivy and Grype scan the build (and potentially, the to-be-published Docker image) for high-risk vulnerabilities and indicate it in the PR. This allows us to be more proactive in fixing these issues, instead of relying on less regular merges of dependency updates with renovate-bot. - -## ILP & STREAM - -Rafiki runs an ILP connector (using [STREAM](https://interledger.org/developers/rfcs/stream-protocol/) as the transport protocol) as part of its software. This is how two peered Rafiki instances interact and make payments between one another. -One of the questions we wanted to ask the auditors during the assessment was, _"if you intercept an ILP packet, could you get any useful information out of it?"_. Using the local playground, we worked together with the auditors to capture ILP (STREAM) packets between the ILP connectors of two Rafiki instances. In the local playground, these requests are done over HTTP.
Even though in a production environment the connector-to-connector communication would be done over HTTPS, it gave us visibility into what the requests & responses look like over an insecure protocol. The auditors observed that the ILP STREAM packets were properly encrypted in transit: - -![Inspected ILP Packet](/developers/img/blog/2024-10-25/packet.png) - -The STREAM transport protocol relies on exchanging a symmetric secret to encrypt messages out-of-band, i.e., without any cryptographic handshakes. In Rafiki, this is done through the use of Open Payments APIs, meaning that even if a bad actor were to inspect packets from the beginning of a STREAM connection, they wouldn't be able to gain any information to decrypt the messages. With this knowledge, combined with the actual observation of the STREAM packets, the auditors found that the encryption & secure data handling using the STREAM transport protocol was sound. This answered our question above: no, no useful information would be gained if one were to intercept these packets. - -These were some of the findings we had as part of Rafiki's first security audit. Going forward, we will be doing annual reviews of this nature within Rafiki, as well as the ecosystem as a whole. It is always helpful to get an experienced, outside perspective about how we can continue improving - particularly when it comes to vital aspects of building software, such as security. diff --git a/src/content/blog/2024-12-03-e2e-testing-wm-browser-extension.mdx b/src/content/blog/2024-12-03-e2e-testing-wm-browser-extension.mdx deleted file mode 100644 index d307e25b..00000000 --- a/src/content/blog/2024-12-03-e2e-testing-wm-browser-extension.mdx +++ /dev/null @@ -1,379 +0,0 @@ ---- -title: 'End-to-end testing the Web Monetization browser extension' -description: "E2E testing browser extensions? It's tricky, but we've got it covered." 
-date: 2024-12-03 -slug: e2e-testing-wm-browser-extension -authors: - - Sid Vishnoi -author_urls: - - https://sidvishnoi.com?ref=ilf_engg_blog -tags: - - Web Monetization ---- - -A sharp test suite anticipates problems, catching them before they surprise users. As developers, we often get a bit too familiar with our own code. With our inevitably biased views, we often end up overlooking issues from outlier scenarios, or usability considerations, all the way to the "oops" bugs. End-to-end tests help us view the product more objectively, from a user's perspective. Automating these tests saves time and ensures consistent quality throughout the development process. - -However, automated end-to-end testing of a browser extension is a bit… complicated. But don't worry, we're up for this challenge! I'll share with you how we do end-to-end testing of the [Web Monetization browser extension](https://github.com/interledger/web-monetization-extension). - -For context, [Web Monetization](https://webmonetization.org) (WM) is a new way to support websites without ads or subscriptions. You, as the sender, specify your [Open Payments wallet address](https://openpayments.dev/introduction/wallet-addresses/) (think of it like an email address to send or receive money on the web), and the [websites specify theirs](https://webmonetization.org/developers/get-started/). As you browse the web, your browser sends small payments to the websites. Sounds cool, right? It's promising, but it lacks native support in browsers today. We created a browser extension to bridge this gap, enabling you to support your favorite creators today. - -![Playwright UI while testing the Web Monetization browser extension](/developers/img/blog/2024-12-03/playwright.png) - -## How does it work? - -While there are plenty of tools to facilitate end-to-end (E2E) testing for web apps, extension testing can be a different story. Most options lack maturity or comprehensive documentation. 
- -We use [Playwright](https://playwright.dev/) to run E2E tests. It has [some pointers](https://playwright.dev/docs/chrome-extensions) to get us started. - -Right now, Playwright only plays nicely with Chromium-based browsers, so we run our tests in Chrome and Edge. Firefox support is nearly there! We'll dive into the Chromium stuff first, then I'll give a quick status update on Firefox. The code snippets are written in TypeScript, so it's helpful to have a basic understanding of its syntax. - -## Loading the extension - -Since the WM browser extension works its magic on web pages, we can test its core features by loading it up and watching how websites behave under its influence. For example, websites may unlock exclusive content when they receive payments, or hide obtrusive adverts. - -We can launch the browser with our extension loaded using the `--load-extension=${pathToExtension}` CLI argument. We need to launch it in a persistent context with Playwright. - -```ts -function loadExtension(pathToExtension: string): Promise { - const context = await chromium.launchPersistentContext('', { - headless: true, - args: [ - `--headless=true`, - `--disable-extensions-except=${pathToExtension}`, - `--load-extension=${pathToExtension}` - ] - }) - return context -} -``` - -## Accessing the background service worker - -We require access to the extension's background service worker to interact with the browser extension APIs, including its local storage. - -```ts -let background = context.serviceWorkers()[0] -if (!background) { - background = await context.waitForEvent('serviceworker') -} -``` - -To access and modify the extension's local storage, we need to evaluate the Storage API calls within the context of the extension's service worker: - -```ts -const storageData = await background.evaluate(() => { - return chrome.storage.local.get([key1, key2]) -}) -// note the use of `chrome.` namespace - this works in Chromium as well as Firefox! 
-``` - -For instance, to verify if a user has connected their wallet to the WM extension, we can check the extension's local storage: - -```ts -const data = await background.evaluate(() => { - return chrome.storage.local.get(['connected']) -}) -expect(data.connected).toBe(true) -``` - -A nice thing with Playwright is we get TypeScript support out of the box, even in `evaluate` contexts: - -```ts -const { connected } = await background.evaluate(() => { - return chrome.storage.local.get < { connected: boolean } > ['connected'] -}) -expect(connected).toBe(true) -``` - -We can use this trick to mess with all sorts of extension APIs - opening and closing tabs, listening in on events, and more. This gives us the power to test our extension's behavior from top to bottom. - -## Accessing the popup - -While we can trigger some of the background API requests and test the extension's behavior, there's one missing piece: the user interface, the nifty [default popup](https://developer.mozilla.org/en-US/docs/Mozilla/Add-ons/WebExtensions/manifest.json/action) that the users will actually interact with. Without testing how users interact with it, we're just checking the engine, not taking the whole car for a spin – not exactly end-to-end testing, right? - -In case you haven't seen the extension yet, this is what it looks like: - -![Web Monetization extension's popup](/developers/img/blog/2024-12-03/extension-popup.png) - -Playwright doesn't have a magic button to open and poke around in the popup just yet. We can try using `chrome.action.openPopup()`, but that's a bit tricky. It needs user input in Firefox, and in Chrome, it's picky about which window it shows up in (i.e. only in the currently focused one). Even if we manage to open it, getting to its content is still a puzzle. - -The good news: the extension's UI is just a fancy HTML page. There's no bad news in this part. This means we can open it like any other webpage in a new tab! But what's the URL for this popup page? 
That varies from browser to browser, it's something like: `chrome-extension://{extensionId}/{path/to/popup.html}`. And how do we get the extension ID? Luckily, the background service worker has a URL too! We can extract the ID from its URL using a bit of JavaScript magic: `background.url().split('/')[2]`. Or, if you prefer a more semantic approach, you can use `new URL(background.url()).hostname`. Alternatively, we can get the full popup URL by evaluating `browser.action.getPopup({})` in the background worker context. Sometimes the simplest solutions come to mind only after you've gone knee-deep into a hacking challenge! So always take a break! - -```ts -const popup = await context.newPage() -await popup.goto(popupUrl) -// Now we can access the popup as a regular Playwright page -``` - -To make this more compatible with the way the extension opens its popup, we can open it in a literal popup window. This way we can keep the popup visible separately when we run Playwright tests in UI mode (or view captured screenshots or traces), and it looks like a popup this way - not a tab. - -```ts -async function getPopup(context: BrowserContext, popupUrl: string) { - const page = await context.newPage() - const popupPromise = page.waitForEvent('popup') - await page.evaluate(() => { - return window.open('', '', 'popup=true,width=448,height=600') - }) - const popup = await popupPromise - await page.close() // we don't need it anymore - await popup.goto(popupUrl) // window.open doesn't allow internal browser pages - - return popup // now we can access the popup as a regular Playwright page -} -``` - -Note that opening the popup by clicking the extension icon is equivalent to loading that popup page. So, we can reload the popup page before each test to simulate that. 
- -## Writing tests - -We can test the above helper functions to work well enough: - -```ts -import { test, expect } from '@playwright/test' - -test('popup has connect form', async ({ browserName }) => { - const context = await loadExtension(browserName) - const background = await getBackground(context) - const popup = await getPopup(context, background) - - const { connected } = await background.evaluate(() => { - /* chrome.storage... */ - }) - expect(connected).toBe(false) - - await expect(popup.locator('form')).toBeAttached() -}) -``` - -But why bother setting up the stage for each test, or even once per file with `beforeAll` (and a responsible cleanup in `afterAll`)? Enter fixtures! - -## Less repetition with fixtures - -We've been doing a lot of manual work, and repeating the same steps for each test can get tedious. Let's automate some of this with Playwright's fixtures. We can create a "base" fixture to handle the heavy lifting, like loading the extension and giving us access to its background and popup. This way, we can focus on writing the actual tests. 
- -```ts -// tests/e2e/fixtures/base.ts -import { test as base } from "@playwright/test"; - -type TestScope = { context: BrowserContext, background: Worker, popup: Page }; - -export const test = - base.extend < - TestScope > - { - context: async ({ browserName }, use) => { - const context = await loadExtension(browserName); // launch browser with extension loaded - await use(context); // use it - await context.close(); // close browser after use - }, - background: async ({ context }, use) => { - const background = await getBackground(context); - await use(background); - }, - popup: async ({ context, background }, use) => { - const popupUrl = await getExtensionPopupUrl(background); - const popup = await getPopup(context, popupUrl); - await use(popup); - await popup.close(); - }, - page: async ({ context }, use) => { - const page = await context.newPage(); - await use(page); - await page.close(); - }, - }; -export const expect = test.expect; -``` - -We then use this fixture as: - -```ts -// real tests are of course more complex -import { test, expect } from './fixtures/base' - -test('popup has connect form', async ({ background, popup }) => { - const { connected } = await background.evaluate(() => { - /* ... */ - }) - expect(connected).toBe(false) - - await expect(popup.locator('form')).toBeAttached() -}) - -test('can connect to wallet', async ({ background, popup }) => { - await popup.getByRole('button').submit() - - const { connected } = await background.evaluate(() => { - /* ... */ - }) - expect(connected).toBe(true) -}) - -test('monetizes page', async ({ background, popup, page }) => { - await connectWallet(popup) - await page.goto('https://example.com') - - await expect(popup.getByLabel('status')).toHaveText('monetizing...') - await expect(popup.locator('url')).toHaveText('example.com') -}) -``` - -Much better! We can make our fixtures even more powerful by customizing them for specific tests. 
For instance, when we're testing payments, we can set up the wallet connection within the fixture itself. This way, we can focus on the specific payment tests without repeating the connection process each time. - -## Organizing and optimizing tests - -Let's be honest, opening and closing a whole browser window for each test is a bit overkill and time-consuming. You wouldn't do that manually either. Time to optimize! - -We can optimize by using a single browser instance and popup for all tests in a file. This way, we can improve performance and resource usage. While we lose some parallelism, it's a fair trade-off for better efficiency. - -Playwright has a neat trick to scope resources per test worker, aptly named: `{ scope: 'worker' }`. Let's refactor our fixture to use worker scope. - -```ts -// tests/e2e/fixtures/base.ts -import { test as base } from '@playwright/test' - -// created once per test -type TestScope = { page: Page } -// created once per worker -type WorkerScope = { - persistentContext: BrowserContext - background: Worker - popup: Page -} - -export const test = base.extend({ - persistentContext: [ - // Ideally we wanted this fixture to be named "context", but it's already defined in the default base fixture under the scope "test", so we can't override it. - async ({ browserName }, use, workerInfo) => { - const context = await loadExtension(browserName) - await use(context) - await context.close() - }, - { scope: 'worker' } // yep, that's it. 
The default is { scope: "test" } - ], - background: [ - async ({ persistentContext: context }, use) => { - const background = await getBackground(context) - await use(background) - }, - { scope: 'worker' } - ], - popup: [ - async ({ background, persistentContext: context }, use) => { - const popupUrl = await getExtensionPopupUrl(background) - const popup = await getPopup(context, popupUrl) - await use(popup) - await popup.close() - }, - { scope: 'worker' } - ], - - page: async ({ persistentContext: context }, use) => { - const page = await context.newPage() - await use(page) - await page.close() - } -}) -export const expect = test.expect -``` - -Now, we'll have a single browser and popup instance per test file, not per test. This also means we need to be cautious to avoid interfering with each other's state. We don't want one test to mess up the setup for another. - -To ensure this, we'll split our tests into smaller, more focused files. This might mean more files, but it has a big advantage: each file can run independently in its worker. With enough CPU and memory (like in most dev machines), we can run all these files in parallel, making our tests fly! - -When running tests with a single worker, they're executed (queued in case of multiple workers) in chronological order. To ensure a logical test flow (most basic tests first and gradually move towards more specific scenarios), we can name our test files strategically. To enforce a strict order, we can add numerical prefixes to our file names, like 001-, 002-, etc. if needed. - -## Testing priorities: Let's get the big stuff right first - -We test for the essential product features first, then dive into specific behaviors. Test for things that are difficult to repeat manually and go after things that'll make our life harder if they regress. The goal still is to test as much as we can, but priorities! 
- -Here's what gets the spotlight in our testing of the Web Monetization extension: - -- Wallet connection: We test the connection process with different wallet providers to make sure new users can easily onboard. The wallet connection process varies a bit depending on the provider, so we've [built some clever tricks](https://github.com/interledger/web-monetization-extension/issues/613) behind the scenes to smooth things out for users. We test for expected and unexpected errors, and how gracefully they're handled. But even magic needs a checkup, so we run these tests daily in our nightly builds. -- Making payments to websites you visit: We test both automatic micro-payments and custom one-time payments to websites you visit. The [Web Monetization playground](https://webmonetization.org/play/) and the [Interledger test wallet](https://wallet.interledger-test.dev) are essential parts for these tests. In these tests, we ensure the payment amount and frequency are what the user wants them to be. - -These are the top priorities, and we cover more features in other tests. - -## Intercepting requests - -While we can observe the extension's behavior on a page, it's often helpful to tap into network requests. This way, we can verify that the right things are happening behind the scenes, especially when multiple actions can lead to similar outcomes. Plus, we can time our tests to wait for specific network requests to complete before checking the page's state. - -We can snoop on network requests on a page (like the popup or a regular webpage) using `page.on('request', handler)`. For example, in the extension, we intercept some API responses when adding keys to certain wallets, so we can revoke the right key during the test's cleanup. - -For the Web Monetization extension, it's more useful for us to intercept requests in the background service worker. To catch these requests, we can use `context.on('request', handler)`. 
If we only care about the response, we can listen to `'requestfinished'` instead. Just a heads up that this service worker request interception is still experimental in Playwright, so we need to set the `PW_EXPERIMENTAL_SERVICE_WORKER_NETWORK_EVENTS=1` environment variable to enable it. - -```ts -context.on('requestfinished', async function intercept(req) { - if (!req.serviceWorker()) return // we only care about service worker requests here - - if (isTheRequestWeAreAfter(req)) { - const json = await req.response().then((res) => res.json()) - // ... use response body - - context.off('requestfinished', intercept) // we're responsible citizens - } -}) -``` - -## More optimization tricks up our sleeve - -### Saving login cookies - -Most of our payment tests require logging in to the [Interledger Test Wallet](https://wallet.interledger-test.dev). It's a drag to log in every single time. That's why we handle login during an initial setup phase and then store these cookies securely in the filesystem. It's like having a "Remember me" feature (for websites that get it right!) for our tests. It saves us a bunch of clicks and makes our tests more efficient. 
- -```ts -// tests/e2e/auth.setup.ts -setup('authenticate', async ({ page }) => { - setup.skip(existsSync(AUTH_FILE), 'Already authenticated') - - await page.goto(`${TEST_WALLET_ORIGIN}/auth/login`) - await page.getByLabel('E-mail').fill(TEST_WALLET_USERNAME) - await page.getByLabel('Password').fill(TEST_WALLET_PASSWORD) - await page.getByRole('button', { name: 'login' }).click() - - await page.context().storageState({ path: AUTH_FILE }) -}) - -//Later, load the cookies into the browser context - -// tests/e2e/fixtures/base.ts -export const test = base.extend({ - persistentContext: [ - async ({ browserName }, use, workerInfo) => { - const context = await loadExtension(browserName) - - if (workerInfo.project.name !== 'setup') { - const { cookies } = await readFile(AUTH_FILE).then(JSON.parse) - await context.addCookies(cookies) - } - - await use(context) - await context.close() - }, - { scope: 'worker' } - ] - // ... -}) -``` - -### Saving the wallet's connected state - -When a user connects their wallet with the extension, we receive their permission to send their funds in the form of tokens. With Open Payments, we use [GNAP grants & tokens](https://interledger.org/developers/blog/open-payments-cinderella-story/) to facilitate that. GNAP is a next-generation protocol for delegating access to APIs securely & flexibly; you can consider it a successor of OAuth. - -While testing, we're exploring ways to store these grants and tokens after the test wallet is connected. This would eliminate the need to reconnect the wallet before every single test. Imagine having to unlock your phone every time you wanted to open a new app – not ideal, right? - -## Firefox? - -This is the only section with bad news in this article. - -Firefox doesn't yet have a straightforward API like Chrome's `--load-extension` flag to load extensions with Playwright. However, we can explore using Remote Debugging Protocol (RDP) to call the `installTemporaryAddon` function. 
This requires [adding a little RDP client](https://github.com/microsoft/playwright/issues/7297#issuecomment-1211763085) to communicate with Firefox. - -Even if we manage to load the extension, there's still a roadblock: we can't directly load extension pages into the browser [yet](https://github.com/microsoft/playwright/issues/7297#issuecomment-1655443442). We're still a few steps away from a seamless extension testing experience in Firefox with Playwright. - -Let's keep an eye on Playwright's development and hope for future updates that might bridge this gap. [Upvote this issue on GitHub](https://github.com/microsoft/playwright/issues/7297) if this will be helpful for you too. - -## Next steps: Expanding test coverage - -We'll level up our testing game by diving deeper into browser-specific features like Edge's split view and adding more tests to cover every corner case. We'll push the extension to its limits, simulating different user interactions and trying to break it, to build a rock-solid extension that provides a great user experience. - -Want to dive deeper into our testing strategy? Check out our [GitHub repo for the Web Monetization extension](https://github.com/interledger/web-monetization-extension). We're always open to feedback and contributions, so feel free to submit a pull request! diff --git a/src/content/blog/2024-12-11-rafiki-beta-release.mdx b/src/content/blog/2024-12-11-rafiki-beta-release.mdx deleted file mode 100644 index 730133d8..00000000 --- a/src/content/blog/2024-12-11-rafiki-beta-release.mdx +++ /dev/null @@ -1,69 +0,0 @@ ---- -title: 'Rafiki Beta Release' -description: 'The Wild is Calling.' 
-date: 2024-12-11 -slug: rafiki-beta-release -authors: - - Tadej Golobic -author_urls: - - https://www.linkedin.com/in/tadej-golobic -tags: - - Interledger - - Interledger Protocol - - Interledger Stack - - Interledger Foundation - - Open Payments - - Rafiki - - Dassie - - Web Monetization - - STREAM - - SPSP - - Beta ---- - -It is with great pride, and perhaps a touch of relief, that we deliver on a promise made: **[Rafiki Beta is here](https://github.com/interledger/rafiki/releases/tag/v1.0.0-beta)**. - -Yes, it took longer than we planned. And yes, there were detours along the way. But as all great journeys teach us, the destination is not merely a point on a map. It is the culmination of every challenge overcome, every detail refined, and every dream made real. - -We are perfectionists at heart, and there was always one more feature, one more improvement, one more bug to fix, one more piece to perfect. But the time has come to release Rafiki into the wild. - -## Rafiki Beta is here! - -Rafiki is a comprehensive software package that bundles up all Interledger functionality into an easy-to-use solution. It’s free for any licensed account servicing entity to use, enabling them to unlock the power of Interledger straight out of the box. - -Let us reflect on what this milestone includes - a powerful suite of capabilities that are set to redefine the world of open payments. - -- **ILP connector:** The backbone of interconnectivity, housing the core functionality of the [Interledger Protocol](https://interledger.org/developers/get-started/). Like the internet transmits information in packets, the connector splits payments into small, self-contained payment packets with details such as amount, sender, and routing information. 
This approach enables seamless, secure and scalable financial transactions, ensuring compatibility across platforms, currencies and technologies while optimizing routing, distributing workloads, reducing bottlenecks, and enhancing reliability and scalability. -- **SPSP and Open Payments:** These application-layer protocols simplify and standardize payment flows, ensuring interoperability across diverse systems while reducing the complexity of integration. By facilitating clear communication between senders and receivers, they deliver a safe, reliable and user-friendly experience for online payments -- **Admin APIs:** Empowering you with the tools to take control. The Backend Admin API allows you to oversee critical operational aspects of your Rafiki instance, such as network relationships, assets, account configurations and liquidity. The Auth Admin API offers detailed oversight of grants, providing visibility into their status, associated accounts and permissions, while enabling key management actions, including revocation. -- **Rafiki Admin UI:** The Rafiki Admin application is a user-friendly interface designed to streamline the management of your Rafiki operations. It features a comprehensive dashboard for overseeing peers, assets, wallet addresses, liquidity, webhooks, and payments, serving as an intuitive front–end to Rafiki’s backend service. Additionally, Rafiki supports an optional integration with open-source identity & user management system [Ory Kratos](https://www.ory.sh/kratos/), allowing you to invite or remove users as needed. - ![Rafiki Admin UI](/developers/img/blog/2024-12-11/rafiki-admin.png) - ![Rafiki Admin UI - Payments](/developers/img/blog/2024-12-11/admin-ui-payments.png) -- **Security audit fixes:** Rafiki underwent its first comprehensive [security audit](https://interledger.org/developers/blog/rafikis-first-security-audit/), conducted by an external security firm to identify and address vulnerabilities. 
Key improvements include implementing HMAC signing for API requests, disabling GraphQL introspection in production, adding protections against denial-of-service (DoS) attacks, and preventing clickjacking in the Admin UI. Proactive dependency scanning was also integrated into the CI pipeline to mitigate library vulnerabilities. This external audit ensures a robust and secure foundation for Rafiki. Security is not a one-time effort; it is an ongoing commitment. As Rafiki evolves, we will continue conducting regular audits and proactively strengthening its defenses to maintain the highest standard of safety and trustworthiness. -- **Telemetry:** Gain real-time insights to understand and optimize your deployments. Rafiki’s telemetry captures key metrics, including the number of packets flowing through the system, the number of transactions, the total value sent through the network, as well as the average transaction value and time - all while preserving user privacy. [Check telemetry in action](https://rafikitelemetry.grafana.net/public-dashboards/f70c8a6033b14da5a9f1cb974def602a). - ![Telemetry](/developers/img/blog/2024-12-11/telemetry.png) -- **New documentation:** Clear, concise, and crafted with precision, Rafiki’s [new documentation](https://rafiki.dev) is designed to empower integrators, developers and administrators alike. We understand that great tools need great instructions and our updated documentation reflects this philosophy. By making our documentation more accessible, detailed and actionable, we are ensuring that Rafiki is not only powerful but also easy to adopt and extend - -## What comes next? - -The journey does not end here; in fact, it begins anew. In the months ahead, we will focus on: - -- **Performance Improvements:** Rethinking architecture for even greater efficiency. -- **Multi-hop Functionality:** Expanding Rafiki’s reach by enabling payments to traverse multiple nodes in the network. 
This feature allows for greater flexibility, as payments can flow across intermediary connectors to reach their destination, even when direct paths do not exist. Multi-hop functionality is key to creating a truly interconnected and scalable payment network. -- **Multi-tenancy Support:** Enabling a single instance of Rafiki software to service multiple account servicing entities (aka tenants). Each tenant operates independently with isolated configurations, data and resources, ensuring security and privacy while sharing the underlying infrastructure. A single Rafiki deployment could simultaneously support multiple digital wallet providers, each with their own branching, users and payment configurations, all while leveraging the same core system. This makes it easier for organizations to scale operations, reduce costs and support diverse business models within a unified environment. - -## Reflecting on the journey - -Let’s take a moment to reflect on the significance of this Beta release. Rafiki Beta is more than just a set of features. It represents what can be achieved when a global community unites around a common vision. It stands as a proof that open payments can be secure, scalable and accessible to all. - -The journey here hasn’t been without its challenges. From feature creep to overcoming performance bottlenecks, we have faced numerous hurdles. Yet, each challenge has been an opportunity to learn, improve and create something stronger. - -At the recent [Interledger Summit in Cape Town](https://interledger.org/summit), the impact of our efforts was clear. From live demonstrations of the newly rebranded [Test Wallet](https://wallet.interledger-test.dev), to integrations with partners like [People’s Clearinghouse](https://lacamara.mx) and [GateHub](https://gatehub.net), Rafiki’s potential was on full display. It was a moment of pride, a reminder of our purpose, and a glimpse into the promising future we are building together. 
- -## Join us in the future of Open Payments - -Our journey is far from complete. Now, we extend an invitation to you - our community, partners, and collaborators, to join us in the next chapter of this endeavour. - -Whether you are building integrations, testing performance, or simply exploring Rafiki’s capabilities, you are an integral part of this story. Together, we can create a world where payments flow as seamlessly as ideas and financial inclusion becomes more than just a goal - it becomes a reality. Together, we can make sending payments as simple as sending an email. - -The time has come to move forward together. Rafiki Beta has arrived. The power of Rafiki has been set free, ready to transform the world of payments. diff --git a/src/content/blog/2024-12-17-rafiki-tigerbeetle-integration.mdx b/src/content/blog/2024-12-17-rafiki-tigerbeetle-integration.mdx deleted file mode 100644 index a67aecc7..00000000 --- a/src/content/blog/2024-12-17-rafiki-tigerbeetle-integration.mdx +++ /dev/null @@ -1,138 +0,0 @@ ---- -title: "Balancing the Ledger: Rafiki's TigerBeetle Integration" -description: 'How TigerBeetle Supercharges Rafiki’s Financial Core.' -date: 2024-12-17 -slug: rafiki-tigerbeetle-integration -authors: - - Jason Bruwer -author_urls: - - https://github.com/koekiebox - - https://www.linkedin.com/in/jason-bruwer-8110766/ -tags: - - Interledger - - Interledger Protocol - - Interledger Stack - - Interledger Foundation - - Open Payments - - Rafiki - - TigerBeetle ---- - -[Rafiki](https://rafiki.dev/) is an open-source platform that enables Account Servicing Entities (ASEs) like banks and digital wallet providers to integrate [Interledger Protocol](/developers/get-started) (ILP) functionality into their systems. - -A critical component of Rafiki’s architecture is its accounting database, which manages financial transactions and account balances. 
To enhance performance and reliability, Rafiki offers the option to use TigerBeetle, a specialized financial accounting database, as its accounting backend. - -![Tigerbeetle + Rafiki](/developers/img/blog/2024-12-17/tigerbeetle.png) - -## What is TigerBeetle? - -[TigerBeetle](https://tigerbeetle.com/) is a financial accounting database designed for mission-critical safety and performance, aiming to power the future of financial services. It provides high-throughput transaction processing and ensures data integrity, making it well-suited for handling complex financial operations. - -#### Why Integrate TigerBeetle with Rafiki? - -Integrating TigerBeetle into Rafiki brings several advantages: - -- **High Performance:** TigerBeetle is optimized for online transaction processing (OLTP) workloads, offering significantly higher performance compared to general-purpose databases. -- **Financial Consistency:** With its built-in debit/credit schema, TigerBeetle enforces financial consistency at the database level, reducing the risk of errors in transaction processing. -- **Fault Tolerance:** TigerBeetle is designed for faults and ensures data durability, which is crucial for financial applications. - -## How Does Rafiki Utilize TigerBeetle? - -In Rafiki, the backend service is responsible for handling business logic and external communications, including managing accounts and tracking liquidity. - -For the accounting database, Rafiki provides the option to use TigerBeetle instead of PostgreSQL. -This setup allows Rafiki to leverage TigerBeetle’s high-performance transaction processing capabilities for managing account balances and financial transactions. - -It’s important to note that while TigerBeetle can be used for accounting purposes, Rafiki still requires a PostgreSQL instance for the authentication services and [Open Payments](https://openpayments.dev/) resources. -Therefore, both databases operate in tandem within Rafiki’s infrastructure. 
- -## Double-Entry Accounting: The Backbone of Financial Systems - -At the heart of any financial system lies the principle of **double-entry accounting**. - -This system, which dates back to the 15th century, ensures financial accuracy by recording every transaction as two corresponding entries: a **debit** in one account and a **credit** in another. - -The total debits and credits always balance, providing a clear view of financial transactions and reducing the risk of errors or fraud. - -### How Double-Entry Accounting Works - -In double-entry accounting: - -- **Debit** entries represent increases in assets or expenses and decreases in liabilities, equity, or revenue. -- **Credit** entries represent increases in liabilities, equity, or revenue and decreases in assets or expenses. - -**For example, if a customer pays $100 into a digital wallet**: - -**1.** The Cash account (an asset) is debited by $100. - -**2.** The Customer Balance account (a liability) is credited by $100. - -This systematic approach ensures that the books are always balanced, providing transparency and consistency in financial operations. - -### Why Double-Entry Accounting Matters in Rafiki and TigerBeetle - -Both Rafiki and TigerBeetle embrace double-entry accounting as a fundamental concept, ensuring that all financial transactions are meticulously tracked and balanced. - -Here’s how it applies to their integration: - -- **Financial Integrity at Scale:** TigerBeetle is built with a native debit/credit schema, ensuring that every transaction adheres to double-entry accounting principles. This guarantees that the accounting records are always accurate and consistent, even under high-throughput conditions. -- **Error Detection:** By requiring every transaction to balance debits and credits, double-entry accounting makes it easier to identify discrepancies or anomalies, enhancing the overall reliability of the system. 
-- **Operational Transparency:** For Account Serving Entities (ASEs) using Rafiki, double-entry accounting provides clear insights into financial flows, helping operators manage liquidity, track balances, and ensure compliance with financial regulations. - -### Enhanced Features with TigerBeetle - -TigerBeetle elevates double-entry accounting by embedding these principles directly into its architecture: - -- **Atomic Transactions:** TigerBeetle ensures that transactions are either fully completed or not executed at all, preventing partial updates that could lead to discrepancies. -- **Performance and Consistency:** By using double-entry accounting, TigerBeetle can process an exceptionally high volume of transactions per second while ensuring financial accuracy. - -### Real-World Example - -Thanks to double-entry accounting, both sides of the transaction are recorded in balance, providing a clear audit trail for reconciliation and compliance. - -Double-entry accounting is more than a bookkeeping method, it is the foundation of trust and accuracy in financial systems. By integrating TigerBeetle, which seamlessly implements this principle, Rafiki ensures that its users have a robust, reliable, and high-performing platform to manage financial operations with precision and confidence. - -#### Detailed Example: Sending and Receiving Rafiki instances - -Here’s a practical illustration involving sending and receiving connectors within Rafiki, and how the underlying TigerBeetle accounts are affected. In this example, a sending Rafiki (where an [outgoing payment](https://openpayments.dev/resources/glossary/#outgoing-payment-resource) is) sends ILP packets to a peer Rafiki over ILP, (paying into an [incoming payment](https://openpayments.dev/resources/glossary/#incoming-payment-resource)). The peering relationship is defined using USD for simplicity in this example. 
- -This process happens **for each ILP packet**, which can occur at an extremely high frequency, especially during real-time payments. TigerBeetle’s high-performance architecture is critical here, as it ensures accurate and consistent accounting for these high transaction volumes while maintaining financial integrity. - -##### Sending Rafiki - -The sender creates an outgoing payment of `100 USD` to an incoming payment at a peer’s Rafiki instance. The peering relationship between instances is in USD. - -| TigerBeetle Ledger Entry #1 | Debit | Credit | -| ----------------------------------- | ----- | ------ | -| USD outgoing payment liquidity acct | `100` | | -| USD peer liquidity acct | | `100` | - -##### Receiving Rafiki - -An incoming payment at the recipient’s end receives `100 USD` from the outgoing payment at the peer’s Rafiki instance. - -| TigerBeetle Ledger Entry #2 | Debit | Credit | -| ----------------------------------- | ----- | ------ | -| USD peer liquidity acct | `100` | | -| USD incoming payment liquidity acct | | `100` | - -Rafiki documentation has more [accounting explanations](https://rafiki.dev/overview/concepts/accounting). - -## Implementing TigerBeetle in Rafiki - -To deploy Rafiki with TigerBeetle, you can use Docker Compose or Helm with Kubernetes. - -[Rafiki documentation](https://rafiki.dev/) provides example configurations for these deployment methods, including how to set up TigerBeetle as the accounting database. - -For instance, when using Helm, you can specify the TigerBeetle version by adjusting the respective tag in the `values.yaml` file. -During the deployment process, it’s crucial to ensure that the environment variables are correctly set, especially when preparing for a production environment. - -## Conclusion - -The integration of TigerBeetle into Rafiki significantly enhances the platform’s capacity to process financial transactions with exceptional performance and reliability. 
- -By harnessing TigerBeetle’s advanced features, Rafiki offers Account Servicing Entities (ASEs) a powerful and dependable solution for implementing Interledger functionality, ensuring seamless and secure financial operations. - -More than just a bookkeeping method, double-entry accounting is the cornerstone of trust and precision in financial systems. - -With TigerBeetle seamlessly incorporating this principle, Rafiki delivers a high-performance platform that empowers its users to manage financial transactions with accuracy, confidence, and efficiency. diff --git a/src/content/blog/2025-02-05-ilp-packet-lifecycle.mdx b/src/content/blog/2025-02-05-ilp-packet-lifecycle.mdx deleted file mode 100644 index ec5f4265..00000000 --- a/src/content/blog/2025-02-05-ilp-packet-lifecycle.mdx +++ /dev/null @@ -1,108 +0,0 @@ ---- -title: 'The Lifecycle of an Interledger Packet' -description: 'A look under the hood of how Rafiki orchestrates an Interledger payment.' -date: 2025-02-05 -slug: ilp-packet-lifecycle -authors: - - Nathan Lie -author_urls: - - https://www.linkedin.com/in/nathan-lie-138a73121 -tags: - - Interledger ---- - -## Introduction - -The Interledger Summit just wrapped up recently, along with its celebrations of & discussions on open standards and financial inclusion. -In Cape Town last year, a humble part of Interledger Protocol (ILP for short) history took center stage as Stefan Thomas [gave a keynote](https://www.youtube.com/watch?v=Djw6wMXFv1Q&t=3240s) on how years ago, the Interledger Protocol became more fast & reliable by introducing packetization to the protocol. -With packetization breaking payments up into small amounts, sending a payment no longer had a single failure point and both parties in a payment would be able to manage risk more effectively. 
- -Most integrators of the Interledger protocol are leveraging the ILP packet through [Rafiki](https://rafiki.dev/overview/overview/), as it provides an efficient way for those integrators to participate in an ILP network. -Though Rafiki's main purpose is to abstract this, the fresh intrigue around Interledger's transport method invites a look at the connection between Rafiki's high-level payment orchestration and ILP's atomic, low-level concept of the ILP packet. - -### Setting the Terms - -Before it starts sending all those ILP packets, a higher-level protocol is needed to set the terms of the payment as Interledger doesn't have any mechanism to negotiate payment terms. Rafiki uses the [Open Payments Standard](https://openpayments.dev/introduction/overview/) to achieve this. - -A prospective debitor will use this standard to set payment terms by first creating an _incoming payment_ on the Rafiki instance hosting the receiving wallet address, during which the receiving Rafiki instance will associate an ILP address with the incoming payment and generate a shared secret for it. -Then it may acquire a _quote_ from the Rafiki instance hosting the sending wallet address, which becomes associated with the incoming payment. - -Finally, it creates an _outgoing payment_ against the sending wallet address by acquiring consent from its owner to charge them with particular terms such as its amount and its currency. -The prior incoming payment (or the quote, if created), is provided during this process and becomes associated with the outgoing payment. -Once that is complete, these Rafiki instances can begin to fulfill the terms of this payment using Interledger. - -On a more technical point, ILP packets may be involved before an outgoing payment is even created. During the acquisition of a _quote_ from the sending Rafiki instance, it will send an ILP packet across the network to the destination. 
This packet will need to traverse each hop on the network as each node's fees and rates won't be known to the sender without doing so. - -_For more on Open Payments, consider reading the [Simple Open Payments Guide](https://interledger.org/developers/blog/simple-open-payments-guide/) on this blog._ - -### Preparing the Packet - -Now that the sending Rafiki instance has an outgoing payment created against one of its wallet addresses, it can pass the terms of that payment into an Interledger payment. First, it uses the incoming payment associated with it (or the quote if it was created) to acquire an ILP address for the receiver and the shared secret, which is used to generate a [_condition_](/developers/rfcs/stream-protocol/#6-condition-and-fulfillment-generation). -A condition is a hash that can only be generated with a secret that is available only to the receiver and the sender. -Using the receiving ILP address, the condition, the receiver, and send amount described in the outgoing payment, Rafiki creates a connection over ILP to the receiver using [STREAM](/developers/rfcs/stream-protocol/). -STREAM is a transport-layer protocol for Interledger that is responsible for breaking a payment down into the aforementioned packets and sends them _en masse_ until the payment is fulfilled. - -For each packet, STREAM handles them using a two-phase execution process that is similar to the request-response process that HTTP packets sent over the internet use. - -![Two-Phase Transfer Diagram](/developers/img/blog/2025-02-05/2p-transfer.png) - -In the first phase, the sender creates packets known as [Prepare packets](/developers/rfcs/interledger-protocol/), which each contain some fraction of the total send amount configured by each peer along the ILP network path, the condition generated using SPSP, the final destination of the packet in the network, and an expiration time for the packet. 
- -All of these packets are sent across the network to the receiver, and must be fulfilled or rejected by the receiver in the second phase in order to be considered successfully sent. - -#### ILP Prepare Packet Specification - -![ILP Prepare Packet Specification](/developers/img/blog/2025-02-05/ilp-prepare-spec.png) - -### Walking the Path - -Every Rafiki instance contains an Interledger connector, one of the nodes that make up the Interledger network. -As a packet travels through the network, it may be forwarded by multiple connectors as it traverses its path to the connector of the receiving Rafiki instance. - -Every connector is "peered", or connected, to at least one other connector in order to access the greater ILP network, and each peering relationship is maintained with each party keeping a balance of funds that the other is able to send to the other. -Each connector the packet reaches must ensure that the connector it received it from has sufficient balance to fulfill the amount encoded in the packet, and deduct it from that balance if that is the case. - -Once that connector determines there is sufficient liquidity, it must then determine the next connector to which it should forward the packet. -It references its other peering relationships from a routing table and then forwards the packet to the next connector along the path. -In the process it may also apply an exchange rate or a fee to the amount in the packet - a prospect that may necessitate the creation of a _quote_ during Open Payments to accurately determine the total of these charges. - -If a connector on the packet's path to the receiver determines that the connector before it doesn't have enough balance to fulfill the packet, it will send a [Reject packet](/developers/rfcs/interledger-protocol/) back along that path to the sender. -This signals to the sender that a given packet didn't reach the receiver. 
-The Reject packet contains an error code that describes what would have caused the rejection, and the address of the connector that sent the rejection. - -#### ILP Reject Packet Specification - -![ILP Reject Packet Specification](/developers/img/blog/2025-02-05/ilp-reject-spec.png) - -Provided there are no rejections, this process is repeated until the packet reaches the connector of the receiving Rafiki instance. - -### Responding with Fulfill or Reject - -In the second phase of STREAM's execution process, the receiver will need to handle each Prepare packet with either a fulfillment or a rejection, so that the sender knows whether or not a given Prepare packet was successfully sent. -The receiver fulfills a Prepare packet by sending a [Fulfill packet](/developers/rfcs/interledger-protocol/) back to the sender with the preimage from which the Prepare packet's condition was generated. - -#### ILP Fulfill Packet Specification - -![ILP Fulfill Packet Specification](/developers/img/blog/2025-02-05/ilp-fulfill-spec.png) - -Since only the receiver can provide the preimage needed to fulfill a "prepare" packet's condition, no node in the network in between it and the sender can intercept the packet and send back a fulfilled packet without it reaching the receiver first. -As soon as the sender receives enough fulfillments from the receiver to complete the payment, it stops sending packets over STREAM and closes the connection. - -If the received Prepare packet expired before it reached the receiver, the amount received already from prior packets exceeds the total, or some other error occurs, the receiver will respond with a Reject packet instead. - -On another technical point, there also exist some types of packets that cannot be fulfilled. For instance, an unfulfillable packet gets sent when an Open Payments quote is created. 
This packet's purpose is to determine what the total cost of the payment will be by factoring any fees or conversion rates that may be applied, instead of trying to fulfill a payment. - -### Wrapping Things Up - -As "fulfill" packets arrive at the sender, it verifies that the preimage sent back in each of them does indeed fulfill the condition sent with the corresponding "prepare" packet. -Once the STREAM connection finishes sending all of the packets and verifies the corresponding fulfillments from the receiver, it closes. The sending Rafiki instance will then update its outgoing payment as being complete, and the receiving Rafiki instance publishes a webhook to its account servicing entity directing it to mark its incoming payment as complete as well. - -## Conclusion - -The Interledger Protocol had to go through a number of important changes in order to position itself as a future-proof way to send money. Packetizing Interledger payments was a significant change that brought it in line with the design patterns of the modern web that it seeks to be a part of. -These packets continue to play this role as a part of Rafiki, as it coordinates those payments with Open Payments and manages the ILP connections used to fulfill them. 
- -If this post has established the connection between Rafiki, Open Payments, and Interledger in a way that piques interest, consider delving further into these Interledger concepts: - -- [ILP v4](/developers/rfcs/interledger-protocol/) -- [STREAM](/developers/rfcs/stream-protocol/) -- [Connectors & Peering](/developers/rfcs/peering-clearing-settling) diff --git a/src/content/blog/2025-03-12-breakpoint-it-work-week.mdx b/src/content/blog/2025-03-12-breakpoint-it-work-week.mdx deleted file mode 100644 index 2fa7ad44..00000000 --- a/src/content/blog/2025-03-12-breakpoint-it-work-week.mdx +++ /dev/null @@ -1,47 +0,0 @@ ---- -title: 'The first work week of the season - BreakPoint IT Work Week' -description: 'The first work week of the season' -date: 2025-03-12 -slug: breakpoint-it-work-week -authors: - - Timea Nagy -author_urls: - - https://www.linkedin.com/in/nagy-timea-35483024 -tags: - - Interledger ---- - -After shaking off the January blues, the Interledger Engineering Managers and Product Managers gathered with the BreakPoint IT team for a productive work week in Cluj-Napoca, Romania. -Why did we do this? It's crucial to reflect on the achievements of the past year, set goals and expectations for the year ahead, and, of course, meet face-to-face to finally enjoy that coffee we've been talking about. - -The plan for the week was straightforward: review all the projects that the BreakPoint IT team is working on, present the latest developments, discuss ongoing work, and outline the direction of these projects in the coming months. -The projects covered included the [Open Payments](https://openpayments.dev/)/[Rafiki](https://rafiki.dev/) integration projects, our Test Network - [Test Wallet](https://wallet.interledger-test.dev/), [Test Boutique](https://boutique.interledger-test.dev/), [Interledger Pay](https://interledgerpay.com/), and [Interledger Cards](https://wallet.interledger.cards/). 
- -We also highlighted our goal of expanding Open Payment SDKs across multiple programming languages. While our TypeScript SDK is ready, we're also working on PHP, Rust, Go, and Python SDKs with the support of our developers and the community, and we're aiming to add Java and mobile SDKs as well. -Rafiki updates and goals were also discussed, with more detailed conversations expected during the Rafiki work week later this year. - -Two of the most talked-about topics were the [Web Monetization Extension](https://webmonetization.org/) and the Publisher Tools. -The progress these products are making, week by week, is a testament to the incredible people behind them. There are exciting plans ahead for these projects, so stay tuned. - -![BreakPoint IT Work Week](/developers/img/blog/2025-03-12/bp-ww.jpg) - -As always, plans are set, but sometimes a team or two must shift focus to tackle something urgent or higher priority, and they’re sent into the "dungeon" to make it happen. -This year, the fortunate team was the Interledger Wallet team. The [Interledger Wallet](https://interledger.app/) is currently under development, with Interledger taking over the old Fynbos wallet and giving it a complete revamp, along with new features. -Remember the new cards we were all excited about after the [Interledger Summit 2024](https://interledger.org/summit)? -The Interledger Wallet team will be the ones to bring the new cards vision to life. - -![Group Photo at Work Week](/developers/img/blog/2025-03-12/bp-ww-group.jpg) - -We also had the chance to discuss ways the team can improve community involvement. -Speaking of which, Ioana, our Community-focused Engineering Manager, organized an incredible event in collaboration with [Women in Tech](https://www.womenintechcluj.com/)—a meetup focused on Web Monetization, titled Pay the Web Forward. 
-After listening to insightful presentations and demos by [Ioana Chiorean](https://interledger.org/team/ioana-chiorean), [Rabeb Othmani](https://interledger.org/team/rabeb-othmani), and [Timea Nagy](https://interledger.org/team/timea-nagy), the participants had numerous questions, and the level of engagement was impressive. -The networking session even ran longer than expected, reflecting the strong interest and enthusiasm. - -![Interledger Meetup](/developers/img/blog/2025-03-12/bp-ww-wt.jpg) - -In summary, the BreakPoint Work Week was both productive and successful. Decisions were made, discussions took place, coffees were enjoyed, and Romanian sweets were consumed in abundance. -The consensus was clear: we’re all looking forward to the next time we meet up again. - -Bonus picture, Romanian Floating Island dessert, AKA Birds Milk, if you haven’t tried it yet, we highly recommend it. - -![Birds Milk](/developers/img/blog/2025-03-12/bp-ww-bm.png) diff --git a/src/content/blog/2025-05-06-introducing-open-payments-php.mdx b/src/content/blog/2025-05-06-introducing-open-payments-php.mdx deleted file mode 100644 index 8716abb8..00000000 --- a/src/content/blog/2025-05-06-introducing-open-payments-php.mdx +++ /dev/null @@ -1,144 +0,0 @@ ---- -title: 'Introducing Open Payments PHP: A New Bridge for Financial Interoperability' -description: 'Explaining the Open Payments PHP Library features and how it works' -date: 2025-05-06 -slug: introducing-open-payments-php -authors: - - Adi Boros -author_urls: - - https://www.linkedin.com/in/adiboros/ -tags: - - Interledger - - Open Payments - - PHP ---- - -## 🎯 Why Open Payments Matters - -The [**Open Payments**](https://openpayments.dev/) standard is reshaping how applications initiate, manage, and complete digital transactions — enabling truly interoperable financial systems across different wallets, services, and financial institutions. 
- -But while the specification has seen growing adoption across JavaScript platforms, there was one massive gap: **PHP developers**, who still power a huge portion of the web, had no simple, native way to implement Open Payments in their applications. - -Today, we're excited to change that with the release of Open Payments PHP — an open-source library that makes it easy for PHP applications to participate in the future of financial interoperability. - -## 💡 What We Built: Open Payments PHP - -[interledger/open-payments-php](https://github.com/interledger/open-payments-php) is a modern, Composer-installable PHP package that provides full client support for the Open Payments API. -It includes: - -✅ Full support for [Grants](https://openpayments.dev/introduction/grants/), [Incoming Payments](https://openpayments.dev/introduction/op-flow/#incoming-payment), [Outgoing Payments](https://openpayments.dev/introduction/op-flow/#outgoing-payment), [Quotes](https://openpayments.dev/introduction/op-flow/#quote), and [Token](https://openpayments.dev/snippets/token-rotate/) management. - -✅ Built for PHP 8.3+, using strict typing, PSR-4 autoloading, and Composer dependency management. - -✅ Clean, service-oriented architecture: - -- `Services/` : Interact with the Open Payments endpoints. -- `Models/` : Represent API resources like grants, payments, and quotes. -- `Validators/` : Validate API request payloads before sending. - -✅ Fully tested with PHPUnit, ready for production use. 
- -Here's what it looks like to request a grant: - -```php -use OpenPayments\AuthClient; -use OpenPayments\Config\Config; - -$config = new Config( - $WALLET_ADDRESS, $PRIVATE_KEY, $KEY_ID -); -$opClient = new AuthClient($config); - -$wallet = $opClient->walletAddress()->get([ - 'url' => $config->getWalletAddressUrl() -]); - -$grant = $opClient->grant()->request( - [ - 'url' => $wallet->authServer - ], - [ - 'access_token' => [ - 'access' => [ - [ - 'type' => 'incoming-payment', - 'actions' => ['read', 'complete', 'create', 'list' ] - ] - ] - ], - 'client' => $config->getWalletAddressUrl() - ] -); -``` - -With just a few lines of code, PHP developers can now create grants, request quotes, initiate outgoing payments, or complete incoming payments — all fully compliant with Open Payments specifications (including generating HTTP signature headers). - -Detailed fully functional code snippets covering all endpoints can be found on the [Open Payments PHP Snippets](https://github.com/interledger/open-payments-php-snippets/) repo. - -## 🛠️ How It Works: Inside the Library - -At its core, **Open Payments PHP** is designed around simplicity and modularity. -The library is structured into clear layers, making it easy for developers to jump in, extend, or contribute: - -## 📁 Project Structure - -| Folder | Purpose | -| ------------- | ------------------------------------------------------------------- | -| `Contracts/` | Interface definitions (routes) for Services | -| `Services/` | Interact with Open Payments endpoints like grants, payments, quotes | -| `Models/` | Data models representing Open Payments resources | -| `Validators/` | Input validation logic before making API calls | -| `Traits/` | Helpers like ID parsing and URL extraction | -| `Utils/` | Utility functions used for the http signature | - -Each service corresponds directly to parts of the Open Payments API. -For example: - -- GrantService handles grant requests and continuations. 
-- QuoteService manages the creation and retrieval of quotes. -- IncomingPaymentService handles creation and retrieval of IncomingPayments. -- OutgoingPaymentService covers creation and retrieval of OutgoingPayments. - -All these services are incorporated into the main AuthClient and can be used directly from the client like in the above examples. -Developers interact mainly through these service classes, without worrying about low-level HTTP requests or Open Payments internal mechanics, like the [http signatures](https://openpayments.dev/introduction/http-signatures/) — the library abstracts that complexity away. - -## 🌎 Why It Matters for the PHP Community - -PHP remains one of the most widely used languages across the web — powering platforms like WordPress, Laravel, Drupal, and countless custom-built applications. - -By bringing first-class Open Payments support to PHP, we unlock: - -🔗 Broader ecosystem participation: more web apps, e-commerce platforms, and financial services can integrate Open Payments without switching tech stacks. - -🚀 Faster innovation: developers can focus on building user-facing products instead of re-implementing complex financial protocols. - -🛡️ Secure by default: thanks to strict type enforcement, request validation, and adherence to the Open Payments specification. - -🤝 Stronger community collaboration: easier onboarding for teams that already speak PHP. - -This marks an important step toward making Open Payments truly universal across different languages and platforms. - -## 🚀 What's Next - -We’re just getting started! - -Here’s what’s coming soon for Open Payments PHP: - -📚 [Open Payments official documentation](https://openpayments.dev/introduction/overview/) will soon include usage examples (snippets) for the PHP library also. 
- -🔥 Example projects — showcasing integration with popular frameworks like Laravel, Symfony Console apps ([Snippets library](https://github.com/interledger/open-payments-php-snippets/)), and WordPress - ecommerce - plugins. - -🤝 Community-driven development — pull requests, issues, and feature discussions are welcome! - -We encourage developers to test the library, provide feedback, open issues, or submit pull requests. -Together, we can grow the Open Payments ecosystem across the entire PHP world. - -## 🔗 Resources - -GitHub: [interledger/open-payments-php](https://github.com/interledger/open-payments-php) - -GitHub: [interledger/open-payments-php-snippets](https://github.com/interledger/open-payments-php-snippets) - -[Open Payments Specification](https://openpayments.dev/) - -[Interledger Foundation](https://interledger.org/) diff --git a/src/content/blog/2025-05-20-Introducing-publisher-tools.mdx b/src/content/blog/2025-05-20-Introducing-publisher-tools.mdx deleted file mode 100644 index 86054b4b..00000000 --- a/src/content/blog/2025-05-20-Introducing-publisher-tools.mdx +++ /dev/null @@ -1,167 +0,0 @@ ---- -title: 'Introducing publisher tools: simple monetization for content owners and publishers' -description: 'An overview of the toolset and a sneak peek of the future plans' -date: 2025-05-20 -slug: introducing-publisher-tools -authors: - - Arpad Lengyel -author_urls: - - https://www.linkedin.com/in/lengyel-arpad85/ -tags: - - Interledger - - Web Monetization - - publisher tools ---- - -In a digital world where supporting content owners and publishers is becoming increasingly important, we wanted to build something that didn’t just preach the value of [Web Monetization](https://webmonetization.org/) - but made it accessible and easy for anyone to use. That’s how our [publisher tools](https://webmonetization.org/tools/) came to life. - -This toolset was born from a simple marketing goal: get the word out about Web Monetization. 
At its core, the publisher tools are a set of customizable elements that site owners can embed with just a simple script tag, designed to promote Web Monetization as a model for visitors to support their websites. - -## Built for Simplicity and Flexibility - -When designing the tools, we looked at popular one-time payment systems and donation models for inspiration, asking: -**How can we offer similar ease of use, but tailored for Web Monetization?** - -We decided early that our publisher tools would revolve around a handful of elements: - -- **Banner** -- **Widget** -- **Button** -- **Exclusive Content** - -Each element is fully customizable via a visual admin interface where users can configure text, colors, fonts, and more - with live preview support. _No coding skills needed._ - -![Create a banner](/developers/img/blog/2025-05-20/pt_example_banner.png) - -## Designed for a Set-It-and-Forget-It Experience - -Instead of manually updating every element whenever a change is needed (like tweaking a color or call-to-action text), creators can simply update their settings in the admin panel. - -Since all customization is stored server-side, any updates instantly reflect across all embedded elements. - -> The generated script tag is intentionally kept minimal: it contains only the wallet address and element type. All other settings are automatically pulled based on the wallet address. - -This approach lowers the technical barrier and ensures creators can focus on their content - not website maintenance. - -## Ensuring Wallet Ownership and Security - -To protect users and maintain integrity, wallet ownership confirmation is integrated. -Currently, this relies on an _interactive grant_, in the future, a more streamlined wallet verification mechanism will replace this process. - -Users are asked to confirm they own their wallets before a script tag is generated. -Without ownership confirmation, their customizations are not saved on the server. 
The validation steps help to maintain integrity, protect wallet owners, and ensure only the rightful owner of a wallet can update the configuration. - -## Default Configurations - -Every element type comes with a default, allowing anyone to get started instantly. - -If a site owner embeds an element without making any customizations, the default style and behavior will be applied automatically. - -Additionally, all embedded elements automatically insert the necessary `` tag, so adding the script is all it takes to enable Web Monetization - eliminating yet another technical step for creators. - -## Flexible Styling with Versioning - -A key feature of the publisher tools is **versioning**, which gives creators even more control over how the elements appear across different contexts - without needing multiple wallet addresses. - -### 💡 Why Versioning? - -Many creators publish content on multiple platforms or maintain websites with different styles. Others may run a single site that supports various themes (e.g., light and dark mode) or sections with distinct branding. With versioning, you can define multiple configurations - each with its own text, colors and fonts - for the same wallet address. - -For example, a creator might use: - -- A blue banner with a professional tone for their portfolio site -- A playful, colorful widget on their personal blog -- A dark-mode themed paywall for a night reading experience - -All of these can point to the same wallet, keeping the monetization flow unified while allowing the presentation to be fully tailored. - -### 🛠️ How It Works - -In the admin interface, creators can: - -- Create new versions of an element configuration (e.g., portfolio, blog, dark-mode) -- Edit existing versions at any time - -On the embed side, the script remains simple. By passing a `tag` value along with the wallet address in the script tag, the correct version is automatically selected and rendered. 
- -```html - -``` - -If no tag is provided, the default version is used - ensuring backward compatibility and quick setup for users who don't need multiple styles. - -![Banner display](/developers/img/blog/2025-05-20/pt_embedded_banner.png) - -
    - -## Deep Dive: The Elements - -### 🌎 Available Now: **Banner** - -- **Visibility**: Displayed to visitors who do not have the Web Monetization extension enabled. -- **Behavior**: Shown until the visitor either installs/enables the Web Monetization extension or manually closes the banner (dismissal lasts for the session). -- **Customization**: Site owners can define the text and the visual details (color, font, etc). The web store link for the Web Monetization extension, which is based on the visitor’s browser, is added automatically. -- **Fallback**: If the browser cannot be identified, the link points to the [Web Monetization website](https://webmonetization.org/). - -### 🔥 Coming Soon: Widget, Button, and Exclusive Content - -These features are actively in development and will soon be available: - -#### Widget - -- **Location**: Fixed position in the lower-right corner of the screen. -- **Purpose**: Always visible icon that, when clicked, opens a payment panel. -- **Functionality**: Enables visitors to make a one-time payment, supporting creators even without the extension installed. -- **Technical Note**: Payment proof of concept uses a secure iframe (using interledgerpay.com). Future iterations aim to fully embed Open Payments for seamless payment without page redirection. - -
    - -Widget example - -
    - -#### Button - -- **Flexibility**: Can be placed inline within content or components. -- **Behavior**: Opens the same payment panel as the widget. -- **Extras**: Has the option to add tooltips for additional explanations or calls to action. - -Create a button - -
    - -#### Exclusive Content - -- **Concept**: A simple paywall system. -- **Behavior**: Reveals only a preview (e.g., the first few paragraphs) and requires payment for full access. -- **Implementation**: Entirely client-side for simplicity. -- **Return Visitors**: Visitors who have previously paid can verify their payment and gain access without repurchasing during the set time interval. -- **Privacy**: Publishers and content owners never see or store a visitor's personal data - privacy is fully preserved. The verification is done by matching the payment against the visitor’s transaction history, as recorded by their wallet or identity provider. - -## What’s Next? - -**Publisher tools** is an evolving project. We’re watching how people use it, listening to feedback, and working toward a better integration with Open Payments. Our goal is to make supporting creators as frictionless as reading their content. - -With minimal setup, dynamic updates, and a focus on user-friendliness, it offers a straightforward way to embrace Web Monetization - whether you're a seasoned developer or just starting out. - -![publisher tools](/developers/img/blog/2025-05-20/pt_tools_collection.png) - -## Resources - -GitHub: [interledger/publisher-tools](https://github.com/interledger/publisher-tools/) - -[Web Monetization](https://webmonetization.org/) - -[Interledger Foundation](https://interledger.org/) diff --git a/src/content/blog/2025-06-04-ES-El-Universo-Interledger.mdx b/src/content/blog/2025-06-04-ES-El-Universo-Interledger.mdx deleted file mode 100644 index b9d4acf5..00000000 --- a/src/content/blog/2025-06-04-ES-El-Universo-Interledger.mdx +++ /dev/null @@ -1,254 +0,0 @@ ---- -title: 'El Universo Interledger' -description: 'Explora el sistema abierto que conecta pagos globales con Interledger.' 
-date: 2025-07-02 -lang: es -slug: el-universo-interledger -authors: - - Marian Villa -author_urls: - - https://www.linkedin.com/in/marianvilla/ -tags: - - Interledger - - Pagos Abiertos - - Inclusión Financiera ---- - -> ⚠️ **Nota:** Este artículo es una adaptación al español. -> 📘 This article is also available in English: [Interledger Universe – interledger.org](https://interledger.org/developers/blog/interledger-universe/). - -Si estás un poco abrumado por términos como el **Protocolo de Interledger, Interledger Stack, o Fundación Interledger, Estándar de Pagos Abiertos, Rafiki, La Moneda Rafiki, Dassie, Monetización Web (Extensión), STREAM, SPSP, Paquetes, etc…** Y te sientes aún un poco perdido, aquí estamos para aclararlo. Vamos a desglosar cada uno de los términos para traer a la luz el significado y de esta manera mostrar más claramente el Universo de Interledger. Empecemos con el término más obvio: - -## Interledger - -El término **Interledger** puede dividirse en el prefijo **“Inter”** que puede entenderse como “entre” y **ledger**, que en su definición más pura en el [diccionario](https://www.merriam-webster.com/dictionary/ledger) -, traduciría: Un libro. Un libro que contiene las cuentas donde se registran los débitos y créditos de los libros de registro contables. Si juntamos ambos conceptos, Interledger significa que el sistema de pagos puede hacerse entre múltiples libros contables, conocidos como ledgers. - -**¿Qué significa esto, cómo funciona?** -Digamos que tengo una cuenta en Alemania y quiero transferir dinero a mi amigo Allan en Sur África. ¿Qué opciones tengo? Puedo empezar una transferencia internacional desde mi cuenta bancaria a la de Allan, lo que usará será una red SWIFT para intercambiar mensajes de pago. Probablemente la transferencia va a tomar al menos 3 días para verse reflejada en la cuenta bancaria de Allan y va a costarme relativamente una gran comisión.
También puedo usar un servicio como [Wise](https://wise.com/), pero esta es una plataforma cerrada que requerirá que me registre, y Allan necesitará también crear su cuenta o diligenciar un formulario con el Banco de Reserva de Sur África antes de recibir sus fondos. - -También es importante entender que este servicio es regulado, porque utiliza software privado, entonces como usuario no tengo una manera de entender cómo es procesada mi data, y en este caso hipotético solo queda confiar. Y el caso se complicaría si asumimos que Allan no tiene una cuenta tradicional de banco, ya que solo tiene un proveedor de dinero móvil. **¿Cómo transferiríamos fondos a él, entonces?** - -Interledger fue diseñado para ser una serie de nodos que impulsa mensajes de pago en el sistema, teniendo en cuenta la conversión de la moneda, donde la ‘moneda’ puede ser cualquiera que incluya un valor para transar, incluyendo monedas fiduciarias, criptomonedas o dinero móvil. - -![Imagen 1 - Red de Pagos](/developers/img/blog/2025-06-04/network-Interledger.png) - -Ahora, continuando con el ejemplo, si mi cuenta bancaria está en Euros y la cuenta Móvil de Allan está en Moneda Surafricana, Rands, como vemos en el gráfico de la Red de Pagos, hay múltiples maneras de que mi dinero pueda ser enviado y enrutado a Allan. Lo que hace **Interledger** especial, es que se asegura que los paquetes lleguen con la ruta más rápida y barata, desde mi nodo de Interledger hasta el nodo de Interledger de Allan. Interledger está diseñado para ser una red encima de las redes existentes de pagos, y por esto logra interoperabilidad entre todas las capas. - -## Fundación Interledger - -Ahora que entendimos la parte técnica, podemos introducir **la Fundación Interledger**. -Es una fundación constituida en Estados Unidos cuya visión es enviar dinero de manera fácil, como si enviaras un correo electrónico.
- -La Fundación Interledger tiene bajo su dirección y cuidado, el Protocolo Interledger y sus protocolos asociados, y se dedica a desarrollar y fomentar la inclusión financiera en sus sistemas alrededor del mundo. - -La estrategia global es soportar la investigación y desarrollo de los sistemas de inclusión financiera en áreas vulnerables, fondear a través de subvenciones a poblaciones no representadas en el ecosistema financiero, cambiando el paradigma en este sistema; adicional creando una comunidad abierta e inclusiva, **la Comunidad Interledger**, que crece a través de la conversación abierta, uniendo otras voces y perspectivas, al espacio fintech. - -**¿Cuáles son los protocolos que desarrolla y mantiene la fundación?** - -## La Arquitectura de Interledger - -La Arquitectura de Interledger tiene un gran parecido a la Arquitectura de Internet, consiste en múltiples capas, y esto no es una coincidencia, ya que la arquitectura de Interledger fue modelada posterior a la arquitectura de Internet. Entonces por esto, cada capa de la arquitectura de Internet tiene su equivalente en la Arquitectura de Interledger. Cada capa sirve para una función específica que interactúa con capas tanto arriba como abajo. - -Exploremos cada una de las capas de la Arquitectura de Interledger desde abajo. - -![Imagen 2 - Infraestructura Interledger](/developers/img/blog/2025-06-04/infraestructura-Interledger-espanol.jpg) - -Si prefieres una versión en video de la Arquitectura de Interledger puedes ver esta presentación de su Arquitectura en [Youtube](https://youtu.be/sqGjkZKFjgo). - -### Capa de Infraestructura - -La infraestructura por sí misma no es una parte técnica de la arquitectura, pero es una parte esencial para que los protocolos funcionen. En esta capa es donde se define el valor a intercambiar entre las partes. Este acuerdo de intercambio puede ocurrir entre monedas _Fiat, Crypto, o Dinero Móvil, o cualquier activo de valor acordado_.
Aquí es donde se define el valor intercambiado entre las partes, como créditos de Starbucks o incluso granos de café físicos. - -Esta capa se asegura que una vez el pago se haya liquidado, la transferencia de valor ha sido ejecutada con las partes involucradas. Usualmente, los nodos conectados, también llamados conectores, entran en un acuerdo de sincronización para definir la línea de crédito que se están extendiendo entre cada nodo para facilitar que el acuerdo se de. Esta transacción puede ocurrir en un punto predefinido o donde esté la línea de crédito, también llamada liquidación entre pares, y allí es completada. - -Es importante aclarar que en caso de que se utilice una crypto para realizar el acuerdo entre los nodos, esto pasa automáticamente en el acuerdo entre pares, porque las blockchains fuerzan el acuerdo según sus capacidades cryptográficas y a su ejecución. - -### Capa de Enlace - -La Capa de enlace define como los conectores en pares se comunican. Actualmente existen dos grandes protocolos en esta capa: - -> **Protocolo Bilateral de Transporte [(BTP)](https://interledger.org/developers/rfcs/bilateral-transfer-protocol/):** Usa comunicación basada en **WebSocket\*** entre conectores. -> **[ILPoverHTTP](https://interledger.org/developers/rfcs/ilp-over-http/):** Utiliza HTTPS para la comunicación entre conectores. -> Estos protocolos establecen la conexión necesaria para que las capas superiores funcionen. - -### Capa de Protocolo - Protocolo Interledger (ILP) - -El core de la Arquitectura de Interledger está basado en el [Protocolo Interledger (ILP)](https://interledger.org/developers/rfcs/interledger-protocol/). Este protocolo divide grandes pagos en pequeños paquetes cuyo contenido prescribe y define un protocolo de transferencia de dos fases. - -_¿Por qué se usa un proceso de dos fases en vez de un proceso de una sola fase en este protocolo de transferencia?_ - -Empecemos ejemplificando una **Transferencia de una Fase**. 
- -![Imagen 3 - Transferencia de una sola fase](/developers/img/blog/2025-06-04/transferencia-ILP-Interledger-Protocol-unafase.png) - -Alice representada a la izquierda en la imagen, es un cliente de un servicio de cuentas de Identidad (ASE) que corre sobre un nodo conector de Interledger (Nodo A). - -- Un **Servicio de Cuentas de Identidad** ayuda a proveer y sostener la cuenta entre el pagador y el recibidor del pago, y es regulado por una entidad en el país o países donde opera, algunos ejemplos pueden ser bancos, proveedores de dinero móvil, etc. - -Bob en el gráfico representado a la derecha es un cliente del Servicio de Cuentas de Identidad que corre sobre el Conector de Interledger (D). - -Para que el pago de Alice llegue a Bob, el conector (A) necesita pasar los paquetes al conector (B), que necesita a su vez pasar los paquetes al conector (C), que a su vez necesita pasar al conector (D). En el escenario más optimista, ASE debitará de la cuenta de Alice, para pasar el pago hasta Bob. - -Pero, ¿Qué pasaría si por alguna razón, el conector (C) no logra pasar el pago al conector (D)? De la cuenta de Alice el dinero ya fue debitado, pero Bob no recibió los fondos. - -Para evitar el riesgo de que la transacción falle y no llegue al usuario final, entre Alice y Bob, a través de los nodos conectores, el Protocolo de Interledger define una **Transferencia de Dos Fases**. - -### Transferencia de Dos Fases: - -![Imagen 4 - Transferencia de dos fases](/developers/img/blog/2025-06-04/2p-transfer-es.jpg) - -A través del **Protocolo Interledger ILP**, la transferencia comenzará enviando al conector (A) un paquete construido en ILP que contiene la dirección ILP del que recibe, una condición de ejecución que tendrá un monto y un tiempo de expiración. El conector que envía también incluirá información adicional como el formato que se determinará por el protocolo de más alto nivel en uso. 
Luego, el paquete irá al conector (B) sobre un canal autenticado, y tendrá una configuración usando una capa de enlace al protocolo. - -El Conector (B) verificará con el conector (A) el balance de liquidez, y si hay recursos suficientes, debitará el monto desde la cuenta del conector de liquidez. El conector usa el enrutamiento a través de tablas para determinar el siguiente salto, y así ajustar la cantidad de recursos en el paquete que envía, y el tiempo de expiración de la tasa de la transacción, impulsando finalmente el paquete a seguir su camino. - -Este proceso se repitirá hasta que el paquete haya llegado a su destino, el conector recibidor (D). El recibidor validará que el paquete cumpla, basado en un protocolo de alto nivel, que aceptará retornando con un **_Paquete ILP Completado_** con una preimagen de la condición, o rechazando con un **_Paquete ILP Rechazado_**. Si es aceptado, cada conector en la cadena verificará el cumplimiento y créditos disponibles al siguiente conector hasta que el emisario original es alcanzado. - -El conector emisor revisa el cumplimiento de los párametros enviados contra la condición original, y graba la transacción, y repetirá el proceso hasta completar la cantidad deseada para realizar la transferencia. Este ciclo garantiza la seguridad, eficiencia y uso de múltiple monedas que se pueden manejar a través de la red de conectores, manteniendo la integridad y tiempo de cada paquete transferido. - -El protocolo es específicamente diseñado para transacciones de valores pequeños. Si el conector (A) y (B) se conectan usando paquetes, digamos que 1 centavo, perdiendo un par de ellos debido a problemas en la red, pueden realizarlo rápidamente. - -Sin embargo la conexión de (A) y (B) está basado en transacciones pequeñas por un centavo sobre un billón (1/1,000,000,000), así que perder una pequeña cantidad será inconsecuente cuando se cierre el arreglo. 
- -### Capa de transporte: Direcciones de ILP y Apuntadores de Pago - -Las [direcciones ILP](https://interledger.org/developers/rfcs/ilp-addresses/) son parte fundamental del Protocolo Interledger, sirviendo como un identificador único de cuentas en la red de Interledger. Estas direcciones siguen un formato jerárquico similar a las direcciones IP de Internet, habilitando el enrutamiento eficiente de paquetes entre diferentes **_‘Ledgers’_**. - -La **Estructura de una Dirección ILP** consiste en diversos componentes: - -- **Asignación (Allocation)**: Esta es la primera parte de la dirección que indica el tipo de red. Por ejemplo, `g` es usado para redes globales disponibles, y `test` es usado para pruebas de red. -- **Vecindario (Neighborhood)**: Siguiendo la lógica de asignación, el vecindario especifica el grupo de conectores o **_‘ledger’_** o instituciones. Por ejemplo, `sepa` puede representar las ‘ledgers’ [solamente en pagos en Euros](https://en.wikipedia.org/wiki/Single_Euro_Payments_Area) en un área o `us-fed`, puede representar la Reserva Federal de los Estados Unidos. El objetivo de los vecindarios es agrupar conectors y **_‘ledgers’_** que se conocen, o son compatibles, para que el enrutamiento sea más eficiente. -- **Identificación de Cuenta**: Esta parte identifica específicamente una cuenta dentro de el **_‘Ledger’_**, es único en cada propietario de cuenta, y se asegura que los fondos están siendo enrutados correctamente al destinatario. -- **Interacción (Opcional)**: Finalmente, la interacción codifica la lógica del negocio y varia para cada transacción, permitiendo que múltiples llamados sean identificados. - -Un ejemplo de una dirección ILP luce así: -`g.us-fed.ach.acmebank.acmecorp.~ipr.73WakrfVbNJBaAmhQtEeDv.2` - -- La `g` indica que es una red global. -- `Us-fed.ach` representa el vecindario (La Reserva Federal de los Estados Unidos dentro de una red de ACH). -- `Acmebank.acmecorp` es la identificación de la cuenta. 
-- `~ipr.73WakrfVbNJBaAmhQtEeDv.2` es la interacción. - -### Apuntadores de Pagos - -Los [Apuntadores de pagos](https://paymentpointers.org/) son una forma amigable de representar las **direcciones ILP**, similar a cómo las URLs presentan las direcciones IP. Esto hace que sea más fácil para el usuario manejar y compartir sus direcciones de pago. - -Un apuntador de pago siempre tiene un prefijo de señal de dólar ($) seguido de una estructura similar a la de una URL. Por ejemplo: `$wallet.com/alice` este apuntador de pago resuelve en una URL `https://wallet.com/alice` que apunta a una dirección ILP. - -Un ejemplo: `test.wallet.alice` (Que no contiene la parte de interacción). - -Los apuntadores de pagos pueden ser hosteados en la raíz del dominio. En ese caso, un apuntador de pagos como `$mymarketplace.com` enmascara esta dirección: **https://mymarketplace.com/.well-known/pay** y dirige a una dirección ILP como: `g.wallet.mymarketplace` - -Cuando avancemos a la sección de la **_Capa de Aplicación_**, volveremos sobre este concepto de apuntado de pagos, específicamente en la sección del **_Protocolo Simple de Configuración de Pagos (SPSP)_**. Si quieres saltar directamente a esta área, puedes dar el salto hasta esa [sección específica](https://interledger.org/developers/blog/interledger-universe/#capa-de-aplicacion). - -### Capa de transporte: - El Protocolo STREAM - -La Capa de Transporte construida sobre ILP provee funcionalidades adicionales por manejar la transferencia de valor. El único protocolo soportado al momento es el Protocolo [STREAM](https://interledger.org/developers/rfcs/stream-protocol/) **_(Streaming Transport for Real-time Exchange of Assets and Messages)_**. - -![Imagen 4 - Protocolo STREAM](/developers/img/blog/2025-06-04/Protocolo-STREAM.gif) - -**STREAM** es un protocolo versátil y seguro para transportar el **Protocolo ILP**, facilitando eficientemente y de forma escalable la transmisión de dinero e información.
El protocolo ofrece un rango de funcionalidades diseñadas para optimizar las transacciones basadas en ILP como: - -- **Transferencia de dinero e información**: Permitiendo simultáneamente dinero e información. -- **Segmentación y reensamblado de paquetes**: Segmentación de grandes pagos o mensajes en pequeños paquetes de información, para mejor transmisión y reensamblado final. -- **Comunicación Bidireccional**: Soporta comunicación en dos vías, facilitando el intercambio de dinero o información en ambas direcciones. -- **Multiplexidad de transmisión**: La lógica múltiple de transmisión puede ser enviada sobre una conección ILP, con IDs numéricos asignados para evitar que colapse. -- **Flujo y Control de Congestión**: Ajustar la tasa de intercambio entre monedas y transferencia de datos basado en las condiciones de la red para mantener la eficiencia. -- **Autenticación y Encriptación**: Asegura la seguridad a través de la autenticación y encriptación de los paquetes de datos. -- **Generación de condiciones y cumplimiento**: Manejar las condiciones de generacion de paquetes ILP y su cumplimiento, asegurando la integridad de la transacción. -- **Migración de Conexión**: Soporte ininterrumpido de transmisión, a pesar de cambios en la conexión. - -**STREAM** también maneja los patrones de cambios de tarifas efectivamente, e incluye un minimo aceptable de cantidad en ILP para preparar los paquetes y recibir un monto, y verificar de esta forma cumplimiento o de lo contrario, rechazar paquetes. De esta manera le permite a quien envía, fijar las cantidades y el precio en sus propias unidades, usando una calculadora para calcular la tarifa de intercambio en esa transacción, y también descartar el paquete de **_test_** que fue usado al iniciar la conexión. El protocolo se asegura que la preparación de paquetes que siguen con cantidades menores a las especificaciones, no serán tomadas en cuenta para el cumplimiento. 
- -**_Nota_**: Los paquetes de STREAM incluyen en el campo de datos, un paquete de ILP. - -### Capa de Aplicación - -La Capa de Aplicación es la capa final de la Arquitectura Interledger, haciendo visible para el desarrollador funcionalidades y habilitando varias posibles implementaciones. Los dos protocolos habilitados en esta capa son SPSP (protocolo de Configuración Simple de Pagos) y el Protocolo de Pagos Abiertos. - -[SPSP](https://interledger.org/developers/rfcs/simple-payment-setup-protocol/) simplifica el proceso de configuración de pagos. Cuando llega una petición **_GET_** relacionada a una URL asociada a un apuntador de pago que usa la petición de encabezamiento SPSP, SPSP define qué necesita ser retornado - -```http wrap -HTTP/1.1 200 OK -Content-Type: application/spsp4+json -{ - "destination_account": "example.ilpdemo.red.bob", - "shared_secret": "6jR5iNIVRvqeasJeCty6C+YB5X9FhSOUPCL/5nha5Vs=", - "receipts_enabled": true -} -``` - -Esto incluye la `destination_account` (Cuenta de destino) que es la dirección ILP del que recibe, y comparte un `shared_secret` (Secreto) para encriptar los paquetes de STREAM, que incluye un identificador `receipts_enabled`, (recibos habilitados) indicando cuando un [recibo de STREAM](https://interledger.org/developers/rfcs/stream-receipts/) fue requerido. SPSP asegura una configuración de pago segura y directa para entidades o individuos con un acceso ILP, esto significa que entidades o individuos pueden crear, enviar, y recibir paquetes ILP directamente sin ayuda de otra entidad. - -[Pagos Abiertos](https://interledger.org/developers/rfcs/simple-payment-setup-protocol/) es una API estándar para entidades de servicios financieros, permitiendo que terceros puedan asegurar acceso a sus cuentas digitales para ver su información de cuenta e iniciar un pago. Pagos Abiertos soporta complejos escenarios de pagos como e-commerce o pagos recurrentes, facilitando un robusto marco de trabajo para autorizar e iniciar pagos digitales.
Emplea una [Negociación de Subvenciones (Grant Negotiation)](https://datatracker.ietf.org/doc/html/draft-ietf-gnap-core-protocol-20) y un Protocolo de Autorización (GNAP) para Control de acceso preciso y autorización segura. - -Para entender de forma más amplia Pagos Abiertos, puedes revisar este artículo en inglés ['Open Payments Guide'](https://interledger.org/developers/blog/simple-open-payments-guide/). Si deseas hacer una revisión de más alto nivel del Protocolo de Autorización (GNAP) y dónde está siendo usando en Pagos Abiertos, puedes revisar este [Artículo de Nathan’s (EN)](https://interledger.org/developers/blog/open-payments-cinderella-story/) - La historia de Cenicienta: cómo encontrar un método de autorización adecuado. - ---- - -## Monetización Web - -La [Monetización Web](https://webmonetization.org/) no es parte de la Arquitectura de Interledger, pero de cara al usuario es una aplicación que se sitúa en el top de la Arquitectura ILP. - -![Imagen 5 - Monetización Web](/developers/img/blog/2025-06-04/MonetizacionWeb-Infraestructura-Interledger.jpg) - -La Monetización Web es un estándar propuesto por la [W3C](https://www.w3.org/) que facilitará los pagos sin complicaciones directamente desde el navegador. Permitirá a los visitantes de un sitio web con interacciones mínimas pagar la cantidad elegida. Como un estandar propuesto, la meta con la Monetización Web es que nativamente pueda realizarse a través de los navegadores estas transacciones; Sin embargo, ningún navegador actualmente soporta esta funcionalidad. Por esto la Fundación Interledger está trabajando en una extensión de Monetización Web para habilitar esta funcionalidad inmediatamente. - -Cuando un navegador web (o en su defecto, la extensión para Monetización Web) encuentre la manera de ‘monetizar’ un sitio web, el sitio automáticamente podrá enviar una señal con su habilidad para aceptar pagos. 
Una vez la extensión o el navegador obtiene la autorización del Usuario de usar la Monetización Web en la fase de configuración, traerá todos los detalles del pago necesarios y las instrucciones para mover el dinero utilizando la API de Pagos Abiertos. - -El navegador luego creará una sesión de pago y comunicará el evento de pago de vuelta al sitio. En respuesta, el sitio web puede proveer beneficios para retribuir a los visitantes de su sitio, como remover anuncios o darle acceso a contenido exclusivo. - -Este acercamiento pretende crear una manera más intuitiva de integrar la experiecia de los usuarios y los creadores de contenido, promoviendo un nuevo modelo para la monetización web que sea eficiente y preserve la privacidad, y se enfoque en la experiencia del usuario. - ---- - -## Rafiki - -Rafiki fue creado como una [referencia en la implementación](https://github.com/interledger/rafiki) de la Arquitectura de Interledger. No es una billetera, no es una plataforma o servicio, es un software. - -![Imagen 6 - Monetización Web](/developers/img/blog/2025-06-04/rafiki-es.jpg) - -[Rafiki](https://rafiki.dev/) es un software de código abierto, esto significa que puede usarse de manera gratuita y abierta. El propósito de Rafiki es minimizar el esfuerzo de las organizaciones de incorporar Interledger en las cuentas de los usuarios y ser conector con la red ILP. Rafiki usa ILPoverHTTP en vez de BTP porque se asume que los paquetes serán grandes al igual que las transacciones. Por esto los pagos son divididos en pocos paquetes, haciendo que establecer una conexión tipo socket sea excesiva. - -### Rafiki.money, ‘testnet’, y ‘test network’ - -Tenemos que admitir que fuimos un poco perezosos para elegir nombres en nuestra red de pruebas para demostrar nuestra tecnología. Comenzamos creando una billetera de prueba que en ese momento, y hasta ahora, no tenía nombre pero lo alojamos en [rafiki.money](https://rafiki.money/). 
-En esta Simulación de una Cuenta de Servicio de Entidades, el usuario puede crear su cuenta, pasar por un flujo simulado tipo **_KYC_**, y tener la posibilidad de retener un balance de prueba y enviar o recibir pagos a través de Interledger. -La Billetera de prueba está integrada con el ambiente de prueba [Rapyd’s](https://www.rapyd.net/) para tener los balances y con Rafiki para facilitar los pagos. Sin embargo el ambiente de prueba de Rapyd’s es muy limitado de acuerdo a las restricciones de su API, entonces seguimos explorando mejores alternativas. - -Actualmente estamos: - -- En el proceso de encontrar un nombre para nuestra billetera de prueba, para que la gente no se confunda con 'Rafiki', la referencia de implementación del ILP. -- Adicionalmente estamos cambiando cómo luce la interfaz de la billetera para alejarnos un poco más, dándole más identidad. - -La Billetera de prueba despliega una instancia de Rafiki, lo que significa que en ese nodo de prueba en Interledger está corriendo un conector de Interledger también. - -Estamos trabajando para tener futuros integradores de Rafiki a través de Cuentas de Licencia en el Servicio de Entidades, para conectar al menos con la billetera de prueba en vez de probar su funcionalidad y crear una red grande de pruebas. - -También usamos el término **_‘testnet’_** para describir todas las herramientas que hemos desarrollado alrededor de la billetera de pruebas. -Ejemplo: Una [Boutique](https://rafiki.boutique/products) para experimentar cómo se comporta en un eCommerce el sistema de Pagos Abiertos. Sin embargo, hemos decidido no seguir usando este término para reducir la confusión con la red de pruebas. - ---- - -## ¿Qué es Dassie? - -[Dassie](https://dassie.land/) es la segunda referencia de implementación de la Arquitectura ILP, pero está dirigida a usuarios de Crypto Monedas y desarrolladores lejos de Entidades de Servicio de Cuentas.
No es desarrollado directamente por la Fundación Interledger, es un proyecto personal liderado por [Stefan Thomas](https://x.com/justmoon), uno de los creadores del Protocolo Interledger. - -Si bien sirve a dos mundos diferentes, un nodo de Dassie puede emparejarse con un nodo de Rafiki, por ejemplo, si el nodo de Rafiki está ejecutando un intercambio de Crypto Monedas. - ---- - -## Reflexiones Finales - -- Navegar el Universo de Interledger puede ser un poco abrumador al inicio por la cantidad de términos y conceptos para asimilar. Sin embargo en su core, Interledger busca facilitar de manera práctica, simple, eficiente y segura la forma de transferir valor a través de diversos ‘Ledgers’ y Monedas. Desde la Arquitectura Interledger hasta la Referencia de Implementación, Rafiki o Aplicaciones de uso específicas como Monetización Web. Cada componente juega un rol crucial en la realización de nuestra misión: **Una red financiera interoperable y unificada**. -- El ecosistema Interledger está diseñado para promover innovación y accesibilidad en el mundo financiero, sea habilitando pagos a través de la monetización web, simplificando el servicio de cuentas de Pagos Abiertos, o probando nuevas funcionalidades en la billetera de prueba. Entendiendo estos elementos y sus interacciones, podemos apreciar el potencial del Protocolo Interledger para revolucionar el panorama global de pagos y el intercambio de valores. -- Te invitamos a seguir refinando y expandiendo estas herramientas, contribuyendo a que la visión de Interledger sea una realidad. Nuestra misión es enviar dinero o activos a través de la red tan fácil como si fuera un correo electrónico, impulsando un ecosistema inclusivo donde la innovación construye puentes en el sistema financiero. El futuro interconectado del sistema financiero está aquí, y estamos muy emocionados del futuro que nos espera. 
- -**_Gracias al equipo que hace esto posible, y a los contribuidores principales de este artículo: Sarah, Radu, Melissa, Tseli, Mohammed, Max, y Chris_**. - ---- - -**_En Interledger somos de código abierto, así qué puedes verificar fácilmente nuestro trabajo en [GitHub](https://github.com/interledger/). Si este blogpost y las tecnologías aquí mencionadas te inspiraron, agradecemos tus contribuciones. Puedes unirte a nuestra [Comunidad en Slack](https://communityinviter.com/apps/interledger/interledger-working-groups-slack) o participar en la próxima llamada de la [Comunidad Interledger](https://community.interledger.org/), que tiene lugar el segundo miércoles de cada mes. -Si deseas mantenerte actualizado con todas las oportunidades y noticias de la Fundación Interledger, puedes suscribirte a nuestro boletín 🤓💪_**. - -**_🇺🇸🇬🇧[English article](https://interledger.org/developers/blog/interledger-universe/) Written by [Sabine Schaller](https://www.linkedin.com/in/sabineschaller)_** diff --git a/src/content/blog/2025-09-09-memorable-wallet-addresses-custom-domain.md b/src/content/blog/2025-09-09-memorable-wallet-addresses-custom-domain.md deleted file mode 100644 index 61b40899..00000000 --- a/src/content/blog/2025-09-09-memorable-wallet-addresses-custom-domain.md +++ /dev/null @@ -1,246 +0,0 @@ ---- -title: 'Memorable wallet addresses on your own domain' -description: 'If you own a domain, you can set your Open Payments wallet address to be the same as your domain!' -date: 2025-09-09 -slug: memorable-wallet-addresses-custom-domain -authors: - - Sid Vishnoi -author_urls: - - https://sidvishnoi.com?ref=ilf_engg_blog -tags: - - Open Payments - - Web Monetization ---- - -Wallet addresses are meant to be easy to remember or identify, unless your wallet provider chooses them for you. The address might include a long subdomain or even a random series of numbers and characters. 
But did you know that if you own a domain, you can set your wallet address to be the same as your domain? - -So, instead of `https://ilp.wallet.example/12345432/usd`, you can have `$mywebsite.com` as your wallet address! Technically, wallet addresses with the `$` are referred to as [payment pointers](https://paymentpointers.org/) and those starting with `https://` are the actual Open Payment wallet addresses. However, we'll use both terms interchangeably in this article. - -![Illustration showing turning long wallet addresses to custom domain payment pointers](/developers/img/blog/2025-09-09/memorable-wallet-addresses-on-own-domain.png) - -You can change the underlying wallet without having to share a new address. Not to forget the advantages of branding and the trust that comes with it, as people will know that this is your wallet attached to your online identity - your domain. The [Web Monetization extension](https://webmonetization.org/supporters/get-started/) also supports these custom wallet addresses. - -I personally use `$sidvishnoi.com` (which maps to my [GateHub wallet](https://gatehub.net/)). Feel free to send me money now that you remember the address! - -Alright, so how do we get that address? - -## Set up custom payment pointer domain - -Having a domain is a must-have for this to work. If you don't own one, you can use a subdomain provided by your web hosting provider, but your own domain is better. - -I'll share a few approaches in this article, and later explain how these custom addresses relate to Web Monetization. The essential part in each approach is to have `https://{yourdomain.com}/.well-known/pay` to either redirect or rewrite to the wallet address you want to alias. - -### Configure in web host - -If your web hosting provider allows custom URL redirects or rewrites, you can use this approach. It's the easiest to maintain and set up, requiring no coding. - -#### Cloudflare - -For instance, let's consider Cloudflare. 
They have a concept of "rules" that execute specific actions when a request (or URL) satisfies certain conditions. For simplicity, we will utilize what they refer to as a "page rule". - -1. Access your website in the Cloudflare dashboard. -2. Go to the Rules section in the sidebar and create a new Page rule. -3. In the URL field, enter `{yourdomain.com}/.well-known/pay*`. -4. In settings, select "Forwarding URL" with a 302 - Temporary Redirect. -5. In the Destination URL field, write the wallet address your wallet provider gave and type `$1` at the end.\ - E.g., if your wallet address is `https://ilp.wallet.com/abc/xyz`, enter `https://ilp.wallet.com/abc/xyz$1`.\ - The `$1` gets replaced by whatever content was there in place of `*`: `/.well-known/pay/jwks.json` will become `/abc/xyz/jwks.json`; `/.well-known/pay/` will become `/abc/xyz/`. -6. Click Save Page Rule, and you're ready. - -![Screenshot showing Cloudflare dashboard with the Page rule editor showing setup of my domain using a redirect to my GateHub wallet](/developers/img/blog/2025-09-09/cloudflare-page-rule.png) - -There's a small catch, though. If there are tools that retrieve your payment pointer contents on the client side (for instance, [Web Monetization Publisher tools](https://webmonetization.org/tools/)), you'll need to ensure that your wallet address is set up to enable cross-origin requests, otherwise those tools may fail with a CORS error. No worries, we can easily add a new rule that lets any website access our wallet address directly from the client side. - -1. In the Rules section of Cloudflare dashboard, create a new Transform rule. -2. Use anything in the Rule name, say, "Enable CORS for wallet address" -3. Choose a custom filter expression for the incoming request:\ - Field: URI Path\ - Operator: wildcard\ - Value: `/.well-known/pay*` -4. Then, modify the response header:\ - Header name: `Access-Control-Allow-Origin`\ - Value: `*` -5. Save.
Now, this will enable CORS for your wallet address on any website. - -![Screenshot of Cloudflare dashboard with a transform rule to support CORS for our wallet address](/developers/img/blog/2025-09-09/cloudflare-transform-rule.png) - -#### NGINX - -With a server using NGINX, the configuration is straightforward: - -```nginx -# nginx.conf -location ~ /.well-known/pay(.*) { - add_header Access-Control-Allow-Origin *; # enable cors - return 302 https://ilp.wallet.com/abc/xyz$1; # do a 302 redirect to original wallet address -} -``` - -You get the idea. - -### `_redirects` file - -Some providers, especially those that host static websites, support a [`_redirects` file](https://docs.netlify.com/manage/routing/redirects/overview/) where you can create a mapping of URL redirects. There's also a companion [`_headers` file](https://docs.netlify.com/manage/routing/headers/) in some providers, which is particularly practical for enabling CORS support. - -Static sites hosted on platforms like Netlify and Cloudflare can utilize this approach. It's important to ensure that the `_redirects` and `_headers` files are located in the top-level of your build directory. Depending on your static site generator, you may need to place these files in the "public" folder of your site's source code. - -```ini -# _redirects -/.well-known/pay https://ilp.wallet.com/abc/xyz 302 -/.well-known/pay/jwks.json https://ilp.wallet.com/abc/xyz/jwks.json 302 -``` - -```ini -# _headers -/.well-known/pay* - Access-Control-Allow-Origin: * -``` - -Other providers like [Surge](https://surge.sh/help/adding-redirects) and [Vercel](https://vercel.com/docs/redirects) may have their own syntax for the redirects file. - -### Dynamic rewrite/redirect from website - -If you have complete control over your website's routes, you'll get the best results. Instead of redirecting, you can use rewrites. 
This way, the people directly visiting your wallet address won't see the URL in the address bar change to your original wallet address. I'll share a few examples. - -#### Node.js / Express - -```javascript -// app.js -const WALLET_ADDRESS = 'https://ilp.wallet.com/abc/xyz'; - -app.get('/.well-known/pay', async (req, res) => { - const json = await fetch(WALLET_ADDRESS).then(r => r.json()) - res.json(json) -}) -app.options('/.well-known/pay', (req, res) => { - res.set('Access-Control-Allow-Origin', '*').end() -}) - -app.get('/.well-known/pay/jwks.json', (req, res) => { - res.redirect(302, `${WALLET_ADDRESS}/jwks.json`) -}); -``` - -#### Cloudflare Workers - -Even if you don't host your entire website on Cloudflare, you can create a worker there (using your domain or subdomain), just for the custom wallet address. - -```javascript -// index.js -const WALLET_ADDRESS = 'https://ilp.wallet.com/abc/xyz' - -export default { - async fetch(request) { - const url = new URL(request.url) - if (url.pathname === '/.well-known/pay') { - const json = await fetch(WALLET_ADDRESS).then((r) => r.json()) - return Response.json(json) - } - // ... handle other requests, or by default, return error - // return new Response('not found', { status: 404 }) - } -} -``` - -#### WordPress - -While you can write [some PHP code](https://learn.wordpress.org/tutorial/wordpress-rest-api-custom-routes-endpoints/) for this functionality, I would recommend using an existing plugin to manage redirects and rewrites. The [redirection](https://wordpress.org/plugins/redirection/) plugin can be a good option. Their docs should guide you well. - -### Static website - -If you only have a static website and are stuck with a hosting provider that doesn't allow customizations like mentioned above (e.g., GitHub Pages), you can still have a custom payment pointer, but there might be some limitations. - -You cannot have `$mywebsite.com`, but `$mywebsite.com/pay.json` may work.
It might be better or worse than what your wallet provider gave you. - -In case you're wondering, no, the [`http-equiv` HTML meta tag](https://stackoverflow.com/a/5411567) based redirect won't work. The redirects have to be at HTTP-level. - -1. Grab the JSON response for your original wallet address.\ - You can use online services like [https://jsonviewer.stack.hu](https://jsonviewer.stack.hu) or [https://hoppscotch.io](https://hoppscotch.io) to view the JSON response, or use `curl` if you're into those things.\ - Opening the wallet address URL directly in your browser may not show you the JSON response, as some wallets use it as a landing page for others to send you money. -2. Create a `pay.json` file (or use any other name, it just needs to have the `.json` extension), and paste in the above JSON. For example: - - ```shell - # for example, with curl - $ curl -sSL -H 'Accept: application/json' 'https://ilp.gatehub.net/981946513/eur' > pay.json - - $ cat pay.json - # {"id":"https://ilp.gatehub.net/981946513/eur","publicName":"981946513","assetCode":"EUR","assetScale":2,"authServer":"https://rafiki.gatehub.net","resourceServer":"https://ilp.gatehub.net"} - ``` - -3. Deploy your static site. - -Note that, given the lack of control over headers, you may face CORS issues as explained above, but it'll work with most other Open Payments uses. - -More importantly, you will have to ensure the content of your `pay.json` matches the JSON response that was received from your original wallet address, in case the response changes in future. - -## How does it work with Web Monetization - -### The `<link>` element - -A payment pointer, such as `$mywebsite.com`, is convenient for many purposes. However, when you want to add your wallet address to your website as a Web Monetization receiver, you need to convert that payment pointer to the Open Payments "wallet address" format for use with the `<link>` tag ([why?](https://github.com/WICG/webmonetization/issues/19#issuecomment-705407129)).
Slightly disappointing, but hey, you only need to do this once. - -You can use the [link tag generator](https://webmonetization.org/tools/link-tag/) to convert the payment pointer to a valid link tag. - -If you're using the same domain as your website, you can use the link element's URL resolution by writing the `<link>` tag like the following: - -```html -<link rel="monetization" href="/.well-known/pay" /> -``` - -Aside: During local development (i.e. with localhost or custom dev domain), if you have the Web Monetization browser extension installed and are using a CDN or host-level configuration, the extension won't resolve to your actual wallet address. No real money will be sent during regular website development, which can be very handy. And when you want to actually test Web Monetization integrations, you can resolve the URL to a different [test wallet](https://wallet.interledger-test.dev/) address. - -### Usage in extension - -The Web Monetization browser extension allows you to use any payment pointer or wallet address to connect your wallet as a sender. This means using a custom, branded payment pointer works the same as using your wallet's address directly. So, feel free to use your branded payment pointer there as well! - -Once you've connected using your custom payment pointer, the extension will show you both the custom payment pointer and the corresponding original wallet address on the Settings screen. - -Aside: The Web Monetization API doesn't inform websites which wallet address was used by the sender, so websites cannot directly correlate the payments to your identity (your domain name). The website's wallet may display information about the sender, but the wallet itself will only know of the original Open Payments wallet address, not your custom alias. - -### `MonetizationEvent` - -When we add such an aliased wallet address to our website, there's an indirection involved.
This sort of indirection can also arise from using the [probabilistic revenue sharing generator](https://webmonetization.org/tools/prob-revshare/). The wallet itself can provide a wallet address alias, for example, as it's shorter or promotes the brand better. - -When a `monetization` event is emitted, how do we know what wallet address was actually paid? And how do we know what wallet address we originally provided? This is even more relevant if your webpage includes multiple monetization link tags. - -Thankfully, the [`MonetizationEvent`](https://webmonetization.org/specification/#dom-monetizationevent) includes both these details: - -```webidl -interface MonetizationEvent : Event { - readonly attribute MonetizationCurrencyAmount amountSent; - readonly attribute USVString paymentPointer; - readonly attribute USVString? incomingPayment; - // other details not relevant in this context -} -``` - -Here, `paymentPointer` is the resolved wallet address that was paid. For example, with a custom domain payment pointer - like I've mapped `$sidvishnoi.com` to resolve to my GateHub wallet address (`https://ilp.gatehub.net/981946513/eur`) above - the `paymentPointer` will resolve to my GateHub address. When using a wallet address from the probabilistic revenue sharing tool, it'll correspond to the wallet address that was chosen randomly. - -And how do we get the original wallet address? - -If you look closely, the `MonetizationEvent` inherits the `Event` interface. So, everything that belongs to `Event` is also part of `MonetizationEvent`. The part we're looking for here is the event's `target` attribute. This target corresponds to the `` element where you added your wallet address. The link element has the `href` attribute, which corresponds to the original wallet address we provided on our page. 
We can get the original wallet address as follows: - -```html - - - - - -``` - -```ts -window.addEventListener('monetization', (event) => { - const linkElement: HTMLLinkElement = event.target - const originalWalletAddress = linkElement.href - // -> https://sidvishnoi.com/.well-known/pay - - const walletAddressThatGotPaid = event.paymentPointer - // -> https://ilp.gatehub.net/981946513/eur -}) -``` - -Depending on the use case, you may care about either or both of the wallet addresses. I hope it was helpful for you to know how to get each. - -## Closing words - -I'm excited to see your wallet addresses on your own domains! It would be wonderful if you could share your solutions for different hosting providers and servers with our [community](https://community.interledger.org/), or even better, on your own blog. Looking forward to your contributions! diff --git a/src/content/blog/2025-09-30-wallet-address-smart-redirect.mdx b/src/content/blog/2025-09-30-wallet-address-smart-redirect.mdx deleted file mode 100644 index 25987180..00000000 --- a/src/content/blog/2025-09-30-wallet-address-smart-redirect.mdx +++ /dev/null @@ -1,111 +0,0 @@ ---- -title: 'Wallet Address Smart Redirect' -description: 'One Wallet Address, Two Experiences: Introducing Wallet Address Smart Redirect in Rafiki' -date: 2025-09-30 -slug: wallet-address-smart-redirect -authors: - - Cozmin Ungureanu -author_urls: - - https://www.linkedin.com/in/nodejs-dev/ -tags: - - Interledger - - Rafiki ---- - -When you think of a **wallet address**, you probably think of a string of characters (or in Open Payments's case, a neat URL) that acts as a source or destination for payments. 
For example: - -`https://ilp.interledger.cards/007` - -Traditionally, when an Open Payments client queries that address, it receives a structured JSON: - -```json -{ - "id": "https://ilp.interledger.cards/007", - "publicName": "John Doe", - "assetCode": "EUR", - "assetScale": 2, - "authServer": "https://auth.interledger.cards", - "resourceServer": "https://ilp.interledger.cards" -} -``` - -This is perfect for developers and systems that need machine-readable data. But what about people? - -If someone clicks that same wallet address in a browser, JSON isn't exactly a friendly experience. That's where our new **Wallet Address Smart Redirect** comes in. - ---- - -## What's new? - -As of v1.1.2-beta, enabling **Smart Redirect**, Rafiki detects when a request is coming from a browser (using the `Accept: text/html` header). Instead of showing raw JSON, the request automatically redirects to a user-friendly payment page defined in your environment settings. - -For example, when opened in a browser, `https://ilp.interledger.cards/007` redirects to: -`https://interledgerpay.com/payment-choice?receiver=https://ilp.interledger.cards/007` - -![Interledger Payment Page](/developers/img/blog/2025-09-30/ilp-payment-page.png) - -The result: **a single wallet address that works seamlessly for both developers and end-users.** - ---- - -## Example in Action - -Let's say **Alice wants to send money to Bob**. - -1. Bob shares his wallet address: `https://ilp.interledger.cards/007` -2. Alice pastes that link into her browser. -3. Instead of confusing JSON, Alice is redirected straight to Bob's payment page (configured by the wallet operator). -4. Alice chooses her payment method and completes the transfer. - -Meanwhile, behind the scenes: - -- If Alice's wallet operator queried Bob's wallet address directly, it would still get the standard JSON response. -- One address, two different but perfectly matched experiences. 
- ---- - -## Why it matters - -- **One address, two roles**: The same URL can serve structured data for APIs _and_ a human-friendly payment experience for browsers. -- **Frictionless payments**: Users who aren't developers don't have to stare at JSON—they land on a simple payment page. -- **Configurable**: You control where browser requests are redirected by setting the URL in your environment variable. - ---- - -## Configuration - -Enable **Wallet Address Smart Redirect** by setting a single environment variable: - -```bash -WALLET_ADDRESS_REDIRECT_HTML_PAGE="https://interledgerpay.com/payment-choice?receiver=%ewa" -``` - -### Header behavior - -- If the request has `Accept` header with `text/html`, Rafiki **redirects** to the URL from `WALLET_ADDRESS_REDIRECT_HTML_PAGE` (after substituting tokens below). -- Otherwise, Rafiki returns the standard **JSON** response (no redirect). - -### URL tokens (drop-in placeholders) - -You can compose the redirect URL using these tokens: - -| Token | What it inserts | Example redirect template | Example resolved URL (for `http://ilp.dev/007`) | -| ------ | ------------------------------------ | -------------------------------- | ------------------------------------------------------ | -| `%wa` | Full wallet address including scheme | `http://rafiki.dev/%wa` | `http://rafiki.dev/http://ilp.dev/007` | -| `%ewa` | URI-encoded full wallet address | `http://rafiki.dev/?wallet=%ewa` | `http://rafiki.dev/?wallet=http%3A%2F%2Filp.dev%2F007` | -| `%wp` | Host + path (no scheme) | `http://rafiki.dev/%wp` | `http://rafiki.dev/ilp.dev/007` | -| `%ewp` | URI-encoded host + path (no scheme) | `http://rafiki.dev/?wallet=%ewp` | `http://rafiki.dev/?wallet=ilp.dev%2F007` | - ---- - -## How to use it - -1. Set `WALLET_ADDRESS_REDIRECT_HTML_PAGE` in your `backend` environment variables. -2. Choose the right token for your redirect style. -3. That's it, your wallet addresses now double as developer endpoints _and_ payment links. 
- ---- - -## Looking ahead - -**Wallet Address Smart Redirect** is another step toward making Rafiki wallets more accessible, discoverable, and user-friendly. Whether you're a developer integrating APIs or a user just trying to send money, one address now does it all. diff --git a/src/content/blog/2025-12-03-stephan-helping-on-setting-things-up.mdx b/src/content/blog/2025-12-03-stephan-helping-on-setting-things-up.mdx new file mode 100644 index 00000000..c943d1bc --- /dev/null +++ b/src/content/blog/2025-12-03-stephan-helping-on-setting-things-up.mdx @@ -0,0 +1,8 @@ +--- +title: "Stephan blog title" +description: "happening now" +date: 2025-12-03 +slug: stephan-helping-on-setting-things-up +--- + +body here diff --git a/src/content/blog/2025-12-07-new-expo-23.mdx b/src/content/blog/2025-12-07-new-expo-23.mdx new file mode 100644 index 00000000..7f232929 --- /dev/null +++ b/src/content/blog/2025-12-07-new-expo-23.mdx @@ -0,0 +1,17 @@ +--- +title: "New expo 23" +description: "what happened in 2025" +date: 2025-12-07 +slug: new-expo-23 +image: "http://localhost:1337/uploads/10257219_10153391503200616_5726895117654316664_o_ee8f6faf8c.webp" +--- + +On November 8 & 9, 2025, more than 170 developers, students, designers, and entrepreneurs gathered at InSpark in Mexico City for the Interledger Hackathon. Over 24 hours, 47 teams worked side by side to turn the promise of interoperable payments into working prototypes. + +The challenge was clear: how can open-source tools like the [Interledger Protocol (ILP)](https://interledger.org/interledger) and [Open Payments API](https://openpayments.dev/overview/getting-started/) help reduce cash dependency, lower remittance costs, and improve digital acceptance for small businesses? 
+ +## Celebrating the winners + +### Los VibeCoders - _VibePayments_ + +First place went to Los VibeCoders with _VibePayments_, a cloud-based SaaS platform designed to tackle the barriers of currency exchange, high bank fees, and slow processing times that affect international tourism and global supply chains. diff --git a/src/content/events/annual-events.mdx b/src/content/events/annual-events.mdx new file mode 100644 index 00000000..1bc18792 --- /dev/null +++ b/src/content/events/annual-events.mdx @@ -0,0 +1,16 @@ +--- +title: "Annual Events" +order: 0 +--- + +### Interledger Summit + +Join us for our annual gathering where we bring together developers, financial institutions, and innovators to explore the future of interoperable payments. + +### Hackathon + +A hands-on exploration opportunity where teams develop practical solutions for interoperable payments, tackling challenges like global remittances, device-less payments, and financial inclusion. + +### Hacktoberfest + +Every October, we participate in Hacktoberfest, an initiative for open source contributions. Join our community in making meaningful contributions to the Interledger ecosystem. diff --git a/src/content/events/community-call.mdx b/src/content/events/community-call.mdx new file mode 100644 index 00000000..535266d6 --- /dev/null +++ b/src/content/events/community-call.mdx @@ -0,0 +1,13 @@ +--- +title: "Community Call" +order: 1 +--- + +Join us for our monthly community gathering where we share project updates, discuss new developments, and engage with the Interledger community. + +**Frequency:** Monthly, second Wednesday +**Time:** 10:00 AM CDT / 3:00 PM UTC +**Platform:** Google Meet +**Access code:** `enb-ayiv-rnw` + +Come share your projects, ask questions, and connect with other members of the Interledger community working on making payments as easy as sending an email. 
diff --git a/src/content/events/easter-hackathon.mdx b/src/content/events/easter-hackathon.mdx new file mode 100644 index 00000000..df51422a --- /dev/null +++ b/src/content/events/easter-hackathon.mdx @@ -0,0 +1,6 @@ +--- +title: "Easter hackathon" +order: 0 +--- + +Japan has numerous annual events, including national holidays like New Year (Jan 1), major festivals such as Obon (August) and cherry blossom viewing (Hana diff --git a/src/content/events/news-event.mdx b/src/content/events/news-event.mdx new file mode 100644 index 00000000..2aa7f82b --- /dev/null +++ b/src/content/events/news-event.mdx @@ -0,0 +1,6 @@ +--- +title: "Christmas time1" +order: 0 +--- + +Japan has numerous annual events, including national holidays like New Year (Jan 1), major festivals such as Obon (August) and cherry blossom viewing (Hanami, March-April), seasonal celebrations like Tanabata (July), and children's events like Shichi-Go-San (Nov). Major seasonal festivals are also key, such as autumn leaves viewing (Nov-Dec) and winter illumination diff --git a/src/content/financial-services/.gitkeep b/src/content/financial-services/.gitkeep new file mode 100644 index 00000000..8b137891 --- /dev/null +++ b/src/content/financial-services/.gitkeep @@ -0,0 +1 @@ + diff --git a/src/content/grant-tracks/.gitkeep b/src/content/grant-tracks/.gitkeep new file mode 100644 index 00000000..8b137891 --- /dev/null +++ b/src/content/grant-tracks/.gitkeep @@ -0,0 +1 @@ + diff --git a/src/content/press/.gitkeep b/src/content/press/.gitkeep new file mode 100644 index 00000000..0f8d22ab --- /dev/null +++ b/src/content/press/.gitkeep @@ -0,0 +1 @@ +# Press content generated by Strapi CMS will be stored here diff --git a/src/layouts/BaseLayout.astro b/src/layouts/BaseLayout.astro index ba50c38b..ecb09542 100644 --- a/src/layouts/BaseLayout.astro +++ b/src/layouts/BaseLayout.astro @@ -49,7 +49,7 @@ const { property="og:image" content={ogImageUrl ? 
ogImageUrl - : new URL('/developers/img/og-image.png', Astro.site).href} + : new URL('/img/og-image.png', Astro.site).href} /> - +