diff --git a/.agents/skills/supabase-postgres-best-practices/AGENTS.md b/.agents/skills/supabase-postgres-best-practices/AGENTS.md new file mode 100644 index 00000000..a7baf445 --- /dev/null +++ b/.agents/skills/supabase-postgres-best-practices/AGENTS.md @@ -0,0 +1,68 @@ +# Supabase Postgres Best Practices + +## Structure + +``` +supabase-postgres-best-practices/ + SKILL.md # Main skill file - read this first + AGENTS.md # This navigation guide + CLAUDE.md # Symlink to AGENTS.md + references/ # Detailed reference files +``` + +## Usage + +1. Read `SKILL.md` for the main skill instructions +2. Browse `references/` for detailed documentation on specific topics +3. Reference files are loaded on-demand - read only what you need + +Comprehensive performance optimization guide for Postgres, maintained by Supabase. Contains rules across 8 categories, prioritized by impact to guide automated query optimization and schema design. + +## When to Apply + +Reference these guidelines when: +- Writing SQL queries or designing schemas +- Implementing indexes or query optimization +- Reviewing database performance issues +- Configuring connection pooling or scaling +- Optimizing for Postgres-specific features +- Working with Row-Level Security (RLS) + +## Rule Categories by Priority + +| Priority | Category | Impact | Prefix | +|----------|----------|--------|--------| +| 1 | Query Performance | CRITICAL | `query-` | +| 2 | Connection Management | CRITICAL | `conn-` | +| 3 | Security & RLS | CRITICAL | `security-` | +| 4 | Schema Design | HIGH | `schema-` | +| 5 | Concurrency & Locking | MEDIUM-HIGH | `lock-` | +| 6 | Data Access Patterns | MEDIUM | `data-` | +| 7 | Monitoring & Diagnostics | LOW-MEDIUM | `monitor-` | +| 8 | Advanced Features | LOW | `advanced-` | + +## How to Use + +Read individual rule files for detailed explanations and SQL examples: + +``` +references/query-missing-indexes.md +references/schema-partial-indexes.md +references/_sections.md +``` + +Each rule file 
contains: +- Brief explanation of why it matters +- Incorrect SQL example with explanation +- Correct SQL example with explanation +- Optional EXPLAIN output or metrics +- Additional context and references +- Supabase-specific notes (when applicable) + +## References + +- https://www.postgresql.org/docs/current/ +- https://supabase.com/docs +- https://wiki.postgresql.org/wiki/Performance_Optimization +- https://supabase.com/docs/guides/database/overview +- https://supabase.com/docs/guides/auth/row-level-security diff --git a/.agents/skills/supabase-postgres-best-practices/CLAUDE.md b/.agents/skills/supabase-postgres-best-practices/CLAUDE.md new file mode 100644 index 00000000..a7baf445 --- /dev/null +++ b/.agents/skills/supabase-postgres-best-practices/CLAUDE.md @@ -0,0 +1,68 @@ +# Supabase Postgres Best Practices + +## Structure + +``` +supabase-postgres-best-practices/ + SKILL.md # Main skill file - read this first + AGENTS.md # This navigation guide + CLAUDE.md # Symlink to AGENTS.md + references/ # Detailed reference files +``` + +## Usage + +1. Read `SKILL.md` for the main skill instructions +2. Browse `references/` for detailed documentation on specific topics +3. Reference files are loaded on-demand - read only what you need + +Comprehensive performance optimization guide for Postgres, maintained by Supabase. Contains rules across 8 categories, prioritized by impact to guide automated query optimization and schema design. 
+ +## When to Apply + +Reference these guidelines when: +- Writing SQL queries or designing schemas +- Implementing indexes or query optimization +- Reviewing database performance issues +- Configuring connection pooling or scaling +- Optimizing for Postgres-specific features +- Working with Row-Level Security (RLS) + +## Rule Categories by Priority + +| Priority | Category | Impact | Prefix | +|----------|----------|--------|--------| +| 1 | Query Performance | CRITICAL | `query-` | +| 2 | Connection Management | CRITICAL | `conn-` | +| 3 | Security & RLS | CRITICAL | `security-` | +| 4 | Schema Design | HIGH | `schema-` | +| 5 | Concurrency & Locking | MEDIUM-HIGH | `lock-` | +| 6 | Data Access Patterns | MEDIUM | `data-` | +| 7 | Monitoring & Diagnostics | LOW-MEDIUM | `monitor-` | +| 8 | Advanced Features | LOW | `advanced-` | + +## How to Use + +Read individual rule files for detailed explanations and SQL examples: + +``` +references/query-missing-indexes.md +references/schema-partial-indexes.md +references/_sections.md +``` + +Each rule file contains: +- Brief explanation of why it matters +- Incorrect SQL example with explanation +- Correct SQL example with explanation +- Optional EXPLAIN output or metrics +- Additional context and references +- Supabase-specific notes (when applicable) + +## References + +- https://www.postgresql.org/docs/current/ +- https://supabase.com/docs +- https://wiki.postgresql.org/wiki/Performance_Optimization +- https://supabase.com/docs/guides/database/overview +- https://supabase.com/docs/guides/auth/row-level-security diff --git a/.agents/skills/supabase-postgres-best-practices/README.md b/.agents/skills/supabase-postgres-best-practices/README.md new file mode 100644 index 00000000..f1a374e1 --- /dev/null +++ b/.agents/skills/supabase-postgres-best-practices/README.md @@ -0,0 +1,116 @@ +# Supabase Postgres Best Practices - Contributor Guide + +This skill contains Postgres performance optimization references optimized for +AI 
agents and LLMs. It follows the [Agent Skills Open Standard](https://agentskills.io/). + +## Quick Start + +```bash +# From repository root +npm install + +# Validate existing references +npm run validate + +# Build AGENTS.md +npm run build +``` + +## Creating a New Reference + +1. **Choose a section prefix** based on the category: + - `query-` Query Performance (CRITICAL) + - `conn-` Connection Management (CRITICAL) + - `security-` Security & RLS (CRITICAL) + - `schema-` Schema Design (HIGH) + - `lock-` Concurrency & Locking (MEDIUM-HIGH) + - `data-` Data Access Patterns (MEDIUM) + - `monitor-` Monitoring & Diagnostics (LOW-MEDIUM) + - `advanced-` Advanced Features (LOW) + +2. **Copy the template**: + ```bash + cp references/_template.md references/query-your-reference-name.md + ``` + +3. **Fill in the content** following the template structure + +4. **Validate and build**: + ```bash + npm run validate + npm run build + ``` + +5. **Review** the generated `AGENTS.md` + +## Skill Structure + +``` +skills/supabase-postgres-best-practices/ +├── SKILL.md # Agent-facing skill manifest (Agent Skills spec) +├── AGENTS.md # [GENERATED] Compiled references document +├── README.md # This file +└── references/ + ├── _template.md # Reference template + ├── _sections.md # Section definitions + ├── _contributing.md # Writing guidelines + └── *.md # Individual references + +packages/skills-build/ +├── src/ # Generic build system source +└── package.json # NPM scripts +``` + +## Reference File Structure + +See `references/_template.md` for the complete template. 
Key elements: + +````markdown +--- +title: Clear, Action-Oriented Title +impact: CRITICAL|HIGH|MEDIUM-HIGH|MEDIUM|LOW-MEDIUM|LOW +impactDescription: Quantified benefit (e.g., "10-100x faster") +tags: relevant, keywords +--- + +## [Title] + +[1-2 sentence explanation] + +**Incorrect (description):** + +```sql +-- Comment explaining what's wrong +[Bad SQL example] +``` + + +**Correct (description):** + +```sql +-- Comment explaining why this is better +[Good SQL example] +``` +```` + +## Writing Guidelines + +See `references/_contributing.md` for detailed guidelines. Key principles: + +1. **Show concrete transformations** - "Change X to Y", not abstract advice +2. **Error-first structure** - Show the problem before the solution +3. **Quantify impact** - Include specific metrics (10x faster, 50% smaller) +4. **Self-contained examples** - Complete, runnable SQL +5. **Semantic naming** - Use meaningful names (users, email), not (table1, col1) + +## Impact Levels + +| Level | Improvement | Examples | +|-------|-------------|----------| +| CRITICAL | 10-100x | Missing indexes, connection exhaustion | +| HIGH | 5-20x | Wrong index types, poor partitioning | +| MEDIUM-HIGH | 2-5x | N+1 queries, RLS optimization | +| MEDIUM | 1.5-3x | Redundant indexes, stale statistics | +| LOW-MEDIUM | 1.2-2x | VACUUM tuning, config tweaks | +| LOW | Incremental | Advanced patterns, edge cases | + diff --git a/.agents/skills/supabase-postgres-best-practices/SKILL.md b/.agents/skills/supabase-postgres-best-practices/SKILL.md new file mode 100644 index 00000000..f80be156 --- /dev/null +++ b/.agents/skills/supabase-postgres-best-practices/SKILL.md @@ -0,0 +1,64 @@ +--- +name: supabase-postgres-best-practices +description: Postgres performance optimization and best practices from Supabase. Use this skill when writing, reviewing, or optimizing Postgres queries, schema designs, or database configurations.
+license: MIT +metadata: + author: supabase + version: "1.1.0" + organization: Supabase + date: January 2026 + abstract: Comprehensive Postgres performance optimization guide for developers using Supabase and Postgres. Contains performance rules across 8 categories, prioritized by impact from critical (query performance, connection management) to incremental (advanced features). Each rule includes detailed explanations, incorrect vs. correct SQL examples, query plan analysis, and specific performance metrics to guide automated optimization and code generation. +--- + +# Supabase Postgres Best Practices + +Comprehensive performance optimization guide for Postgres, maintained by Supabase. Contains rules across 8 categories, prioritized by impact to guide automated query optimization and schema design. + +## When to Apply + +Reference these guidelines when: +- Writing SQL queries or designing schemas +- Implementing indexes or query optimization +- Reviewing database performance issues +- Configuring connection pooling or scaling +- Optimizing for Postgres-specific features +- Working with Row-Level Security (RLS) + +## Rule Categories by Priority + +| Priority | Category | Impact | Prefix | +|----------|----------|--------|--------| +| 1 | Query Performance | CRITICAL | `query-` | +| 2 | Connection Management | CRITICAL | `conn-` | +| 3 | Security & RLS | CRITICAL | `security-` | +| 4 | Schema Design | HIGH | `schema-` | +| 5 | Concurrency & Locking | MEDIUM-HIGH | `lock-` | +| 6 | Data Access Patterns | MEDIUM | `data-` | +| 7 | Monitoring & Diagnostics | LOW-MEDIUM | `monitor-` | +| 8 | Advanced Features | LOW | `advanced-` | + +## How to Use + +Read individual rule files for detailed explanations and SQL examples: + +``` +references/query-missing-indexes.md +references/schema-partial-indexes.md +references/_sections.md +``` + +Each rule file contains: +- Brief explanation of why it matters +- Incorrect SQL example with explanation +- Correct SQL example with 
explanation +- Optional EXPLAIN output or metrics +- Additional context and references +- Supabase-specific notes (when applicable) + +## References + +- https://www.postgresql.org/docs/current/ +- https://supabase.com/docs +- https://wiki.postgresql.org/wiki/Performance_Optimization +- https://supabase.com/docs/guides/database/overview +- https://supabase.com/docs/guides/auth/row-level-security diff --git a/.agents/skills/supabase-postgres-best-practices/references/_contributing.md b/.agents/skills/supabase-postgres-best-practices/references/_contributing.md new file mode 100644 index 00000000..10de8ecb --- /dev/null +++ b/.agents/skills/supabase-postgres-best-practices/references/_contributing.md @@ -0,0 +1,171 @@ +# Writing Guidelines for Postgres References + +This document provides guidelines for creating effective Postgres best +practice references that work well with AI agents and LLMs. + +## Key Principles + +### 1. Concrete Transformation Patterns + +Show exact SQL rewrites. Avoid philosophical advice. + +**Good:** "Use `WHERE id = ANY(ARRAY[...])` instead of +`WHERE id IN (SELECT ...)`" **Bad:** "Design good schemas" + +### 2. Error-First Structure + +Always show the problematic pattern first, then the solution. This trains agents +to recognize anti-patterns. + +```markdown +**Incorrect (sequential queries):** [bad example] + +**Correct (batched query):** [good example] +``` + +### 3. Quantified Impact + +Include specific metrics. Helps agents prioritize fixes. + +**Good:** "10x faster queries", "50% smaller index", "Eliminates N+1" +**Bad:** "Faster", "Better", "More efficient" + +### 4. Self-Contained Examples + +Examples should be complete and runnable (or close to it). Include `CREATE TABLE` +if context is needed. 
+ +```sql +-- Include table definition when needed for clarity +CREATE TABLE users ( + id bigint PRIMARY KEY, + email text NOT NULL, + deleted_at timestamptz +); + +-- Now show the index +CREATE INDEX users_active_email_idx ON users(email) WHERE deleted_at IS NULL; +``` + +### 5. Semantic Naming + +Use meaningful table/column names. Names carry intent for LLMs. + +**Good:** `users`, `email`, `created_at`, `is_active` +**Bad:** `table1`, `col1`, `field`, `flag` + +--- + +## Code Example Standards + +### SQL Formatting + +```sql +-- Use lowercase keywords, clear formatting +CREATE INDEX CONCURRENTLY users_email_idx + ON users(email) + WHERE deleted_at IS NULL; + +-- Not cramped or ALL CAPS +CREATE INDEX CONCURRENTLY USERS_EMAIL_IDX ON USERS(EMAIL) WHERE DELETED_AT IS NULL; +``` + +### Comments + +- Explain _why_, not _what_ +- Highlight performance implications +- Point out common pitfalls + +### Language Tags + +- `sql` - Standard SQL queries +- `plpgsql` - Stored procedures/functions +- `typescript` - Application code (when needed) +- `python` - Application code (when needed) + +--- + +## When to Include Application Code + +**Default: SQL Only** + +Most references should focus on pure SQL patterns. This keeps examples portable. 
+ +**Include Application Code When:** + +- Connection pooling configuration +- Transaction management in application context +- ORM anti-patterns (N+1 in Prisma/TypeORM) +- Prepared statement usage + +**Format for Mixed Examples:** + +````markdown +**Incorrect (N+1 in application):** + +```typescript +for (const user of users) { + const posts = await db.query("SELECT * FROM posts WHERE user_id = $1", [ + user.id, + ]); +} +``` +```` + +**Correct (batch query):** + +```typescript +const posts = await db.query("SELECT * FROM posts WHERE user_id = ANY($1)", [ + userIds, +]); +``` + +--- + +## Impact Level Guidelines + +| Level | Improvement | Use When | +|-------|-------------|----------| +| **CRITICAL** | 10-100x | Missing indexes, connection exhaustion, sequential scans on large tables | +| **HIGH** | 5-20x | Wrong index types, poor partitioning, missing covering indexes | +| **MEDIUM-HIGH** | 2-5x | N+1 queries, inefficient pagination, RLS optimization | +| **MEDIUM** | 1.5-3x | Redundant indexes, query plan instability | +| **LOW-MEDIUM** | 1.2-2x | VACUUM tuning, configuration tweaks | +| **LOW** | Incremental | Advanced patterns, edge cases | + +--- + +## Reference Standards + +**Primary Sources:** + +- Official Postgres documentation +- Supabase documentation +- Postgres wiki +- Established blogs (2ndQuadrant, Crunchy Data) + +**Format:** + +```markdown +Reference: +[Postgres Indexes](https://www.postgresql.org/docs/current/indexes.html) +``` + +--- + +## Review Checklist + +Before submitting a reference: + +- [ ] Title is clear and action-oriented +- [ ] Impact level matches the performance gain +- [ ] impactDescription includes quantification +- [ ] Explanation is concise (1-2 sentences) +- [ ] Has at least 1 **Incorrect** SQL example +- [ ] Has at least 1 **Correct** SQL example +- [ ] SQL uses semantic naming +- [ ] Comments explain _why_, not _what_ +- [ ] Trade-offs mentioned if applicable +- [ ] Reference links included +- [ ] `npm run validate` passes 
+- [ ] `npm run build` generates correct output diff --git a/.agents/skills/supabase-postgres-best-practices/references/_sections.md b/.agents/skills/supabase-postgres-best-practices/references/_sections.md new file mode 100644 index 00000000..8ba57c23 --- /dev/null +++ b/.agents/skills/supabase-postgres-best-practices/references/_sections.md @@ -0,0 +1,39 @@ +# Section Definitions + +This file defines the rule categories for Postgres best practices. Rules are automatically assigned to sections based on their filename prefix. + +The sections below define the actual rule categories for this skill; assign each new rule to a section by giving its file the matching filename prefix. + +--- + +## 1. Query Performance (query) +**Impact:** CRITICAL +**Description:** Slow queries, missing indexes, inefficient query plans. The most common source of Postgres performance issues. + +## 2. Connection Management (conn) +**Impact:** CRITICAL +**Description:** Connection pooling, limits, and serverless strategies. Critical for applications with high concurrency or serverless deployments. + +## 3. Security & RLS (security) +**Impact:** CRITICAL +**Description:** Row-Level Security policies, privilege management, and authentication patterns. + +## 4. Schema Design (schema) +**Impact:** HIGH +**Description:** Table design, index strategies, partitioning, and data type selection. Foundation for long-term performance. + +## 5. Concurrency & Locking (lock) +**Impact:** MEDIUM-HIGH +**Description:** Transaction management, isolation levels, deadlock prevention, and lock contention patterns. + +## 6. Data Access Patterns (data) +**Impact:** MEDIUM +**Description:** N+1 query elimination, batch operations, cursor-based pagination, and efficient data fetching. + +## 7. Monitoring & Diagnostics (monitor) +**Impact:** LOW-MEDIUM +**Description:** Using pg_stat_statements, EXPLAIN ANALYZE, metrics collection, and performance diagnostics. + +## 8.
Advanced Features (advanced) +**Impact:** LOW +**Description:** Full-text search, JSONB optimization, PostGIS, extensions, and advanced Postgres features. diff --git a/.agents/skills/supabase-postgres-best-practices/references/_template.md b/.agents/skills/supabase-postgres-best-practices/references/_template.md new file mode 100644 index 00000000..91ace90e --- /dev/null +++ b/.agents/skills/supabase-postgres-best-practices/references/_template.md @@ -0,0 +1,34 @@ +--- +title: Clear, Action-Oriented Title (e.g., "Use Partial Indexes for Filtered Queries") +impact: MEDIUM +impactDescription: 5-20x query speedup for filtered queries +tags: indexes, query-optimization, performance +--- + +## [Rule Title] + +[1-2 sentence explanation of the problem and why it matters. Focus on performance impact.] + +**Incorrect (describe the problem):** + +```sql +-- Comment explaining what makes this slow/problematic +CREATE INDEX users_email_idx ON users(email); + +SELECT * FROM users WHERE email = 'user@example.com' AND deleted_at IS NULL; +-- This scans deleted records unnecessarily +``` + +**Correct (describe the solution):** + +```sql +-- Comment explaining why this is better +CREATE INDEX users_active_email_idx ON users(email) WHERE deleted_at IS NULL; + +SELECT * FROM users WHERE email = 'user@example.com' AND deleted_at IS NULL; +-- Only indexes active users, 10x smaller index, faster queries +``` + +[Optional: Additional context, edge cases, or trade-offs] + +Reference: [Postgres Docs](https://www.postgresql.org/docs/current/) diff --git a/.agents/skills/supabase-postgres-best-practices/references/advanced-full-text-search.md b/.agents/skills/supabase-postgres-best-practices/references/advanced-full-text-search.md new file mode 100644 index 00000000..582cbeaa --- /dev/null +++ b/.agents/skills/supabase-postgres-best-practices/references/advanced-full-text-search.md @@ -0,0 +1,55 @@ +--- +title: Use tsvector for Full-Text Search +impact: MEDIUM +impactDescription: 100x faster 
than LIKE, with ranking support +tags: full-text-search, tsvector, gin, search +--- + +## Use tsvector for Full-Text Search + +LIKE with wildcards can't use indexes. Full-text search with tsvector is orders of magnitude faster. + +**Incorrect (LIKE pattern matching):** + +```sql +-- Cannot use index, scans all rows +select * from articles where content like '%postgresql%'; + +-- Case-insensitive makes it worse +select * from articles where lower(content) like '%postgresql%'; +``` + +**Correct (full-text search with tsvector):** + +```sql +-- Add tsvector column and index +alter table articles add column search_vector tsvector + generated always as (to_tsvector('english', coalesce(title,'') || ' ' || coalesce(content,''))) stored; + +create index articles_search_idx on articles using gin (search_vector); + +-- Fast full-text search +select * from articles +where search_vector @@ to_tsquery('english', 'postgresql & performance'); + +-- With ranking +select *, ts_rank(search_vector, query) as rank +from articles, to_tsquery('english', 'postgresql') query +where search_vector @@ query +order by rank desc; +``` + +Search multiple terms: + +```sql +-- AND: both terms required +to_tsquery('postgresql & performance') + +-- OR: either term +to_tsquery('postgresql | mysql') + +-- Prefix matching +to_tsquery('post:*') +``` + +Reference: [Full Text Search](https://supabase.com/docs/guides/database/full-text-search) diff --git a/.agents/skills/supabase-postgres-best-practices/references/advanced-jsonb-indexing.md b/.agents/skills/supabase-postgres-best-practices/references/advanced-jsonb-indexing.md new file mode 100644 index 00000000..e3d261ea --- /dev/null +++ b/.agents/skills/supabase-postgres-best-practices/references/advanced-jsonb-indexing.md @@ -0,0 +1,49 @@ +--- +title: Index JSONB Columns for Efficient Querying +impact: MEDIUM +impactDescription: 10-100x faster JSONB queries with proper indexing +tags: jsonb, gin, indexes, json +--- + +## Index JSONB Columns for 
Efficient Querying + +JSONB queries without indexes scan the entire table. Use GIN indexes for containment queries. + +**Incorrect (no index on JSONB):** + +```sql +create table products ( + id bigint primary key, + attributes jsonb +); + +-- Full table scan for every query +select * from products where attributes @> '{"color": "red"}'; +select * from products where attributes->>'brand' = 'Nike'; +``` + +**Correct (GIN index for JSONB):** + +```sql +-- GIN index for containment operators (@>, ?, ?&, ?|) +create index products_attrs_gin on products using gin (attributes); + +-- Now containment queries use the index +select * from products where attributes @> '{"color": "red"}'; + +-- For specific key lookups, use expression index +create index products_brand_idx on products ((attributes->>'brand')); +select * from products where attributes->>'brand' = 'Nike'; +``` + +Choose the right operator class: + +```sql +-- jsonb_ops (default): supports all operators, larger index +create index idx1 on products using gin (attributes); + +-- jsonb_path_ops: only @> operator, but 2-3x smaller index +create index idx2 on products using gin (attributes jsonb_path_ops); +``` + +Reference: [JSONB Indexes](https://www.postgresql.org/docs/current/datatype-json.html#JSON-INDEXING) diff --git a/.agents/skills/supabase-postgres-best-practices/references/conn-idle-timeout.md b/.agents/skills/supabase-postgres-best-practices/references/conn-idle-timeout.md new file mode 100644 index 00000000..40b9cc50 --- /dev/null +++ b/.agents/skills/supabase-postgres-best-practices/references/conn-idle-timeout.md @@ -0,0 +1,46 @@ +--- +title: Configure Idle Connection Timeouts +impact: HIGH +impactDescription: Reclaim 30-50% of connection slots from idle clients +tags: connections, timeout, idle, resource-management +--- + +## Configure Idle Connection Timeouts + +Idle connections waste resources. Configure timeouts to automatically reclaim them. 
+ +**Incorrect (connections held indefinitely):** + +```sql +-- No timeout configured +show idle_in_transaction_session_timeout; -- 0 (disabled) + +-- Connections stay open forever, even when idle +select pid, state, state_change, query +from pg_stat_activity +where state = 'idle in transaction'; +-- Shows transactions idle for hours, holding locks +``` + +**Correct (automatic cleanup of idle connections):** + +```sql +-- Terminate connections idle in transaction after 30 seconds +alter system set idle_in_transaction_session_timeout = '30s'; + +-- Terminate completely idle connections after 10 minutes +alter system set idle_session_timeout = '10min'; + +-- Reload configuration +select pg_reload_conf(); +``` + +For pooled connections, configure at the pooler level: + +```ini +# pgbouncer.ini +server_idle_timeout = 60 +client_idle_timeout = 300 +``` + +Reference: [Connection Timeouts](https://www.postgresql.org/docs/current/runtime-config-client.html#GUC-IDLE-IN-TRANSACTION-SESSION-TIMEOUT) diff --git a/.agents/skills/supabase-postgres-best-practices/references/conn-limits.md b/.agents/skills/supabase-postgres-best-practices/references/conn-limits.md new file mode 100644 index 00000000..cb3e400c --- /dev/null +++ b/.agents/skills/supabase-postgres-best-practices/references/conn-limits.md @@ -0,0 +1,44 @@ +--- +title: Set Appropriate Connection Limits +impact: CRITICAL +impactDescription: Prevent database crashes and memory exhaustion +tags: connections, max-connections, limits, stability +--- + +## Set Appropriate Connection Limits + +Too many connections exhaust memory and degrade performance. Set limits based on available resources. + +**Incorrect (unlimited or excessive connections):** + +```sql +-- Default max_connections = 100, but often increased blindly +show max_connections; -- 500 (way too high for 4GB RAM) + +-- Each connection uses 1-3MB RAM +-- 500 connections * 2MB = 1GB just for connections! 
+-- Out of memory errors under load +``` + +**Correct (calculate based on resources):** + +```sql +-- Formula: max_connections = (RAM in MB / 5MB per connection) - reserved +-- For 4GB RAM: (4096 / 5) - 10 = ~800 theoretical max +-- But practically, 100-200 is better for query performance + +-- Recommended settings for 4GB RAM +alter system set max_connections = 100; + +-- Also set work_mem appropriately +-- work_mem * max_connections should not exceed 25% of RAM +alter system set work_mem = '8MB'; -- 8MB * 100 = 800MB max +``` + +Monitor connection usage: + +```sql +select count(*), state from pg_stat_activity group by state; +``` + +Reference: [Database Connections](https://supabase.com/docs/guides/platform/performance#connection-management) diff --git a/.agents/skills/supabase-postgres-best-practices/references/conn-pooling.md b/.agents/skills/supabase-postgres-best-practices/references/conn-pooling.md new file mode 100644 index 00000000..e2ebd581 --- /dev/null +++ b/.agents/skills/supabase-postgres-best-practices/references/conn-pooling.md @@ -0,0 +1,41 @@ +--- +title: Use Connection Pooling for All Applications +impact: CRITICAL +impactDescription: Handle 10-100x more concurrent users +tags: connection-pooling, pgbouncer, performance, scalability +--- + +## Use Connection Pooling for All Applications + +Postgres connections are expensive (1-3MB RAM each). Without pooling, applications exhaust connections under load. + +**Incorrect (new connection per request):** + +```sql +-- Each request creates a new connection +-- Application code: db.connect() per request +-- Result: 500 concurrent users = 500 connections = crashed database + +-- Check current connections +select count(*) from pg_stat_activity; -- 487 connections! 
+``` + +**Correct (connection pooling):** + +```sql +-- Use a pooler like PgBouncer between app and database +-- Application connects to pooler, pooler reuses a small pool to Postgres + +-- Configure pool_size based on: (CPU cores * 2) + spindle_count +-- Example for 4 cores: pool_size = 10 + +-- Result: 500 concurrent users share 10 actual connections +select count(*) from pg_stat_activity; -- 10 connections +``` + +Pool modes: + +- **Transaction mode**: connection returned after each transaction (best for most apps) +- **Session mode**: connection held for entire session (needed for prepared statements, temp tables) + +Reference: [Connection Pooling](https://supabase.com/docs/guides/database/connecting-to-postgres#connection-pooler) diff --git a/.agents/skills/supabase-postgres-best-practices/references/conn-prepared-statements.md b/.agents/skills/supabase-postgres-best-practices/references/conn-prepared-statements.md new file mode 100644 index 00000000..555547d8 --- /dev/null +++ b/.agents/skills/supabase-postgres-best-practices/references/conn-prepared-statements.md @@ -0,0 +1,46 @@ +--- +title: Use Prepared Statements Correctly with Pooling +impact: HIGH +impactDescription: Avoid prepared statement conflicts in pooled environments +tags: prepared-statements, connection-pooling, transaction-mode +--- + +## Use Prepared Statements Correctly with Pooling + +Prepared statements are tied to individual database connections. In transaction-mode pooling, connections are shared, causing conflicts. 
+ +**Incorrect (named prepared statements with transaction pooling):** + +```sql +-- Named prepared statement +prepare get_user as select * from users where id = $1; + +-- In transaction mode pooling, next request may get different connection +execute get_user(123); +-- ERROR: prepared statement "get_user" does not exist +``` + +**Correct (use unnamed statements or session mode):** + +```sql +-- Option 1: Use unnamed prepared statements (most ORMs do this automatically) +-- The query is prepared and executed in a single protocol message + +-- Option 2: Deallocate after use in transaction mode +prepare get_user as select * from users where id = $1; +execute get_user(123); +deallocate get_user; + +-- Option 3: Use session mode pooling (port 5432 vs 6543) +-- Connection is held for entire session, prepared statements persist +``` + +Check your driver settings: + +```sql +-- Many drivers use prepared statements by default +-- Node.js pg: { prepare: false } to disable +-- JDBC: prepareThreshold=0 to disable +``` + +Reference: [Prepared Statements with Pooling](https://supabase.com/docs/guides/database/connecting-to-postgres#connection-pool-modes) diff --git a/.agents/skills/supabase-postgres-best-practices/references/data-batch-inserts.md b/.agents/skills/supabase-postgres-best-practices/references/data-batch-inserts.md new file mode 100644 index 00000000..997947cb --- /dev/null +++ b/.agents/skills/supabase-postgres-best-practices/references/data-batch-inserts.md @@ -0,0 +1,54 @@ +--- +title: Batch INSERT Statements for Bulk Data +impact: MEDIUM +impactDescription: 10-50x faster bulk inserts +tags: batch, insert, bulk, performance, copy +--- + +## Batch INSERT Statements for Bulk Data + +Individual INSERT statements have high overhead. Batch multiple rows in single statements or use COPY. 
+ +**Incorrect (individual inserts):** + +```sql +-- Each insert is a separate transaction and round trip +insert into events (user_id, action) values (1, 'click'); +insert into events (user_id, action) values (1, 'view'); +insert into events (user_id, action) values (2, 'click'); +-- ... 1000 more individual inserts + +-- 1000 inserts = 1000 round trips = slow +``` + +**Correct (batch insert):** + +```sql +-- Multiple rows in single statement +insert into events (user_id, action) values + (1, 'click'), + (1, 'view'), + (2, 'click'), + -- ... up to ~1000 rows per batch + (999, 'view'); + +-- One round trip for 1000 rows +``` + +For large imports, use COPY: + +```sql +-- COPY is fastest for bulk loading +copy events (user_id, action, created_at) +from '/path/to/data.csv' +with (format csv, header true); + +-- Or from stdin in application +copy events (user_id, action) from stdin with (format csv); +1,click +1,view +2,click +\. +``` + +Reference: [COPY](https://www.postgresql.org/docs/current/sql-copy.html) diff --git a/.agents/skills/supabase-postgres-best-practices/references/data-n-plus-one.md b/.agents/skills/supabase-postgres-best-practices/references/data-n-plus-one.md new file mode 100644 index 00000000..2109186f --- /dev/null +++ b/.agents/skills/supabase-postgres-best-practices/references/data-n-plus-one.md @@ -0,0 +1,53 @@ +--- +title: Eliminate N+1 Queries with Batch Loading +impact: MEDIUM-HIGH +impactDescription: 10-100x fewer database round trips +tags: n-plus-one, batch, performance, queries +--- + +## Eliminate N+1 Queries with Batch Loading + +N+1 queries execute one query per item in a loop. Batch them into a single query using arrays or JOINs. + +**Incorrect (N+1 queries):** + +```sql +-- First query: get all users +select id from users where active = true; -- Returns 100 IDs + +-- Then N queries, one per user +select * from orders where user_id = 1; +select * from orders where user_id = 2; +select * from orders where user_id = 3; +-- ... 
97 more queries! + +-- Total: 101 round trips to database +``` + +**Correct (single batch query):** + +```sql +-- Collect IDs and query once with ANY +select * from orders where user_id = any(array[1, 2, 3, ...]); + +-- Or use JOIN instead of loop +select u.id, u.name, o.* +from users u +left join orders o on o.user_id = u.id +where u.active = true; + +-- Total: 1 round trip +``` + +Application pattern: + +```sql +-- Instead of looping in application code: +-- for user in users: db.query("SELECT * FROM orders WHERE user_id = $1", user.id) + +-- Pass array parameter: +select * from orders where user_id = any($1::bigint[]); +-- Application passes: [1, 2, 3, 4, 5, ...] +``` + +Reference: [N+1 Query Problem](https://supabase.com/docs/guides/database/query-optimization) diff --git a/.agents/skills/supabase-postgres-best-practices/references/data-pagination.md b/.agents/skills/supabase-postgres-best-practices/references/data-pagination.md new file mode 100644 index 00000000..633d8393 --- /dev/null +++ b/.agents/skills/supabase-postgres-best-practices/references/data-pagination.md @@ -0,0 +1,50 @@ +--- +title: Use Cursor-Based Pagination Instead of OFFSET +impact: MEDIUM-HIGH +impactDescription: Consistent O(1) performance regardless of page depth +tags: pagination, cursor, keyset, offset, performance +--- + +## Use Cursor-Based Pagination Instead of OFFSET + +OFFSET-based pagination scans all skipped rows, getting slower on deeper pages. Cursor pagination is O(1). + +**Incorrect (OFFSET pagination):** + +```sql +-- Page 1: scans 20 rows +select * from products order by id limit 20 offset 0; + +-- Page 100: scans 2000 rows to skip 1980 +select * from products order by id limit 20 offset 1980; + +-- Page 10000: scans 200,000 rows! 
+select * from products order by id limit 20 offset 199980; +``` + +**Correct (cursor/keyset pagination):** + +```sql +-- Page 1: get first 20 +select * from products order by id limit 20; +-- Application stores last_id = 20 + +-- Page 2: start after last ID +select * from products where id > 20 order by id limit 20; +-- Uses index, always fast regardless of page depth + +-- Page 10000: same speed as page 1 +select * from products where id > 199980 order by id limit 20; +``` + +For multi-column sorting: + +```sql +-- Cursor must include all sort columns +select * from products +where (created_at, id) > ('2024-01-15 10:00:00', 12345) +order by created_at, id +limit 20; +``` + +Reference: [Pagination](https://supabase.com/docs/guides/database/pagination) diff --git a/.agents/skills/supabase-postgres-best-practices/references/data-upsert.md b/.agents/skills/supabase-postgres-best-practices/references/data-upsert.md new file mode 100644 index 00000000..bc95e230 --- /dev/null +++ b/.agents/skills/supabase-postgres-best-practices/references/data-upsert.md @@ -0,0 +1,50 @@ +--- +title: Use UPSERT for Insert-or-Update Operations +impact: MEDIUM +impactDescription: Atomic operation, eliminates race conditions +tags: upsert, on-conflict, insert, update +--- + +## Use UPSERT for Insert-or-Update Operations + +Using separate SELECT-then-INSERT/UPDATE creates race conditions. Use INSERT ... ON CONFLICT for atomic upserts. + +**Incorrect (check-then-insert race condition):** + +```sql +-- Race condition: two requests check simultaneously +select * from settings where user_id = 123 and key = 'theme'; +-- Both find nothing + +-- Both try to insert +insert into settings (user_id, key, value) values (123, 'theme', 'dark'); +-- One succeeds, one fails with duplicate key error! 
+``` + +**Correct (atomic UPSERT):** + +```sql +-- Single atomic operation +insert into settings (user_id, key, value) +values (123, 'theme', 'dark') +on conflict (user_id, key) +do update set value = excluded.value, updated_at = now(); + +-- Returns the inserted/updated row +insert into settings (user_id, key, value) +values (123, 'theme', 'dark') +on conflict (user_id, key) +do update set value = excluded.value +returning *; +``` + +Insert-or-ignore pattern: + +```sql +-- Insert only if not exists (no update) +insert into page_views (page_id, user_id) +values (1, 123) +on conflict (page_id, user_id) do nothing; +``` + +Reference: [INSERT ON CONFLICT](https://www.postgresql.org/docs/current/sql-insert.html#SQL-ON-CONFLICT) diff --git a/.agents/skills/supabase-postgres-best-practices/references/lock-advisory.md b/.agents/skills/supabase-postgres-best-practices/references/lock-advisory.md new file mode 100644 index 00000000..572eaf0d --- /dev/null +++ b/.agents/skills/supabase-postgres-best-practices/references/lock-advisory.md @@ -0,0 +1,56 @@ +--- +title: Use Advisory Locks for Application-Level Locking +impact: MEDIUM +impactDescription: Efficient coordination without row-level lock overhead +tags: advisory-locks, coordination, application-locks +--- + +## Use Advisory Locks for Application-Level Locking + +Advisory locks provide application-level coordination without requiring database rows to lock. + +**Incorrect (creating rows just for locking):** + +```sql +-- Creating dummy rows to lock on +create table resource_locks ( + resource_name text primary key +); + +insert into resource_locks values ('report_generator'); + +-- Lock by selecting the row +select * from resource_locks where resource_name = 'report_generator' for update; +``` + +**Correct (advisory locks):** + +```sql +-- Session-level advisory lock (released on disconnect or unlock) +select pg_advisory_lock(hashtext('report_generator')); +-- ... do exclusive work ... 
+select pg_advisory_unlock(hashtext('report_generator')); + +-- Transaction-level lock (released on commit/rollback) +begin; +select pg_advisory_xact_lock(hashtext('daily_report')); +-- ... do work ... +commit; -- Lock automatically released +``` + +Try-lock for non-blocking operations: + +```sql +-- Returns immediately with true/false instead of waiting +select pg_try_advisory_lock(hashtext('resource_name')); + +-- Use in application +if (acquired) { + -- Do work + select pg_advisory_unlock(hashtext('resource_name')); +} else { + -- Skip or retry later +} +``` + +Reference: [Advisory Locks](https://www.postgresql.org/docs/current/explicit-locking.html#ADVISORY-LOCKS) diff --git a/.agents/skills/supabase-postgres-best-practices/references/lock-deadlock-prevention.md b/.agents/skills/supabase-postgres-best-practices/references/lock-deadlock-prevention.md new file mode 100644 index 00000000..974da5ed --- /dev/null +++ b/.agents/skills/supabase-postgres-best-practices/references/lock-deadlock-prevention.md @@ -0,0 +1,68 @@ +--- +title: Prevent Deadlocks with Consistent Lock Ordering +impact: MEDIUM-HIGH +impactDescription: Eliminate deadlock errors, improve reliability +tags: deadlocks, locking, transactions, ordering +--- + +## Prevent Deadlocks with Consistent Lock Ordering + +Deadlocks occur when transactions lock resources in different orders. Always +acquire locks in a consistent order. + +**Incorrect (inconsistent lock ordering):** + +```sql +-- Transaction A -- Transaction B +begin; begin; +update accounts update accounts +set balance = balance - 100 set balance = balance - 50 +where id = 1; where id = 2; -- B locks row 2 + +update accounts update accounts +set balance = balance + 100 set balance = balance + 50 +where id = 2; -- A waits for B where id = 1; -- B waits for A + +-- DEADLOCK! 
Both waiting for each other +``` + +**Correct (lock rows in consistent order first):** + +```sql +-- Explicitly acquire locks in ID order before updating +begin; +select * from accounts where id in (1, 2) order by id for update; + +-- Now perform updates in any order - locks already held +update accounts set balance = balance - 100 where id = 1; +update accounts set balance = balance + 100 where id = 2; +commit; +``` + +Alternative: use a single statement to update atomically: + +```sql +-- Single statement acquires all locks atomically +begin; +update accounts +set balance = balance + case id + when 1 then -100 + when 2 then 100 +end +where id in (1, 2); +commit; +``` + +Detect deadlocks in logs: + +```sql +-- Check for recent deadlocks +select * from pg_stat_database where deadlocks > 0; + +-- Enable deadlock logging +set log_lock_waits = on; +set deadlock_timeout = '1s'; +``` + +Reference: +[Deadlocks](https://www.postgresql.org/docs/current/explicit-locking.html#LOCKING-DEADLOCKS) diff --git a/.agents/skills/supabase-postgres-best-practices/references/lock-short-transactions.md b/.agents/skills/supabase-postgres-best-practices/references/lock-short-transactions.md new file mode 100644 index 00000000..e6b8ef26 --- /dev/null +++ b/.agents/skills/supabase-postgres-best-practices/references/lock-short-transactions.md @@ -0,0 +1,50 @@ +--- +title: Keep Transactions Short to Reduce Lock Contention +impact: MEDIUM-HIGH +impactDescription: 3-5x throughput improvement, fewer deadlocks +tags: transactions, locking, contention, performance +--- + +## Keep Transactions Short to Reduce Lock Contention + +Long-running transactions hold locks that block other queries. Keep transactions as short as possible. + +**Incorrect (long transaction with external calls):** + +```sql +begin; +select * from orders where id = 1 for update; -- Lock acquired + +-- Application makes HTTP call to payment API (2-5 seconds) +-- Other queries on this row are blocked! 
+

update orders set status = 'paid' where id = 1;
commit; -- Lock held for entire duration
```

**Correct (minimal transaction scope):**

```sql
-- Validate data and call APIs outside transaction
-- Application: response = await paymentAPI.charge(...)

-- Only hold lock for the actual update
begin;
update orders
set status = 'paid', payment_id = $1
where id = $2 and status = 'pending'
returning *;
commit; -- Lock held for milliseconds
```

Use `statement_timeout` to prevent runaway transactions:

```sql
-- Abort queries running longer than 30 seconds
set statement_timeout = '30s';

-- Or per-transaction (SET LOCAL reverts at commit/rollback; run inside a transaction block)
set local statement_timeout = '5s';
```

Reference: [Transaction Management](https://www.postgresql.org/docs/current/tutorial-transactions.html) diff --git a/.agents/skills/supabase-postgres-best-practices/references/lock-skip-locked.md b/.agents/skills/supabase-postgres-best-practices/references/lock-skip-locked.md new file mode 100644 index 00000000..77bdbb97 --- /dev/null +++ b/.agents/skills/supabase-postgres-best-practices/references/lock-skip-locked.md @@ -0,0 +1,54 @@ +--- +title: Use SKIP LOCKED for Non-Blocking Queue Processing +impact: MEDIUM-HIGH +impactDescription: 10x throughput for worker queues +tags: skip-locked, queue, workers, concurrency +--- + +## Use SKIP LOCKED for Non-Blocking Queue Processing + +When multiple workers process a queue, SKIP LOCKED allows workers to process different rows without waiting. + +**Incorrect (workers block each other):** + +```sql +-- Worker 1 and Worker 2 both try to get next job +begin; +select * from jobs where status = 'pending' order by created_at limit 1 for update; +-- Worker 2 waits for Worker 1's lock to release! 
+``` + +**Correct (SKIP LOCKED for parallel processing):** + +```sql +-- Each worker skips locked rows and gets the next available +begin; +select * from jobs +where status = 'pending' +order by created_at +limit 1 +for update skip locked; + +-- Worker 1 gets job 1, Worker 2 gets job 2 (no waiting) + +update jobs set status = 'processing' where id = $1; +commit; +``` + +Complete queue pattern: + +```sql +-- Atomic claim-and-update in one statement +update jobs +set status = 'processing', worker_id = $1, started_at = now() +where id = ( + select id from jobs + where status = 'pending' + order by created_at + limit 1 + for update skip locked +) +returning *; +``` + +Reference: [SELECT FOR UPDATE SKIP LOCKED](https://www.postgresql.org/docs/current/sql-select.html#SQL-FOR-UPDATE-SHARE) diff --git a/.agents/skills/supabase-postgres-best-practices/references/monitor-explain-analyze.md b/.agents/skills/supabase-postgres-best-practices/references/monitor-explain-analyze.md new file mode 100644 index 00000000..542978c3 --- /dev/null +++ b/.agents/skills/supabase-postgres-best-practices/references/monitor-explain-analyze.md @@ -0,0 +1,45 @@ +--- +title: Use EXPLAIN ANALYZE to Diagnose Slow Queries +impact: LOW-MEDIUM +impactDescription: Identify exact bottlenecks in query execution +tags: explain, analyze, diagnostics, query-plan +--- + +## Use EXPLAIN ANALYZE to Diagnose Slow Queries + +EXPLAIN ANALYZE executes the query and shows actual timings, revealing the true performance bottlenecks. + +**Incorrect (guessing at performance issues):** + +```sql +-- Query is slow, but why? +select * from orders where customer_id = 123 and status = 'pending'; +-- "It must be missing an index" - but which one? 
+``` + +**Correct (use EXPLAIN ANALYZE):** + +```sql +explain (analyze, buffers, format text) +select * from orders where customer_id = 123 and status = 'pending'; + +-- Output reveals the issue: +-- Seq Scan on orders (cost=0.00..25000.00 rows=50 width=100) (actual time=0.015..450.123 rows=50 loops=1) +-- Filter: ((customer_id = 123) AND (status = 'pending'::text)) +-- Rows Removed by Filter: 999950 +-- Buffers: shared hit=5000 read=15000 +-- Planning Time: 0.150 ms +-- Execution Time: 450.500 ms +``` + +Key things to look for: + +```sql +-- Seq Scan on large tables = missing index +-- Rows Removed by Filter = poor selectivity or missing index +-- Buffers: read >> hit = data not cached, needs more memory +-- Nested Loop with high loops = consider different join strategy +-- Sort Method: external merge = work_mem too low +``` + +Reference: [EXPLAIN](https://supabase.com/docs/guides/database/inspect) diff --git a/.agents/skills/supabase-postgres-best-practices/references/monitor-pg-stat-statements.md b/.agents/skills/supabase-postgres-best-practices/references/monitor-pg-stat-statements.md new file mode 100644 index 00000000..d7e82f1a --- /dev/null +++ b/.agents/skills/supabase-postgres-best-practices/references/monitor-pg-stat-statements.md @@ -0,0 +1,55 @@ +--- +title: Enable pg_stat_statements for Query Analysis +impact: LOW-MEDIUM +impactDescription: Identify top resource-consuming queries +tags: pg-stat-statements, monitoring, statistics, performance +--- + +## Enable pg_stat_statements for Query Analysis + +pg_stat_statements tracks execution statistics for all queries, helping identify slow and frequent queries. + +**Incorrect (no visibility into query patterns):** + +```sql +-- Database is slow, but which queries are the problem? 
+-- No way to know without pg_stat_statements +``` + +**Correct (enable and query pg_stat_statements):** + +```sql +-- Enable the extension +create extension if not exists pg_stat_statements; + +-- Find slowest queries by total time +select + calls, + round(total_exec_time::numeric, 2) as total_time_ms, + round(mean_exec_time::numeric, 2) as mean_time_ms, + query +from pg_stat_statements +order by total_exec_time desc +limit 10; + +-- Find most frequent queries +select calls, query +from pg_stat_statements +order by calls desc +limit 10; + +-- Reset statistics after optimization +select pg_stat_statements_reset(); +``` + +Key metrics to monitor: + +```sql +-- Queries with high mean time (candidates for optimization) +select query, mean_exec_time, calls +from pg_stat_statements +where mean_exec_time > 100 -- > 100ms average +order by mean_exec_time desc; +``` + +Reference: [pg_stat_statements](https://supabase.com/docs/guides/database/extensions/pg_stat_statements) diff --git a/.agents/skills/supabase-postgres-best-practices/references/monitor-vacuum-analyze.md b/.agents/skills/supabase-postgres-best-practices/references/monitor-vacuum-analyze.md new file mode 100644 index 00000000..e0e8ea0b --- /dev/null +++ b/.agents/skills/supabase-postgres-best-practices/references/monitor-vacuum-analyze.md @@ -0,0 +1,55 @@ +--- +title: Maintain Table Statistics with VACUUM and ANALYZE +impact: MEDIUM +impactDescription: 2-10x better query plans with accurate statistics +tags: vacuum, analyze, statistics, maintenance, autovacuum +--- + +## Maintain Table Statistics with VACUUM and ANALYZE + +Outdated statistics cause the query planner to make poor decisions. VACUUM reclaims space, ANALYZE updates statistics. 
+ +**Incorrect (stale statistics):** + +```sql +-- Table has 1M rows but stats say 1000 +-- Query planner chooses wrong strategy +explain select * from orders where status = 'pending'; +-- Shows: Seq Scan (because stats show small table) +-- Actually: Index Scan would be much faster +``` + +**Correct (maintain fresh statistics):** + +```sql +-- Manually analyze after large data changes +analyze orders; + +-- Analyze specific columns used in WHERE clauses +analyze orders (status, created_at); + +-- Check when tables were last analyzed +select + relname, + last_vacuum, + last_autovacuum, + last_analyze, + last_autoanalyze +from pg_stat_user_tables +order by last_analyze nulls first; +``` + +Autovacuum tuning for busy tables: + +```sql +-- Increase frequency for high-churn tables +alter table orders set ( + autovacuum_vacuum_scale_factor = 0.05, -- Vacuum at 5% dead tuples (default 20%) + autovacuum_analyze_scale_factor = 0.02 -- Analyze at 2% changes (default 10%) +); + +-- Check autovacuum status +select * from pg_stat_progress_vacuum; +``` + +Reference: [VACUUM](https://supabase.com/docs/guides/database/database-size#vacuum-operations) diff --git a/.agents/skills/supabase-postgres-best-practices/references/query-composite-indexes.md b/.agents/skills/supabase-postgres-best-practices/references/query-composite-indexes.md new file mode 100644 index 00000000..fea64523 --- /dev/null +++ b/.agents/skills/supabase-postgres-best-practices/references/query-composite-indexes.md @@ -0,0 +1,44 @@ +--- +title: Create Composite Indexes for Multi-Column Queries +impact: HIGH +impactDescription: 5-10x faster multi-column queries +tags: indexes, composite-index, multi-column, query-optimization +--- + +## Create Composite Indexes for Multi-Column Queries + +When queries filter on multiple columns, a composite index is more efficient than separate single-column indexes. 
+ +**Incorrect (separate indexes require bitmap scan):** + +```sql +-- Two separate indexes +create index orders_status_idx on orders (status); +create index orders_created_idx on orders (created_at); + +-- Query must combine both indexes (slower) +select * from orders where status = 'pending' and created_at > '2024-01-01'; +``` + +**Correct (composite index):** + +```sql +-- Single composite index (leftmost column first for equality checks) +create index orders_status_created_idx on orders (status, created_at); + +-- Query uses one efficient index scan +select * from orders where status = 'pending' and created_at > '2024-01-01'; +``` + +**Column order matters** - place equality columns first, range columns last: + +```sql +-- Good: status (=) before created_at (>) +create index idx on orders (status, created_at); + +-- Works for: WHERE status = 'pending' +-- Works for: WHERE status = 'pending' AND created_at > '2024-01-01' +-- Does NOT work for: WHERE created_at > '2024-01-01' (leftmost prefix rule) +``` + +Reference: [Multicolumn Indexes](https://www.postgresql.org/docs/current/indexes-multicolumn.html) diff --git a/.agents/skills/supabase-postgres-best-practices/references/query-covering-indexes.md b/.agents/skills/supabase-postgres-best-practices/references/query-covering-indexes.md new file mode 100644 index 00000000..9d2a4947 --- /dev/null +++ b/.agents/skills/supabase-postgres-best-practices/references/query-covering-indexes.md @@ -0,0 +1,40 @@ +--- +title: Use Covering Indexes to Avoid Table Lookups +impact: MEDIUM-HIGH +impactDescription: 2-5x faster queries by eliminating heap fetches +tags: indexes, covering-index, include, index-only-scan +--- + +## Use Covering Indexes to Avoid Table Lookups + +Covering indexes include all columns needed by a query, enabling index-only scans that skip the table entirely. 
+ +**Incorrect (index scan + heap fetch):** + +```sql +create index users_email_idx on users (email); + +-- Must fetch name and created_at from table heap +select email, name, created_at from users where email = 'user@example.com'; +``` + +**Correct (index-only scan with INCLUDE):** + +```sql +-- Include non-searchable columns in the index +create index users_email_idx on users (email) include (name, created_at); + +-- All columns served from index, no table access needed +select email, name, created_at from users where email = 'user@example.com'; +``` + +Use INCLUDE for columns you SELECT but don't filter on: + +```sql +-- Searching by status, but also need customer_id and total +create index orders_status_idx on orders (status) include (customer_id, total); + +select status, customer_id, total from orders where status = 'shipped'; +``` + +Reference: [Index-Only Scans](https://www.postgresql.org/docs/current/indexes-index-only-scans.html) diff --git a/.agents/skills/supabase-postgres-best-practices/references/query-index-types.md b/.agents/skills/supabase-postgres-best-practices/references/query-index-types.md new file mode 100644 index 00000000..93b32590 --- /dev/null +++ b/.agents/skills/supabase-postgres-best-practices/references/query-index-types.md @@ -0,0 +1,48 @@ +--- +title: Choose the Right Index Type for Your Data +impact: HIGH +impactDescription: 10-100x improvement with correct index type +tags: indexes, btree, gin, gist, brin, hash, index-types +--- + +## Choose the Right Index Type for Your Data + +Different index types excel at different query patterns. The default B-tree isn't always optimal. 
+ +**Incorrect (B-tree for JSONB containment):** + +```sql +-- B-tree cannot optimize containment operators +create index products_attrs_idx on products (attributes); +select * from products where attributes @> '{"color": "red"}'; +-- Full table scan - B-tree doesn't support @> operator +``` + +**Correct (GIN for JSONB):** + +```sql +-- GIN supports @>, ?, ?&, ?| operators +create index products_attrs_idx on products using gin (attributes); +select * from products where attributes @> '{"color": "red"}'; +``` + +Index type guide: + +```sql +-- B-tree (default): =, <, >, BETWEEN, IN, IS NULL +create index users_created_idx on users (created_at); + +-- GIN: arrays, JSONB, full-text search +create index posts_tags_idx on posts using gin (tags); + +-- GiST: geometric data, range types, nearest-neighbor (KNN) queries +create index locations_idx on places using gist (location); + +-- BRIN: large time-series tables (10-100x smaller) +create index events_time_idx on events using brin (created_at); + +-- Hash: equality-only (slightly faster than B-tree for =) +create index sessions_token_idx on sessions using hash (token); +``` + +Reference: [Index Types](https://www.postgresql.org/docs/current/indexes-types.html) diff --git a/.agents/skills/supabase-postgres-best-practices/references/query-missing-indexes.md b/.agents/skills/supabase-postgres-best-practices/references/query-missing-indexes.md new file mode 100644 index 00000000..e6daace7 --- /dev/null +++ b/.agents/skills/supabase-postgres-best-practices/references/query-missing-indexes.md @@ -0,0 +1,43 @@ +--- +title: Add Indexes on WHERE and JOIN Columns +impact: CRITICAL +impactDescription: 100-1000x faster queries on large tables +tags: indexes, performance, sequential-scan, query-optimization +--- + +## Add Indexes on WHERE and JOIN Columns + +Queries filtering or joining on unindexed columns cause full table scans, which become exponentially slower as tables grow. 
+ +**Incorrect (sequential scan on large table):** + +```sql +-- No index on customer_id causes full table scan +select * from orders where customer_id = 123; + +-- EXPLAIN shows: Seq Scan on orders (cost=0.00..25000.00 rows=100 width=85) +``` + +**Correct (index scan):** + +```sql +-- Create index on frequently filtered column +create index orders_customer_id_idx on orders (customer_id); + +select * from orders where customer_id = 123; + +-- EXPLAIN shows: Index Scan using orders_customer_id_idx (cost=0.42..8.44 rows=100 width=85) +``` + +For JOIN columns, always index the foreign key side: + +```sql +-- Index the referencing column +create index orders_customer_id_idx on orders (customer_id); + +select c.name, o.total +from customers c +join orders o on o.customer_id = c.id; +``` + +Reference: [Query Optimization](https://supabase.com/docs/guides/database/query-optimization) diff --git a/.agents/skills/supabase-postgres-best-practices/references/query-partial-indexes.md b/.agents/skills/supabase-postgres-best-practices/references/query-partial-indexes.md new file mode 100644 index 00000000..3e61a341 --- /dev/null +++ b/.agents/skills/supabase-postgres-best-practices/references/query-partial-indexes.md @@ -0,0 +1,45 @@ +--- +title: Use Partial Indexes for Filtered Queries +impact: HIGH +impactDescription: 5-20x smaller indexes, faster writes and queries +tags: indexes, partial-index, query-optimization, storage +--- + +## Use Partial Indexes for Filtered Queries + +Partial indexes only include rows matching a WHERE condition, making them smaller and faster when queries consistently filter on the same condition. 
+ +**Incorrect (full index includes irrelevant rows):** + +```sql +-- Index includes all rows, even soft-deleted ones +create index users_email_idx on users (email); + +-- Query always filters active users +select * from users where email = 'user@example.com' and deleted_at is null; +``` + +**Correct (partial index matches query filter):** + +```sql +-- Index only includes active users +create index users_active_email_idx on users (email) +where deleted_at is null; + +-- Query uses the smaller, faster index +select * from users where email = 'user@example.com' and deleted_at is null; +``` + +Common use cases for partial indexes: + +```sql +-- Only pending orders (status rarely changes once completed) +create index orders_pending_idx on orders (created_at) +where status = 'pending'; + +-- Only non-null values +create index products_sku_idx on products (sku) +where sku is not null; +``` + +Reference: [Partial Indexes](https://www.postgresql.org/docs/current/indexes-partial.html) diff --git a/.agents/skills/supabase-postgres-best-practices/references/schema-constraints.md b/.agents/skills/supabase-postgres-best-practices/references/schema-constraints.md new file mode 100644 index 00000000..1d2ef8f9 --- /dev/null +++ b/.agents/skills/supabase-postgres-best-practices/references/schema-constraints.md @@ -0,0 +1,80 @@ +--- +title: Add Constraints Safely in Migrations +impact: HIGH +impactDescription: Prevents migration failures and enables idempotent schema changes +tags: constraints, migrations, schema, alter-table +--- + +## Add Constraints Safely in Migrations + +PostgreSQL does not support `ADD CONSTRAINT IF NOT EXISTS`. Migrations using this syntax will fail. 
+ +**Incorrect (causes syntax error):** + +```sql +-- ERROR: syntax error at or near "not" (SQLSTATE 42601) +alter table public.profiles +add constraint if not exists profiles_birthchart_id_unique unique (birthchart_id); +``` + +**Correct (idempotent constraint creation):** + +```sql +-- Use DO block to check before adding +do $$ +begin + if not exists ( + select 1 from pg_constraint + where conname = 'profiles_birthchart_id_unique' + and conrelid = 'public.profiles'::regclass + ) then + alter table public.profiles + add constraint profiles_birthchart_id_unique unique (birthchart_id); + end if; +end $$; +``` + +For all constraint types: + +```sql +-- Check constraints +do $$ +begin + if not exists ( + select 1 from pg_constraint + where conname = 'check_age_positive' + ) then + alter table users add constraint check_age_positive check (age > 0); + end if; +end $$; + +-- Foreign keys +do $$ +begin + if not exists ( + select 1 from pg_constraint + where conname = 'profiles_birthchart_id_fkey' + ) then + alter table profiles + add constraint profiles_birthchart_id_fkey + foreign key (birthchart_id) references birthcharts(id); + end if; +end $$; +``` + +Check if constraint exists: + +```sql +-- Query to check constraint existence +select conname, contype, pg_get_constraintdef(oid) +from pg_constraint +where conrelid = 'public.profiles'::regclass; + +-- contype values: +-- 'p' = PRIMARY KEY +-- 'f' = FOREIGN KEY +-- 'u' = UNIQUE +-- 'c' = CHECK +``` + +Reference: [Constraints](https://www.postgresql.org/docs/current/ddl-constraints.html) diff --git a/.agents/skills/supabase-postgres-best-practices/references/schema-data-types.md b/.agents/skills/supabase-postgres-best-practices/references/schema-data-types.md new file mode 100644 index 00000000..f253a581 --- /dev/null +++ b/.agents/skills/supabase-postgres-best-practices/references/schema-data-types.md @@ -0,0 +1,46 @@ +--- +title: Choose Appropriate Data Types +impact: HIGH +impactDescription: 50% storage reduction, 
faster comparisons +tags: data-types, schema, storage, performance +--- + +## Choose Appropriate Data Types + +Using the right data types reduces storage, improves query performance, and prevents bugs. + +**Incorrect (wrong data types):** + +```sql +create table users ( + id int, -- Will overflow at 2.1 billion + email varchar(255), -- Unnecessary length limit + created_at timestamp, -- Missing timezone info + is_active varchar(5), -- String for boolean + price varchar(20) -- String for numeric +); +``` + +**Correct (appropriate data types):** + +```sql +create table users ( + id bigint generated always as identity primary key, -- 9 quintillion max + email text, -- No artificial limit, same performance as varchar + created_at timestamptz, -- Always store timezone-aware timestamps + is_active boolean default true, -- 1 byte vs variable string length + price numeric(10,2) -- Exact decimal arithmetic +); +``` + +Key guidelines: + +```sql +-- IDs: use bigint, not int (future-proofing) +-- Strings: use text, not varchar(n) unless constraint needed +-- Time: use timestamptz, not timestamp +-- Money: use numeric, not float (precision matters) +-- Enums: use text with check constraint or create enum type +``` + +Reference: [Data Types](https://www.postgresql.org/docs/current/datatype.html) diff --git a/.agents/skills/supabase-postgres-best-practices/references/schema-foreign-key-indexes.md b/.agents/skills/supabase-postgres-best-practices/references/schema-foreign-key-indexes.md new file mode 100644 index 00000000..6c3d6ff6 --- /dev/null +++ b/.agents/skills/supabase-postgres-best-practices/references/schema-foreign-key-indexes.md @@ -0,0 +1,59 @@ +--- +title: Index Foreign Key Columns +impact: HIGH +impactDescription: 10-100x faster JOINs and CASCADE operations +tags: foreign-key, indexes, joins, schema +--- + +## Index Foreign Key Columns + +Postgres does not automatically index foreign key columns. Missing indexes cause slow JOINs and CASCADE operations. 
+ +**Incorrect (unindexed foreign key):** + +```sql +create table orders ( + id bigint generated always as identity primary key, + customer_id bigint references customers(id) on delete cascade, + total numeric(10,2) +); + +-- No index on customer_id! +-- JOINs and ON DELETE CASCADE both require full table scan +select * from orders where customer_id = 123; -- Seq Scan +delete from customers where id = 123; -- Locks table, scans all orders +``` + +**Correct (indexed foreign key):** + +```sql +create table orders ( + id bigint generated always as identity primary key, + customer_id bigint references customers(id) on delete cascade, + total numeric(10,2) +); + +-- Always index the FK column +create index orders_customer_id_idx on orders (customer_id); + +-- Now JOINs and cascades are fast +select * from orders where customer_id = 123; -- Index Scan +delete from customers where id = 123; -- Uses index, fast cascade +``` + +Find missing FK indexes: + +```sql +select + conrelid::regclass as table_name, + a.attname as fk_column +from pg_constraint c +join pg_attribute a on a.attrelid = c.conrelid and a.attnum = any(c.conkey) +where c.contype = 'f' + and not exists ( + select 1 from pg_index i + where i.indrelid = c.conrelid and a.attnum = any(i.indkey) + ); +``` + +Reference: [Foreign Keys](https://www.postgresql.org/docs/current/ddl-constraints.html#DDL-CONSTRAINTS-FK) diff --git a/.agents/skills/supabase-postgres-best-practices/references/schema-lowercase-identifiers.md b/.agents/skills/supabase-postgres-best-practices/references/schema-lowercase-identifiers.md new file mode 100644 index 00000000..f0072940 --- /dev/null +++ b/.agents/skills/supabase-postgres-best-practices/references/schema-lowercase-identifiers.md @@ -0,0 +1,55 @@ +--- +title: Use Lowercase Identifiers for Compatibility +impact: MEDIUM +impactDescription: Avoid case-sensitivity bugs with tools, ORMs, and AI assistants +tags: naming, identifiers, case-sensitivity, schema, conventions +--- + +## Use 
Lowercase Identifiers for Compatibility + +PostgreSQL folds unquoted identifiers to lowercase. Quoted mixed-case identifiers require quotes forever and cause issues with tools, ORMs, and AI assistants that may not recognize them. + +**Incorrect (mixed-case identifiers):** + +```sql +-- Quoted identifiers preserve case but require quotes everywhere +CREATE TABLE "Users" ( + "userId" bigint PRIMARY KEY, + "firstName" text, + "lastName" text +); + +-- Must always quote or queries fail +SELECT "firstName" FROM "Users" WHERE "userId" = 1; + +-- This fails - Users becomes users without quotes +SELECT firstName FROM Users; +-- ERROR: relation "users" does not exist +``` + +**Correct (lowercase snake_case):** + +```sql +-- Unquoted lowercase identifiers are portable and tool-friendly +CREATE TABLE users ( + user_id bigint PRIMARY KEY, + first_name text, + last_name text +); + +-- Works without quotes, recognized by all tools +SELECT first_name FROM users WHERE user_id = 1; +``` + +Common sources of mixed-case identifiers: + +```sql +-- ORMs often generate quoted camelCase - configure them to use snake_case +-- Migrations from other databases may preserve original casing +-- Some GUI tools quote identifiers by default - disable this + +-- If stuck with mixed-case, create views as a compatibility layer +CREATE VIEW users AS SELECT "userId" AS user_id, "firstName" AS first_name FROM "Users"; +``` + +Reference: [Identifiers and Key Words](https://www.postgresql.org/docs/current/sql-syntax-lexical.html#SQL-SYNTAX-IDENTIFIERS) diff --git a/.agents/skills/supabase-postgres-best-practices/references/schema-partitioning.md b/.agents/skills/supabase-postgres-best-practices/references/schema-partitioning.md new file mode 100644 index 00000000..13137a03 --- /dev/null +++ b/.agents/skills/supabase-postgres-best-practices/references/schema-partitioning.md @@ -0,0 +1,55 @@ +--- +title: Partition Large Tables for Better Performance +impact: MEDIUM-HIGH +impactDescription: 5-20x faster 
queries and maintenance on large tables +tags: partitioning, large-tables, time-series, performance +--- + +## Partition Large Tables for Better Performance + +Partitioning splits a large table into smaller pieces, improving query performance and maintenance operations. + +**Incorrect (single large table):** + +```sql +create table events ( + id bigint generated always as identity, + created_at timestamptz, + data jsonb +); + +-- 500M rows, queries scan everything +select * from events where created_at > '2024-01-01'; -- Slow +vacuum events; -- Takes hours, locks table +``` + +**Correct (partitioned by time range):** + +```sql +create table events ( + id bigint generated always as identity, + created_at timestamptz not null, + data jsonb +) partition by range (created_at); + +-- Create partitions for each month +create table events_2024_01 partition of events + for values from ('2024-01-01') to ('2024-02-01'); + +create table events_2024_02 partition of events + for values from ('2024-02-01') to ('2024-03-01'); + +-- Queries only scan relevant partitions +select * from events where created_at > '2024-01-15'; -- Only scans events_2024_01+ + +-- Drop old data instantly +drop table events_2023_01; -- Instant vs DELETE taking hours +``` + +When to partition: + +- Tables > 100M rows +- Time-series data with date-based queries +- Need to efficiently drop old data + +Reference: [Table Partitioning](https://www.postgresql.org/docs/current/ddl-partitioning.html) diff --git a/.agents/skills/supabase-postgres-best-practices/references/schema-primary-keys.md b/.agents/skills/supabase-postgres-best-practices/references/schema-primary-keys.md new file mode 100644 index 00000000..fb0fbb16 --- /dev/null +++ b/.agents/skills/supabase-postgres-best-practices/references/schema-primary-keys.md @@ -0,0 +1,61 @@ +--- +title: Select Optimal Primary Key Strategy +impact: HIGH +impactDescription: Better index locality, reduced fragmentation +tags: primary-key, identity, uuid, serial, 
schema +--- + +## Select Optimal Primary Key Strategy + +Primary key choice affects insert performance, index size, and replication +efficiency. + +**Incorrect (problematic PK choices):** + +```sql +-- identity is the SQL-standard approach +create table users ( + id serial primary key -- Works, but IDENTITY is recommended +); + +-- Random UUIDs (v4) cause index fragmentation +create table orders ( + id uuid default gen_random_uuid() primary key -- UUIDv4 = random = scattered inserts +); +``` + +**Correct (optimal PK strategies):** + +```sql +-- Use IDENTITY for sequential IDs (SQL-standard, best for most cases) +create table users ( + id bigint generated always as identity primary key +); + +-- For distributed systems needing UUIDs, use UUIDv7 (time-ordered) +-- Requires pg_uuidv7 extension: create extension pg_uuidv7; +create table orders ( + id uuid default uuid_generate_v7() primary key -- Time-ordered, no fragmentation +); + +-- Alternative: time-prefixed IDs for sortable, distributed IDs (no extension needed) +create table events ( + id text default concat( + to_char(now() at time zone 'utc', 'YYYYMMDDHH24MISSMS'), + gen_random_uuid()::text + ) primary key +); +``` + +Guidelines: + +- Single database: `bigint identity` (sequential, 8 bytes, SQL-standard) +- Distributed/exposed IDs: UUIDv7 (requires pg_uuidv7) or ULID (time-ordered, no + fragmentation) +- `serial` works but `identity` is SQL-standard and preferred for new + applications +- Avoid random UUIDs (v4) as primary keys on large tables (causes index + fragmentation) + +Reference: +[Identity Columns](https://www.postgresql.org/docs/current/sql-createtable.html#SQL-CREATETABLE-PARMS-GENERATED-IDENTITY) diff --git a/.agents/skills/supabase-postgres-best-practices/references/security-privileges.md b/.agents/skills/supabase-postgres-best-practices/references/security-privileges.md new file mode 100644 index 00000000..448ec345 --- /dev/null +++ 
b/.agents/skills/supabase-postgres-best-practices/references/security-privileges.md @@ -0,0 +1,54 @@ +--- +title: Apply Principle of Least Privilege +impact: MEDIUM +impactDescription: Reduced attack surface, better audit trail +tags: privileges, security, roles, permissions +--- + +## Apply Principle of Least Privilege + +Grant only the minimum permissions required. Never use superuser for application queries. + +**Incorrect (overly broad permissions):** + +```sql +-- Application uses superuser connection +-- Or grants ALL to application role +grant all privileges on all tables in schema public to app_user; +grant all privileges on all sequences in schema public to app_user; + +-- Any SQL injection becomes catastrophic +-- drop table users; cascades to everything +``` + +**Correct (minimal, specific grants):** + +```sql +-- Create role with no default privileges +create role app_readonly nologin; + +-- Grant only SELECT on specific tables +grant usage on schema public to app_readonly; +grant select on public.products, public.categories to app_readonly; + +-- Create role for writes with limited scope +create role app_writer nologin; +grant usage on schema public to app_writer; +grant select, insert, update on public.orders to app_writer; +grant usage on sequence orders_id_seq to app_writer; +-- No DELETE permission + +-- Login role inherits from these +create role app_user login password 'xxx'; +grant app_writer to app_user; +``` + +Revoke public defaults: + +```sql +-- Revoke default public access +revoke all on schema public from public; +revoke all on all tables in schema public from public; +``` + +Reference: [Roles and Privileges](https://supabase.com/blog/postgres-roles-and-privileges) diff --git a/.agents/skills/supabase-postgres-best-practices/references/security-rls-basics.md b/.agents/skills/supabase-postgres-best-practices/references/security-rls-basics.md new file mode 100644 index 00000000..c61e1a85 --- /dev/null +++ 
b/.agents/skills/supabase-postgres-best-practices/references/security-rls-basics.md @@ -0,0 +1,50 @@ +--- +title: Enable Row Level Security for Multi-Tenant Data +impact: CRITICAL +impactDescription: Database-enforced tenant isolation, prevent data leaks +tags: rls, row-level-security, multi-tenant, security +--- + +## Enable Row Level Security for Multi-Tenant Data + +Row Level Security (RLS) enforces data access at the database level, ensuring users only see their own data. + +**Incorrect (application-level filtering only):** + +```sql +-- Relying only on application to filter +select * from orders where user_id = $current_user_id; + +-- Bug or bypass means all data is exposed! +select * from orders; -- Returns ALL orders +``` + +**Correct (database-enforced RLS):** + +```sql +-- Enable RLS on the table +alter table orders enable row level security; + +-- Create policy for users to see only their orders +create policy orders_user_policy on orders + for all + using (user_id = current_setting('app.current_user_id')::bigint); + +-- Force RLS even for table owners +alter table orders force row level security; + +-- Set user context and query +set app.current_user_id = '123'; +select * from orders; -- Only returns orders for user 123 +``` + +Policy for authenticated role: + +```sql +create policy orders_user_policy on orders + for all + to authenticated + using (user_id = auth.uid()); +``` + +Reference: [Row Level Security](https://supabase.com/docs/guides/database/postgres/row-level-security) diff --git a/.agents/skills/supabase-postgres-best-practices/references/security-rls-performance.md b/.agents/skills/supabase-postgres-best-practices/references/security-rls-performance.md new file mode 100644 index 00000000..b32d92f7 --- /dev/null +++ b/.agents/skills/supabase-postgres-best-practices/references/security-rls-performance.md @@ -0,0 +1,57 @@ +--- +title: Optimize RLS Policies for Performance +impact: HIGH +impactDescription: 5-10x faster RLS queries with proper 
patterns +tags: rls, performance, security, optimization +--- + +## Optimize RLS Policies for Performance + +Poorly written RLS policies can cause severe performance issues. Use subqueries and indexes strategically. + +**Incorrect (function called for every row):** + +```sql +create policy orders_policy on orders + using (auth.uid() = user_id); -- auth.uid() called per row! + +-- With 1M rows, auth.uid() is called 1M times +``` + +**Correct (wrap functions in SELECT):** + +```sql +create policy orders_policy on orders + using ((select auth.uid()) = user_id); -- Called once, cached + +-- 100x+ faster on large tables +``` + +Use security definer functions for complex checks: + +```sql +-- Create helper function (runs as definer, bypasses RLS) +create or replace function is_team_member(team_id bigint) +returns boolean +language sql +security definer +set search_path = '' +as $$ + select exists ( + select 1 from public.team_members + where team_id = $1 and user_id = (select auth.uid()) + ); +$$; + +-- Use in policy (indexed lookup, not per-row check) +create policy team_orders_policy on orders + using ((select is_team_member(team_id))); +``` + +Always add indexes on columns used in RLS policies: + +```sql +create index orders_user_id_idx on orders (user_id); +``` + +Reference: [RLS Performance](https://supabase.com/docs/guides/database/postgres/row-level-security#rls-performance-recommendations) diff --git a/.claude/worktrees/peaceful-northcutt b/.claude/worktrees/peaceful-northcutt new file mode 160000 index 00000000..0d5ee84c --- /dev/null +++ b/.claude/worktrees/peaceful-northcutt @@ -0,0 +1 @@ +Subproject commit 0d5ee84c2d2509c9e1543f417eec317992a5d00e diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 00000000..40ed28e3 --- /dev/null +++ b/.gitattributes @@ -0,0 +1,2 @@ +# Shell scripts run in Linux containers; CRLF breaks shebangs (e.g. /bin/sh^M). 
+*.sh text eol=lf diff --git a/.github/workflows/ci-smoke-preprod.yml b/.github/workflows/ci-smoke-preprod.yml index 548202d6..d02869be 100644 --- a/.github/workflows/ci-smoke-preprod.yml +++ b/.github/workflows/ci-smoke-preprod.yml @@ -3,6 +3,7 @@ name: CI Smoke (Preprod) on: pull_request: branches: [main, preprod] + deployment_status: workflow_dispatch: env: @@ -17,8 +18,18 @@ jobs: smoke: runs-on: ubuntu-latest timeout-minutes: 15 + # For deployment_status triggers, only run when Railway reports a + # successful deploy to the preprod environment. Other events always run. + if: >- + github.event_name != 'deployment_status' || + (github.event.deployment_status.state == 'success' && + github.event.deployment.creator.login == 'railway-app[bot]' && + contains(github.event.deployment.environment, 'preprod')) steps: - uses: actions/checkout@v4 + with: + # On deployment_status, check out the exact SHA that was deployed. + ref: ${{ github.event.deployment.sha || github.sha }} - uses: actions/setup-node@v4 with: diff --git a/.github/workflows/pr-multisig-v1-smoke.yml b/.github/workflows/pr-multisig-v1-smoke.yml new file mode 100644 index 00000000..cb34aea2 --- /dev/null +++ b/.github/workflows/pr-multisig-v1-smoke.yml @@ -0,0 +1,166 @@ +name: PR Multisig v1 Smoke + +on: + pull_request: + branches: + - main + - preprod + workflow_dispatch: + inputs: + required_signers: + description: "Required signatures for CI wallet threshold scripts" + required: false + default: "2" + type: string + sign_wallet_type: + description: "Which wallet type to sign in smoke" + required: false + default: "legacy" + type: choice + options: + - legacy + - hierarchical + - sdk + route_scenarios: + description: "Optional comma-separated scenario IDs for scripts/ci/cli/route-chain.ts" + required: false + default: "" + type: string + +jobs: + multisig-v1-smoke: + if: github.repository == 'MeshJS/multisig' + runs-on: ubuntu-latest + timeout-minutes: 120 + env: + CI_JWT_SECRET: ${{ 
secrets.CI_JWT_SECRET }} + CI_MNEMONIC_1: ${{ secrets.CI_MNEMONIC_1 }} + CI_MNEMONIC_2: ${{ secrets.CI_MNEMONIC_2 }} + CI_MNEMONIC_3: ${{ secrets.CI_MNEMONIC_3 }} + CI_BLOCKFROST_PREPROD_API_KEY: ${{ secrets.CI_BLOCKFROST_PREPROD_API_KEY }} + CI_NETWORK_ID: "0" + CI_NUM_REQUIRED_SIGNERS: ${{ github.event_name == 'workflow_dispatch' && inputs.required_signers || '2' }} + CI_WALLET_TYPES: "legacy,hierarchical,sdk" + CI_SIGN_WALLET_TYPE: ${{ github.event_name == 'workflow_dispatch' && inputs.sign_wallet_type || 'legacy' }} + SIGN_BROADCAST: "true" + CI_ROUTE_SCENARIOS: ${{ github.event_name == 'workflow_dispatch' && inputs.route_scenarios || '' }} + CI_CONTEXT_PATH: /tmp/ci-wallet-context.json + CI_DREP_ANCHOR_URL: ${{ secrets.CI_DREP_ANCHOR_URL }} + CI_DREP_ANCHOR_JSON: ${{ secrets.CI_DREP_ANCHOR_JSON }} + CI_STAKE_POOL_ID_HEX: ${{ secrets.CI_STAKE_POOL_ID_HEX }} + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + - name: Validate required CI secrets + shell: bash + run: | + missing=() + [[ -n "$CI_JWT_SECRET" ]] || missing+=("CI_JWT_SECRET") + [[ -n "$CI_MNEMONIC_1" ]] || missing+=("CI_MNEMONIC_1") + [[ -n "$CI_MNEMONIC_2" ]] || missing+=("CI_MNEMONIC_2") + [[ -n "$CI_MNEMONIC_3" ]] || missing+=("CI_MNEMONIC_3") + [[ -n "$CI_BLOCKFROST_PREPROD_API_KEY" ]] || missing+=("CI_BLOCKFROST_PREPROD_API_KEY") + + route_scenarios=",${CI_ROUTE_SCENARIOS//[[:space:]]/}," + default_route_chain=false + if [[ -z "${CI_ROUTE_SCENARIOS//[[:space:]]/}" ]]; then + default_route_chain=true + fi + + scenario_enabled() { + local scenario_id="$1" + [[ "$default_route_chain" == "true" || "$route_scenarios" == *",$scenario_id,"* ]] + } + + if scenario_enabled "scenario.drep-certificates" || scenario_enabled "scenario.proxy-full-lifecycle"; then + [[ -n "$CI_DREP_ANCHOR_URL" ]] || missing+=("CI_DREP_ANCHOR_URL") + fi + if scenario_enabled "scenario.drep-certificates"; then + [[ -n "$CI_DREP_ANCHOR_JSON" ]] || missing+=("CI_DREP_ANCHOR_JSON") + fi + if scenario_enabled 
"scenario.stake-certificates"; then + [[ -n "$CI_STAKE_POOL_ID_HEX" ]] || missing+=("CI_STAKE_POOL_ID_HEX") + fi + + if [[ "${#missing[@]}" -gt 0 ]]; then + echo "Missing required secrets: ${missing[*]}" + echo "Set these in repo settings before running PR multisig smoke workflow." + exit 1 + fi + + - name: Pull base image (with retry) + shell: bash + run: | + for i in 1 2 3; do + docker pull node:20-alpine && break + echo "Pull attempt $i failed, retrying in 30s..." + sleep 30 + done + + - name: Build CI containers + shell: bash + run: docker compose -f docker-compose.ci.yml build + + - name: Start Postgres + App containers + shell: bash + run: docker compose -f docker-compose.ci.yml up -d postgres app + + - name: Wait for app healthcheck + shell: bash + run: | + for i in {1..60}; do + status=$(docker inspect --format='{{if .State.Health}}{{.State.Health.Status}}{{else}}none{{end}}' "$(docker compose -f docker-compose.ci.yml ps -q app)") + if [[ "$status" == "healthy" ]]; then + echo "App is healthy." + exit 0 + fi + sleep 2 + done + + echo "App failed to become healthy in time." 
+ docker compose -f docker-compose.ci.yml ps + exit 1 + + - name: Run CI wallet bootstrap + v1 route-chain smoke + shell: bash + run: docker compose -f docker-compose.ci.yml --profile ci-test run --rm ci-runner + + - name: Dump container logs on failure + if: failure() + shell: bash + run: | + docker compose -f docker-compose.ci.yml logs --no-color \ + | sed -E 's/(Bearer )[A-Za-z0-9._-]+/\1[REDACTED]/g' \ + | sed -E 's/("token"[[:space:]]*:[[:space:]]*")[^"]+(")/\1[REDACTED]\2/g' \ + | sed -E 's/("secret"[[:space:]]*:[[:space:]]*")[^"]+(")/\1[REDACTED]\2/g' \ + | sed -E 's/("mnemonic([[:alnum:]_-]*)?"[[:space:]]*:[[:space:]]*")[^"]+(")/\1[REDACTED]\3/gI' \ + | sed -E 's/("private([[:alnum:]_-]*)?key([[:alnum:]_-]*)?"[[:space:]]*:[[:space:]]*")[^"]+(")/\1[REDACTED]\3/gI' \ + | sed -E 's/("signing([[:alnum:]_-]*)?key([[:alnum:]_-]*)?"[[:space:]]*:[[:space:]]*")[^"]+(")/\1[REDACTED]\3/gI' \ + | sed -E 's/("seed([[:alnum:]_-]*)?"[[:space:]]*:[[:space:]]*")[^"]+(")/\1[REDACTED]\3/gI' \ + | sed -E 's/("xprv([[:alnum:]_-]*)?"[[:space:]]*:[[:space:]]*")[^"]+(")/\1[REDACTED]\3/gI' \ + | sed -E 's/(ed25519e?_sk[[:alnum:]_]+)/[REDACTED]/gI' \ + | sed -E 's/(xprv[[:alnum:]]+)/[REDACTED]/gI' \ + > docker-compose-ci.log + + - name: Upload logs on failure + if: failure() + uses: actions/upload-artifact@v4 + with: + name: docker-compose-ci-logs + path: docker-compose-ci.log + + - name: Upload route-chain report + if: always() + uses: actions/upload-artifact@v4 + with: + name: ci-route-chain-report + path: ci-artifacts/ci-route-chain-report.md + if-no-files-found: warn + + - name: Tear down CI containers + if: always() + shell: bash + run: docker compose -f docker-compose.ci.yml down -v --remove-orphans + diff --git a/.gitignore b/.gitignore index 4924b8ab..189ad92e 100644 --- a/.gitignore +++ b/.gitignore @@ -31,6 +31,9 @@ yarn-debug.log* yarn-error.log* .pnpm-debug.log* +# CI local artifacts +/ci-artifacts/ + # local env files # do not commit any .env files to git, except for 
the .env.example file. https://create.t3.gg/en/usage/env-variables#using-environment-variables .env diff --git a/.mcp.json b/.mcp.json new file mode 100644 index 00000000..6571769d --- /dev/null +++ b/.mcp.json @@ -0,0 +1,8 @@ +{ + "mcpServers": { + "supabase": { + "type": "http", + "url": "https://mcp.supabase.com/mcp?project_ref=wzgemhfjyfnqmhxlvkqc&read_only=true" + } + } +} \ No newline at end of file diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 00000000..f98c0c0c --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,107 @@ +# Contributing + +Thanks for your interest in Mesh Multi-Sig. This document describes how we work on the project: filing issues, opening pull requests, and getting changes reviewed and merged. + +> **Contributions are accepted against the `preprod` branch.** All PRs — external and internal — should target `preprod`, not `main`. Changes graduate from `preprod` to `main` after they run clean in the preprod environment and pass smoke CI. The only exception is a critical hotfix, which may target `main` directly. + +## Who reviews what + +- Core maintainers: **Quirin** and **Andre**. +- Every PR is reviewed by the contributor who did not author it. If the author is external, either maintainer can review. +- Ownership per feature area is tracked in [ROADMAP.md](ROADMAP.md#task-ownership). The owner for a given area gets first look. + +## Filing an issue + +Before opening an issue, search open and closed issues — many things are already tracked. + +A good issue has: + +- **Title:** one line describing the symptom, not the guess at a cause +- **Steps to reproduce:** exact clicks, URLs, inputs +- **Expected vs actual:** what should happen, what does happen +- **Environment:** browser, network (preprod / mainnet), wallet, commit or deploy URL +- **Logs or screenshots** if the bug is visible + +Label the issue (`bug`, `enhancement`, `research`, etc.) 
and attach the relevant milestone from the [roadmap milestones](../../milestones) if one applies. + +## Branches + +- `main` — production. Only maintainers merge to `main`. +- `preprod` — integration branch deployed to the preprod environment and exercised by the smoke CI. +- Feature / fix branches use a short prefix matching intent: + - `feature/` — new user-facing capability + - `fix/` — bug fix + - `refactor/` — internal change, no behavior change + - `docs/` — docs only + - `chore/` — tooling, dependencies, build + +Branch off `preprod` by default. Branch off `main` only for hotfixes that need to ship immediately. + +## Commit messages + +We follow [Conventional Commits](https://www.conventionalcommits.org/): + +``` +: + + +``` + +Types in use: `feat`, `fix`, `refactor`, `chore`, `docs`, `test`. + +Keep the subject under 72 characters. Write the body when the "why" is non-obvious — a linked issue, a tradeoff, a constraint. Don't restate the diff. + +## Pull requests + +Open the PR against `preprod` unless it's a hotfix for `main`. + +Your PR description should include: + +- **What** the change does in one or two sentences +- **Why** — the issue, incident, or decision it addresses (link the issue: `Closes #123`) +- **How to test** — concrete steps a reviewer can run to verify, including any preprod URL +- **Screenshots or recordings** for UI changes +- **Risk** — anything a reviewer should look at closely (migrations, auth, on-chain behavior) + +Before requesting review: + +- [ ] Rebased on the latest target branch +- [ ] Type-check and lint pass locally (`npm run build`, `npm run lint`) +- [ ] New or changed logic is covered by tests where practical +- [ ] Smoke CI is green on the PR (or the failure is understood and unrelated) +- [ ] UI changes have been loaded in a browser, not just type-checked + +## Review + +Reviewers look for: + +1. Correctness — does the change actually do what it says? +2. 
Scope — no drive-by refactors, no unrelated cleanup, no half-finished migrations +3. Security — input validation at boundaries, no secrets committed, RLS intact, no new injection surface +4. Tests — is the happy path covered? The failure modes you'd expect a user to hit? +5. Docs — if behavior changed, did docs and examples move too? + +Review etiquette: + +- Comment with intent: `nit:` (optional), `question:` (clarify), `blocking:` (must address before merge) +- Prefer suggestions over prose when the change is mechanical +- Resolve your own threads after addressing feedback; don't resolve someone else's + +Two weak approvals do not substitute for one careful review. If a change touches unfamiliar territory, say so and ask the owner to take a pass. + +## Merging + +- Squash merge by default — keep `main` history linear and each commit a complete change. +- Only merge when: + - At least one approval from a maintainer who did not author the PR + - CI is green (or failure is documented and unrelated) + - All blocking comments resolved +- The author merges. If the author is external, the reviewing maintainer merges. + +## Security + +Don't open public issues for vulnerabilities. Email the maintainers directly and we'll coordinate a fix. + +## Questions + +If you're unsure whether something belongs in scope, open a draft PR or an issue with the `question` label — we'd rather discuss early than review a large change that needs to be redone. diff --git a/Dockerfile.ci b/Dockerfile.ci new file mode 100644 index 00000000..ee3f9965 --- /dev/null +++ b/Dockerfile.ci @@ -0,0 +1,17 @@ +FROM node:20-alpine + +# Install PostgreSQL client tools for readiness checks. +RUN apk add --no-cache postgresql-client + +WORKDIR /app + +# Install dependencies first for better layer caching. +COPY package.json package-lock.json* ./ +COPY prisma ./prisma +RUN npm ci + +# Copy full source for containerized CI runs. +COPY . . 
+ +EXPOSE 3000 + diff --git a/ROADMAP.md b/ROADMAP.md index c32327ba..7374f8b1 100644 --- a/ROADMAP.md +++ b/ROADMAP.md @@ -27,6 +27,19 @@ | **Review and handle open external PRs** - Summon API routes and capability-based metadata from kanyuku | Quirin + Andre | PR #212, PR #208 | | Fix legacy wallet compatibility bug | Quirin + Andre | | +### Proof of completion + +Status of M1 tasks. Last updated 2026-04-23. + +| Task | Status | Evidence | +|------|--------|----------| +| Define review process for issues and PRs | Drafted | [`CONTRIBUTING.md`](CONTRIBUTING.md) covers issue template, branch/commit conventions, PR + review process, merge rules. Pending: team sign-off | +| Improve repository infrastructure — preprod + smoke CI | Done | `preprod` branch active; [PR #218](https://github.com/MeshJS/multisig/pull/218) merged; [`.github/workflows/ci-smoke-preprod.yml`](.github/workflows/ci-smoke-preprod.yml) landed | +| CI smoke tests on real chain (#213) | Landed, awaiting secrets | [PR #217](https://github.com/MeshJS/multisig/pull/217) merged (CI smoke system + VKey witness fix); `dc49af2` skips gracefully when secrets missing. 
All runs since have hit the skip path (~8s) because `SMOKE_*` repo secrets are not yet configured; [Issue #213](https://github.com/MeshJS/multisig/issues/213) stays open until the first real route-chain run is linked | +| Fix transaction loading bug (#211) | In review | [PR #227](https://github.com/MeshJS/multisig/pull/227) open: validates CBOR + JSON on `POST /api/v1/addTransaction` and renders a degraded "Unreadable transaction" card with Reject & Delete so already-poisoned wallets can free their UTxOs | +| Review and handle open external PRs (PR #212, PR #208) | Reviewed, awaiting author | Change requests left on [PR #212](https://github.com/MeshJS/multisig/pull/212) (rebase to `preprod`, drop non-null assertion in `useWalletBalances`, Summon `canVote` TODO) and [PR #208](https://github.com/MeshJS/multisig/pull/208) (superset of #212, recommended to close) | +| Fix legacy wallet compatibility bug | Done | [PR #210](https://github.com/MeshJS/multisig/pull/210) (legacy drep retirement) and [PR #225](https://github.com/MeshJS/multisig/pull/225) (drep deregistration fix, commit `4ae3d10`) merged; [Issue #223](https://github.com/MeshJS/multisig/issues/223) closed | + --- ## Months 2–3 — June–July 2026 @@ -119,3 +132,46 @@ - Final summary report in month 12 **GitHub milestones:** Created and issues assigned. View at [Milestones](../../milestones). + +--- + +## Task ownership + +Aggregated view of the 12-month roadmap split by contributor. Each task has a single owner; the other contributor reviews the PR. 
+ +### Quirin + +- [M1] Define review process for issues and PRs +- [M1] Fix transaction loading bug (#211) +- [M1] Handle external PR — Summon API routes (PR #212) +- [M1] Fix legacy wallet compatibility bug +- [M2–3] Improved authentication — nonce-based auth, wallet connection fixes, registration flow (#135, #53) +- [M2–3] Full address verification (#196) +- [M2–3] Transaction pagination (#30) +- [M4–6] Aiken crowdfund integration (PR #164) +- [M4–6] Governance metadata fix (#122) +- [M4–6] Proxy voting polish and documentation +- [M4–6] FROST research kickoff (#220) +- [M7–9] dApp connector — external dApps request multi-sig transactions +- [M7–9] FROST research — deliver findings, PoC, go/no-go (#220) +- [M10–12] Vesting — time-locked multi-sig contracts (#81) +- [M10–12] Performance and UX audit +- [M10–12] Invite flow (PR #67) +- [M10–12] Final summary report + +### Andre + +- [M1] Improve repository infrastructure — preprod environment and comprehensive smoke CI +- [M1] CI smoke tests on real chain (#213) +- [M1] Handle external PR — capability-based metadata (PR #208) +- [M2–3] Summon migration — land API routes and wallet import (PR #212, PR #208) +- [M2–3] Collateral service — 22 ADA → 4 UTxOs for proxy collateral (#221) +- [M2–3] Better 404 page (#22) +- [M4–6] Wallet V2 — on-chain registration and discovery (#33) +- [M4–6] Pending transactions on homepage (#125) +- [M4–6] Backlog cleanup, dependency/security updates +- [M7–9] Hardware wallet support — Ledger/Trezor (#44) +- [M7–9] Bot platform v2 — SDK, webhooks, example bots +- [M7–9] API documentation and developer portal +- [M10–12] User profiles and contacts +- [M10–12] Discover page — browse wallets, DAOs, governance (#52) diff --git a/docker-compose.ci.yml b/docker-compose.ci.yml new file mode 100644 index 00000000..57ca457a --- /dev/null +++ b/docker-compose.ci.yml @@ -0,0 +1,107 @@ +services: + postgres: + image: postgres:14-alpine + environment: + POSTGRES_USER: postgres + POSTGRES_PASSWORD: 
postgres + POSTGRES_DB: multisig + volumes: + - postgres-ci-data:/var/lib/postgresql/data + - ./docker/init-db.sh:/docker-entrypoint-initdb.d/init-db.sh:ro + healthcheck: + test: ["CMD-SHELL", "pg_isready -U postgres"] + interval: 5s + timeout: 5s + retries: 20 + networks: + - multisig-ci-network + + app: + build: + context: . + dockerfile: Dockerfile.ci + environment: + NODE_ENV: test + NEXT_TELEMETRY_DISABLED: "1" + SKIP_ENV_VALIDATION: "true" + DATABASE_URL: postgresql://postgres:postgres@postgres:5432/multisig + DIRECT_URL: postgresql://postgres:postgres@postgres:5432/multisig + JWT_SECRET: ${CI_JWT_SECRET} + NEXT_PUBLIC_BLOCKFROST_API_KEY_PREPROD: ${CI_BLOCKFROST_PREPROD_API_KEY:-} + NEXT_PUBLIC_BLOCKFROST_API_KEY_MAINNET: ${CI_BLOCKFROST_MAINNET_API_KEY:-} + BLOCKFROST_API_KEY_PREPROD: ${CI_BLOCKFROST_PREPROD_API_KEY:-} + depends_on: + postgres: + condition: service_healthy + networks: + - multisig-ci-network + command: > + sh -c " + echo 'Waiting for PostgreSQL to be ready...' && + until pg_isready -h postgres -p 5432 -U postgres; do sleep 1; done && + echo 'Running Prisma migrations...' && + npx prisma migrate deploy || npx prisma db push && + echo 'Starting application...' && + npm run dev -- --hostname 0.0.0.0 --port 3000 + " + healthcheck: + test: + - CMD-SHELL + - node -e "fetch('http://localhost:3000/api/swagger').then((r)=>process.exit(r.ok?0:1)).catch(()=>process.exit(1))" + interval: 5s + timeout: 5s + retries: 30 + + ci-runner: + build: + context: . 
+ dockerfile: Dockerfile.ci + environment: + NODE_ENV: test + NEXT_TELEMETRY_DISABLED: "1" + SKIP_ENV_VALIDATION: "true" + DATABASE_URL: postgresql://postgres:postgres@postgres:5432/multisig + DIRECT_URL: postgresql://postgres:postgres@postgres:5432/multisig + API_BASE_URL: http://app:3000 + CI_NETWORK_ID: ${CI_NETWORK_ID:-0} + CI_NUM_REQUIRED_SIGNERS: ${CI_NUM_REQUIRED_SIGNERS:-2} + CI_JWT_SECRET: ${CI_JWT_SECRET} + CI_MNEMONIC_1: ${CI_MNEMONIC_1:-} + CI_MNEMONIC_2: ${CI_MNEMONIC_2:-} + CI_MNEMONIC_3: ${CI_MNEMONIC_3:-} + CI_BLOCKFROST_PREPROD_API_KEY: ${CI_BLOCKFROST_PREPROD_API_KEY:-} + CI_WALLET_TYPES: ${CI_WALLET_TYPES:-legacy,hierarchical,sdk} + CI_SIGN_WALLET_TYPE: ${CI_SIGN_WALLET_TYPE:-legacy} + SIGN_BROADCAST: ${SIGN_BROADCAST:-true} + CI_ROUTE_SCENARIOS: ${CI_ROUTE_SCENARIOS:-} + CI_ROUTE_CHAIN_REPORT_PATH: ${CI_ROUTE_CHAIN_REPORT_PATH:-/artifacts/ci-route-chain-report.md} + CI_CONTEXT_PATH: ${CI_CONTEXT_PATH:-/tmp/ci-wallet-context.json} + CI_DREP_ANCHOR_URL: ${CI_DREP_ANCHOR_URL:-} + CI_DREP_ANCHOR_JSON: ${CI_DREP_ANCHOR_JSON:-} + CI_STAKE_POOL_ID_HEX: ${CI_STAKE_POOL_ID_HEX:-} + depends_on: + app: + condition: service_healthy + networks: + - multisig-ci-network + volumes: + - ./ci-artifacts:/artifacts + profiles: + - ci-test + command: > + sh -c " + status=0; + npx --yes tsx scripts/ci/cli/bootstrap.ts || status=$$?; + if [ \"$$status\" -eq 0 ]; then npx --yes tsx scripts/ci/cli/wallet-status.ts || status=$$?; fi; + if [ \"$$status\" -eq 0 ]; then npx --yes tsx scripts/ci/cli/route-chain.ts || status=$$?; fi; + rm -f \"${CI_CONTEXT_PATH:-/tmp/ci-wallet-context.json}\"; + exit \"$$status\" + " + +volumes: + postgres-ci-data: + +networks: + multisig-ci-network: + driver: bridge + diff --git a/docker/init-db.sh b/docker/init-db.sh index 659312d0..69147448 100755 --- a/docker/init-db.sh +++ b/docker/init-db.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/bin/sh set -e echo "Initializing database..." 
diff --git a/package.json b/package.json index ef470275..c22df500 100644 --- a/package.json +++ b/package.json @@ -17,6 +17,9 @@ "prestart": "prisma migrate deploy", "start": "next start", "test": "jest", + "test:bot:unit": "jest src/__tests__/botAuth.test.ts src/__tests__/botMe.test.ts src/__tests__/createWallet.bot.test.ts src/__tests__/walletIds.bot.test.ts src/__tests__/pendingTransactions.bot.test.ts src/__tests__/freeUtxos.bot.test.ts src/__tests__/addTransaction.bot.test.ts src/__tests__/nativeScript.bot.test.ts src/__tests__/governanceActiveProposals.test.ts src/__tests__/botBallotsUpsert.test.ts src/__tests__/signTransaction.bot.test.ts src/__tests__/submitDatum.bot.test.ts src/__tests__/resolveUtxoRefsFromChain.test.ts src/__tests__/resolveDRepAnchorFromUrl.test.ts src/__tests__/normalizePoolId.test.ts src/__tests__/createPendingMultisigTransaction.test.ts src/__tests__/proxyUtxos.test.ts src/__tests__/proxyTxBuilders.test.ts src/__tests__/proxySetup.bot.test.ts src/__tests__/proxyCleanup.bot.test.ts src/__tests__/proxyAccess.test.ts src/__tests__/proxySetupFinalization.test.ts src/__tests__/proxyCleanupFinalization.test.ts src/__tests__/proxyCiPreflight.test.ts src/__tests__/proxyCiOrphanAdoption.test.ts src/__tests__/proxyCiChainRecovery.test.ts src/__tests__/proxyBotSelection.test.ts src/__tests__/proxyCleanupRuntime.test.ts src/__tests__/ciSigningSelection.test.ts src/__tests__/ciScenarioManifest.test.ts", + "test:bot:integration": "jest src/__tests__/botApi.integration.test.ts --runInBand", + "test:bot": "npm run test:bot:unit && npm run test:bot:integration", "test:watch": "jest --watch", "test:coverage": "jest --coverage", "test:ci": "jest --ci --coverage --watchAll=false", diff --git a/railway.toml b/railway.toml new file mode 100644 index 00000000..77cda9e6 --- /dev/null +++ b/railway.toml @@ -0,0 +1,5 @@ +[build] +buildCommand = "npm install && npm run build" + +[deploy] +startCommand = "npm run start" diff --git a/scripts/bot-ref/README.md 
b/scripts/bot-ref/README.md index 10225ab2..71c3f111 100644 --- a/scripts/bot-ref/README.md +++ b/scripts/bot-ref/README.md @@ -54,7 +54,7 @@ npm install ```bash curl -sS -X POST http://localhost:3000/api/v1/botRegister \ -H "Content-Type: application/json" \ - -d '{"name":"Reference Bot","paymentAddress":"addr1_xxx","scopes":["multisig:read"]}' + -d '{"name":"Reference Bot","paymentAddress":"addr1_xxx","requestedScopes":["multisig:read","multisig:sign"]}' ``` Response includes `pendingBotId` and `claimCode`. @@ -134,11 +134,43 @@ echo '{"name":"Me and Bot","signersAddresses":["addr1_your...","addr1_bot..."]," Optional fields: `description`, `signersDescriptions`, `signersStakeKeys`, `signersDRepKeys`, `numRequiredSigners`, `scriptType` (`atLeast`|`all`|`any`), `stakeCredentialHash`, `network` (0=testnet, 1=mainnet). -### 8. Generate a bot wallet (testing) +### 8. Stake certificate (SDK multisig) + +The bot must have **multisig:sign** and be a **cosigner** on the wallet. The server builds the same Mesh stake certificates as the UI (`register`, `deregister`, `delegate`, `register_and_delegate`). **Legacy and Summon wallets are rejected.** + +1. List free UTxOs and pick inputs; each body field is `txHash` + `outputIndex` as returned by the API. +2. POST `walletId`, `address` (must match JWT / bot payment address), `action`, optional `poolId` (required for `delegate` and `register_and_delegate`; bech32 `pool1...` or 56-char hex), and `utxoRefs`. + +```bash +# stake.json example: +# { +# "walletId": "", +# "address": "", +# "action": "delegate", +# "poolId": "pool1...", +# "utxoRefs": [{ "txHash": "...", "outputIndex": 0 }], +# "description": "Delegate via bot" +# } +export BOT_TOKEN='' +npx tsx bot-client.ts stakeCert stake.json +``` + +If `numRequiredSigners > 1`, the response is a pending `Transaction` row; co-sign with `POST /api/v1/signTransaction` as usual. + +### 9. DRep certificate (register / retire) + +Also requires **multisig:sign**. 
**Summon** wallets are rejected; **legacy** wallets use payment-script DRep derivation (same as the app). For `register`, send both `anchorUrl` and `anchorJson`; the server does not fetch the URL and computes `hashDrepAnchor(anchorJson)` from the object you provide. + +```bash +# drep-register.json — anchorUrl and anchorJson required for register +npx tsx bot-client.ts drepCert drep-register.json +``` + +### 10. Generate a bot wallet (testing) From **repo root**: `npx tsx scripts/bot-ref/generate-bot-wallet.ts` — creates gitignored `bot-wallet.json` (mnemonic + address) and updates `bot-config.json`. -### 9. Create “Me and Bot” 2-of-2 wallet +### 11. Create “Me and Bot” 2-of-2 wallet ```bash cd scripts/bot-ref && npx tsx create-wallet-us.ts @@ -162,6 +194,40 @@ BOT_TOKEN='...' BOT_CONFIG_PATH=bot-config.json npx tsx bot-client.ts walletIds The reference client only uses **bot-key auth** (POST /api/v1/botAuth). Wallet-based auth (getNonce + sign + authSigner) would require a real Cardano signer; implement that in your bot if needed. +## Proxy bot API + +Proxy routes use the normal pending multisig flow and require `multisig:sign` plus **cosigner** access for mutating calls. `GET /api/v1/proxies` and `GET /api/v1/proxyDRepInfo` allow bot observer access. The reference CLI does not wrap these routes yet; call them directly with `BOT_TOKEN`. + +All proxy transaction builders accept UTxO references, not raw UTxO JSON: + +```json +{ "txHash": "", "outputIndex": 0 } +``` + +Use `GET /api/v1/freeUtxos?walletId=...&address=...&fresh=true` to select wallet inputs. `collateralRef` must be an ADA-only UTxO with at least 5 ADA at the bot payment address. Proxy actions also require a wallet input containing the proxy auth token, returned from setup/finalization metadata as `authTokenId`. + +### Setup and finalize + +1. `POST /api/v1/proxySetup` with `walletId`, `address`, `utxoRefs`, `collateralRef`, optional `initialProxyLovelace`, and optional `description`. +2. 
Sign the returned pending transaction with the required wallet signers. Proxy transactions are persisted with no initial signed addresses, so the proposer still needs to sign through `signTransaction`. +3. After the setup transaction is confirmed, call `POST /api/v1/proxySetupFinalize` with `walletId`, `address`, `txHash`, `proxyAddress`, `authTokenId`, and `paramUtxo` from the setup response. The server validates the confirmed setup outputs and creates or reactivates the `Proxy` row. +4. `GET /api/v1/proxies?walletId=...&address=...` lists active confirmed proxies. + +### Spend, DRep, and vote + +- `POST /api/v1/proxySpend`: sends proxy-held assets to `outputs[]`. If `proxyUtxoRefs` is omitted, the server selects proxy-address UTxOs sufficient for the requested outputs plus fee buffer. +- `POST /api/v1/proxyDRepCertificate`: `action` is `register`, `update`, or `deregister`. `register` and `update` require both `anchorUrl` and `anchorJson`; the server computes the anchor hash from `anchorJson`. +- `GET /api/v1/proxyDRepInfo`: returns `{ active, dRepId }` for the proxy script DRep credential. +- `POST /api/v1/proxyVote`: votes as the proxy DRep. Each vote uses `proposalId` in `#` form and `voteKind` of `Yes`, `No`, or `Abstain`. + +### Cleanup + +`POST /api/v1/proxyCleanup` is safe to call repeatedly during lifecycle cleanup: + +1. If the proxy address still has UTxOs, it returns cleanup phase `sweep`; sign and submit that transaction, then wait until the proxy address is empty. When `proxyUtxoRefs` is provided, it must include every currently visible proxy UTxO. +2. Call `POST /api/v1/proxyCleanup` again. When cleanup phase is `burn`, sign and submit the burn transaction. +3. After burn confirmation, call `POST /api/v1/proxyCleanupFinalize` with the confirmed burn `txHash`. The server validates that the auth token was spent and not recreated, that the proxy address has no UTxOs, and deactivates the proxy row unless `deactivateProxy` is `false`. 
+ ## Governance bot flow For governance automation, request and approve these bot scopes during register/claim: diff --git a/scripts/bot-ref/bot-client.ts b/scripts/bot-ref/bot-client.ts index 57d8e980..f4e8696d 100644 --- a/scripts/bot-ref/bot-client.ts +++ b/scripts/bot-ref/bot-client.ts @@ -238,11 +238,68 @@ export async function createWallet( return (await res.json()) as { walletId: string; address: string; name: string }; } +/** Build stake certificate tx (SDK wallets; bot needs multisig:sign). */ +export async function botStakeCertificate( + baseUrl: string, + token: string, + body: { + walletId: string; + address: string; + action: "register" | "deregister" | "delegate" | "register_and_delegate"; + poolId?: string; + utxoRefs: { txHash: string; outputIndex: number }[]; + description?: string; + }, +): Promise { + const base = ensureSlash(baseUrl); + const res = await fetch(`${base}/api/v1/botStakeCertificate`, { + method: "POST", + headers: { + "Content-Type": "application/json", + Authorization: `Bearer ${token}`, + }, + body: JSON.stringify(body), + }); + if (!res.ok) { + throw new Error(`botStakeCertificate failed ${res.status}: ${await res.text()}`); + } + return res.json(); +} + +/** Build DRep register/retire tx (bot needs multisig:sign). 
*/ +export async function botDRepCertificate( + baseUrl: string, + token: string, + body: { + walletId: string; + address: string; + action: "register" | "retire"; + utxoRefs: { txHash: string; outputIndex: number }[]; + description?: string; + anchorUrl?: string; + anchorDataHash?: string; + }, +): Promise { + const base = ensureSlash(baseUrl); + const res = await fetch(`${base}/api/v1/botDRepCertificate`, { + method: "POST", + headers: { + "Content-Type": "application/json", + Authorization: `Bearer ${token}`, + }, + body: JSON.stringify(body), + }); + if (!res.ok) { + throw new Error(`botDRepCertificate failed ${res.status}: ${await res.text()}`); + } + return res.json(); +} + async function main() { const config = await loadConfig(); const cmd = process.argv[2]; if (!cmd) { - console.error("Usage: bot-client.ts [args]"); + console.error("Usage: bot-client.ts [args]"); console.error(" register [scope1,scope2,...] [paymentAddress] - create pending bot + claim code"); console.error(" pickup - pickup botKeyId + secret after human claim"); console.error(" auth - authenticate and print token"); @@ -252,6 +309,8 @@ async function main() { console.error(" ownerInfo - get wallet owner info"); console.error(" botMe - get bot's own info (incl. 
owner address)"); console.error(" createWallet [file] - create wallet via API (body from file or stdin); bot needs multisig:create"); + console.error(" stakeCert [file] - POST /api/v1/botStakeCertificate (JSON body file or stdin); needs multisig:sign"); + console.error(" drepCert [file] - POST /api/v1/botDRepCertificate (JSON body file or stdin); needs multisig:sign"); console.error("Env: BOT_CONFIG (JSON), BOT_CONFIG_PATH, BOT_TOKEN (after auth)."); process.exit(1); } @@ -402,6 +461,44 @@ async function main() { console.log(JSON.stringify(result, null, 2)); break; } + case "stakeCert": { + const fileArg = process.argv[3]; + let raw: string; + if (fileArg) { + const { readFileSync } = await import("fs"); + const { join } = await import("path"); + raw = readFileSync(fileArg.startsWith("/") ? fileArg : join(process.cwd(), fileArg), "utf8"); + } else { + const { createInterface } = await import("readline"); + const rl = createInterface({ input: process.stdin, terminal: false }); + const lines: string[] = []; + for await (const line of rl) lines.push(line); + raw = lines.join("\n"); + } + const body = JSON.parse(raw) as Parameters[2]; + const result = await botStakeCertificate(config.baseUrl, token, body); + console.log(JSON.stringify(result, null, 2)); + break; + } + case "drepCert": { + const fileArg = process.argv[3]; + let raw: string; + if (fileArg) { + const { readFileSync } = await import("fs"); + const { join } = await import("path"); + raw = readFileSync(fileArg.startsWith("/") ? 
fileArg : join(process.cwd(), fileArg), "utf8"); + } else { + const { createInterface } = await import("readline"); + const rl = createInterface({ input: process.stdin, terminal: false }); + const lines: string[] = []; + for await (const line of rl) lines.push(line); + raw = lines.join("\n"); + } + const body = JSON.parse(raw) as Parameters[2]; + const result = await botDRepCertificate(config.baseUrl, token, body); + console.log(JSON.stringify(result, null, 2)); + break; + } default: console.error("Unknown command:", cmd); process.exit(1); diff --git a/scripts/ci/README.md b/scripts/ci/README.md new file mode 100644 index 00000000..f760bec5 --- /dev/null +++ b/scripts/ci/README.md @@ -0,0 +1,533 @@ +# CI Route-Chain Test Suite + +This folder contains the real-chain CI smoke system used by `.github/workflows/pr-multisig-v1-smoke.yml`. + +## Why this exists + +- Protects v1 API routes from regressions on pull requests. +- Verifies behavior against real blockchain conditions (preprod), not only mocked/unit paths. +- Keeps wallet bootstrap stable while allowing route tests to grow incrementally. +- Makes it easy to add new API route checks as composable scenario steps. + +## High-level flow + +CI runs these stages in order: + +1. **Bootstrap** (`cli/bootstrap.ts`) + - Derives signer payment addresses from mnemonic secrets and matching stake (reward) addresses from those base addresses. + - Provisions one bot key per signer address. + - Creates test wallets (`legacy`, `hierarchical`, `sdk`). + - For **SDK** wallets, always attaches `signersStakeKeys` so the wallet matches production “SDK multisig” staking (native script role `2` alongside payment `0` and DRep `3`). + - Grants all signer bots cosigner access to created wallets. + - Writes a versioned context JSON consumed by all later steps. + +2. **Route chain** (`cli/route-chain.ts`) + - Loads and validates bootstrap context. + - Loads enabled scenarios from `scenarios/manifest.ts`. 
+ - Executes steps in deterministic order with critical/non-critical failure semantics. + - Emits console summary and machine-readable JSON report. + +3. **Artifacts** + - Route-chain Markdown report is written to `ci-artifacts/ci-route-chain-report.md`. + - Workflow uploads it as an artifact for triage. + - Report contains a run summary header, wallet balance table, scenario summary table, and per-scenario step tables. Failed steps include error/artifact code blocks. + +## Folder structure + +- `cli/` + - `bootstrap.ts`: stable setup stage, writes CI context. + - `wallet-status.ts`: print multisig wallet addresses and on-chain balances (after bootstrap, before route-chain). + - `route-chain.ts`: main orchestrator for scenario execution. + - `inspect-context.ts`: print bootstrap context summary (debug). +- `framework/` + - `types.ts`: shared types for context/scenarios/reports. + - `context.ts`: context loading + validation. + - `env.ts`, `mnemonic.ts`, `walletType.ts`, `preprod.ts`: shared env and Cardano helpers. + - `botProvision.ts`: bot key hashing for bootstrap. + - `botAuth.ts`: bot JWT authentication with in-process token caching (10 s expiry margin) and 429-rate-limit retry. + - `botContext.ts`: bot selection helpers (`getDefaultBot`, `getBotForAddress`, `getBotForSignerIndex`). + - `http.ts`: API caller helper with timeout/retry support. + - `walletAuth.ts`: nonce + signer auth helper (`getNonce`/`authSigner`) and signer data signing. + - `datumSign.ts`: reusable datum signing helper. + - `governance.ts`: deterministic governance proposal selection and ballot payload builder. + - `runner.ts`: scenario/step execution + report writing. + - `walletBalances.ts`: on-chain UTxO balance collection via Blockfrost (used by `walletBalanceSummary` in report). + - `redact.ts`: recursive sensitive-value redaction for log-safe JSON serialisation. +- `scenarios/` + - `manifest.ts`: scenario registry and ordering only. 
+ - `proxyLifecyclePreflight.ts`: proxy lifecycle ADA/UTxO budget constants and shape analysis. + - `flows/`: `signingFlow.ts`, `transferFlow.ts`, `certificateSigningFlow.ts`, `utxoShapeFlow.ts` (reusable multisig sign, real transfer builders, stake-cert signing with dual payment+stake witnesses, and proxy lifecycle self-split shaping). + - `steps/`: route step factories grouped by area (`discovery.ts`, `botIdentity.ts`, `authPlane.ts`, `datum.ts`, `governance.ts`, `transferRing.ts`, `certificates.ts`, `walletLifecycle.ts`, `proxyBot.ts`, …) plus `helpers.ts` (ring wallet-type utilities) and `template-route-step.ts` for new steps. + +### Full scenario execution order + +The manifest runs scenarios in this fixed sequence: + +| # | Scenario ID | Conditional | +|---|-------------|-------------| +| 1 | `scenario.wallet-discovery` | always | +| 2 | `scenario.ada-route-health` | always | +| 3 | `scenario.create-wallet` | always | +| 4 | `scenario.bot-identity` | always | +| 5 | `scenario.auth-plane` | always | +| 6 | `scenario.proxy-smoke` | always | +| 7 | `scenario.submit-datum` | always | +| 8 | `scenario.governance-routes` | always | +| 9 | `scenario.drep-certificates` | legacy + sdk wallets present | +| 10 | `scenario.stake-certificates` | sdk wallet present | +| 11 | `scenario.proxy-full-lifecycle` | legacy, hierarchical, and/or sdk wallets present | +| 12 | `scenario.real-transfer-and-sign` | always (all 3 wallet types required) | +| 13 | `scenario.final-assertions` | always | + +Certificate scenarios (9–10) run before the ring transfer so they spend confirmed UTxOs; the ring transfer would put those UTxOs in the mempool and create a race. + +### Subset runs + +Use a comma-separated `CI_ROUTE_SCENARIOS` filter (same mechanism as the workflow dispatch input). 
+ +Quick auth + discovery smoke (no on-chain transfers, finishes in seconds): + +```bash +CI_ROUTE_SCENARIOS=scenario.wallet-discovery,scenario.ada-route-health,scenario.bot-identity,scenario.auth-plane,scenario.proxy-smoke +``` + +Wallet creation API only: + +```bash +CI_ROUTE_SCENARIOS=scenario.create-wallet +``` + +Ring transfer + final checks only: + +```bash +CI_ROUTE_SCENARIOS=scenario.real-transfer-and-sign,scenario.final-assertions +``` + +Set `CI_ROUTE_CHAIN_REPORT_PATH` if you want a separate report file for that run. + +## Current scenario intent + +The manifest currently covers: + +- route discovery (`walletIds`, `proxies`) +- **pending-transactions zero-check** at bootstrap for each wallet type — catches stale state from a previous incomplete run before the ring transfer begins +- **public wallet lookup** (`lookupMultisigWallet`) — smoke-tests the unauthenticated on-chain metadata lookup endpoint +- route health checks (`freeUtxos`, `nativeScript`) — `nativeScript` now asserts a `payment` script entry is present and, when the root type is `atLeast`, that `required` matches `CI_NUM_REQUIRED_SIGNERS` +- **wallet creation via API** (`createWallet`) — creates a wallet through the bot-authenticated API path and confirms it appears in `walletIds`; runs early to avoid prior default-bot smoke checks consuming the shared bot rate-limit budget +- bot identity (`botAuth` explicit response shape, `botMe`) +- auth-plane checks (`getNonce`, `authSigner`) +- explicit auth negative checks (`walletIds`, `addTransaction`, `pendingTransactions`, `drepInfo`, `stakeAccountInfo`, `createWallet`) — `drepInfo`/`stakeAccountInfo`/`createWallet` check for missing token (401); `walletIds`/`addTransaction` check for address mismatch (403); `pendingTransactions` checks for missing token (401) +- proxy smoke checks (`proxies`, malformed proxy mutating routes) plus full proxy lifecycle coverage (`proxySetup`, `proxySpend`, proxy DRep register/deregister, optional proxy vote, 
cleanup, finalization) +- **`signTransaction` input validation** — asserts a non-existent `transactionId` returns 404, not 500 (requires `CI_MNEMONIC_2`; step is non-critical and skips gracefully if the env var is absent) +- datum route coverage (`submitDatum`) +- governance routes (`governanceActiveProposals`, `botBallotsUpsert`) +- **DRep certificate registration and retirement** (`botDRepCertificate`) — legacy and SDK wallets +- **stake certificate registration and deregistration** (`botStakeCertificate`) — SDK wallet only +- real multisig-wallet ring transfer + sign path +- pending lifecycle assertions for ring transfer txs only +- final state assertions after transfer/sign progression + +### Proxy bot scenarios + +`scenario.proxy-smoke` runs by default and performs authenticated `proxies` read checks plus negative validation checks that should fail before chain mutation. + +`scenario.proxy-full-lifecycle` runs by default in PR smoke for `legacy`, `hierarchical`, and `sdk` wallets when present. The hierarchical coverage reuses the wallet already created for route-chain context and the ring transfer; it does not add a new bootstrap wallet path. It starts each eligible wallet type with three pre-hygiene steps before normal setup: chain recovery reconstructs missing `Proxy` rows from proxy auth tokens still visible at the current CI wallet address, row adoption reattaches valid rows from historical deterministic CI wallets, and hygiene cleans any active rows before the new lifecycle begins. It then runs UTxO shaping and a funding preflight that fetches fresh `freeUtxos`. The hardcoded lifecycle budget is 536 ADA per eligible wallet: 505 ADA DRep registration, 10 ADA initial proxy funding, 1 ADA planned proxy spend, and a 20 ADA fee buffer. Because collateral is reserved outside selected spend inputs, the practical minimum post-shape layout is at least 536 ADA selectable at the multisig wallet address plus a separate ADA-only bot payment-address collateral UTxO. 
The self-split path needs enough total ADA to leave that 536 ADA selectable budget, create a 6 ADA collateral output, and cover a 2 ADA self-split fee buffer. Adding hierarchical means default PR smoke needs that budget available for one more wallet. Proxy DRep registration uses `CI_DREP_ANCHOR_URL` as the on-chain anchor URL and sends an inline route-chain `anchorJson`; it does not use `CI_DREP_ANCHOR_JSON`. + +The first full-lifecycle steps for each eligible wallet type are ordered as: + +1. `v1.proxy.full.recoverFromChain.` +2. `v1.proxy.full.adoptOrphans.` +3. `v1.proxy.full.hygiene.` +4. `v1.proxy.full.utxoShape.` +5. `v1.proxy.full.preflight.` + +Chain recovery is CI-only and evidence-based. It scans non-lovelace assets at the current bootstrap `walletAddress`, asks Blockfrost for each asset's mint transaction, tests the mint transaction inputs as candidate `paramUtxo` values with `deriveProxyScripts`, and only creates or reactivates a `Proxy` row when the derived `authTokenId` exactly matches the observed asset unit. This handles clean-database rebuilds where old proxy auth tokens and proxy DReps remain on-chain but the app has no `Proxy` rows. It cannot recover a proxy if the auth token is no longer discoverable at the current CI wallet address. + +When preflight passes, each eligible wallet lifecycle creates its own proxy, finalizes the confirmed setup, exercises proxy spend, proxy DRep register/deregister, optional proxy voting when active governance proposals exist, then runs safe cleanup and asserts the proxy no longer appears in `GET /api/v1/proxies`. Proxy actions always use bot payment-address collateral that is distinct from selected wallet spend inputs; DRep registration selects an auth-token input plus additional wallet inputs when needed to meet the registration budget. The proposer/collateral owner is signer index 0 (`CI_MNEMONIC_1`), and signer index 1 (`CI_MNEMONIC_2`) broadcasts for the default threshold-2 proxy actions. 
After each broadcasted proxy action, the route-chain waits for the selected wallet inputs to disappear from fresh `freeUtxos` before proposing the next action. Cleanup may require two submitted transactions: a sweep transaction that empties the proxy address while preserving an auth token, followed by a burn transaction and cleanup finalization. If the initial cleanup call already returns a burn transaction, the optional burn proposal is skipped after that transaction is signed. Because this scenario runs on every PR, the default CI legacy, hierarchical, and SDK wallets must stay funded; one-UTxO shape problems are repaired by the self-split step, while true budget failures still fail the route-chain rather than skipping proxy lifecycle coverage. + +Runtime expectation: `scenario.proxy-smoke` is the quick, non-mutating proxy subset. `scenario.proxy-full-lifecycle` is a real-chain scenario with multiple broadcasts per eligible wallet and can dominate default PR smoke duration during slow preprod/Blockfrost periods. The GitHub Actions job timeout is intentionally higher than the nominal happy path to leave room for confirmation polling. + +For each tested wallet type, the `nativeScript` step stores decoded script payloads in step artifacts (`artifacts.nativeScripts`) and the list of script entry types (`artifacts.scriptTypes`) inside `ci-route-chain-report.md`, so script structure is visible during CI triage. + +Signing is expected to be on, and broadcast is expected to be on, for normal CI route-chain runs. + +### Ring transfer + +Current transfer/sign chain in the route manifest runs a deterministic ring across multisig wallet addresses: + +- `legacy.walletAddress -> hierarchical.walletAddress` +- `hierarchical.walletAddress -> sdk.walletAddress` +- `sdk.walletAddress -> legacy.walletAddress` + +Each ring leg uses the same `CI_TRANSFER_LOVELACE` amount, so balances remain close after one cycle (differences are fee-driven). 
+ +Real transfer construction is script-native: + +- route-chain spends UTxOs from the source multisig wallet script address +- destination is the next multisig wallet script address in the ring +- change returns to the source multisig wallet script address +- signer mnemonics are used for witness collection/signing, not as transfer funding inputs + +For each ring leg, signing runs two signer rounds: + +- signer index 1 (`CI_MNEMONIC_2`) signs with broadcast disabled +- signer index 2 (`CI_MNEMONIC_3`) signs with broadcast enabled + +Each leg is asserted as pending immediately after `addTransaction`, then asserted removed after signer 2 broadcast. + +### Create-wallet scenario (`scenario.create-wallet`) + +Runs after the early discovery and ADA route-health checks, before request-heavy default-bot scenarios. This keeps the app's rate-limit behavior intact while avoiding earlier smoke checks consuming the shared bot rate-limit budget before the positive wallet creation assertion. Requires `multisig:create` scope on the CI bot (provisioned by default). + +**Step 1** — calls `POST /api/v1/createWallet` with the CI signer addresses and the `CI_NUM_REQUIRED_SIGNERS` threshold. Asserts the response is 201 with a `walletId` and `address`. + +**Step 2** — calls `GET /api/v1/walletIds` for the bot and asserts the new `walletId` is present. This confirms the bot's cosigner access was set correctly during wallet creation. + +**Step 3 (cleanup, non-critical)** — deletes the test wallet directly via Prisma (`WalletBotAccess` rows first, then the `Wallet` row). Marked non-critical so a cleanup failure does not fail the scenario. If cleanup is skipped (e.g. step 1 failed), no orphan wallet is left behind. + +### DRep certificate scenarios (`scenario.drep-certificates`) + +Runs when both `legacy` and `sdk` wallets are in context. Requires `CI_DREP_ANCHOR_URL`. 
+ +For each wallet type the scenario runs a pre-hygiene step followed by two sequential phases — register then retire — leaving the wallet in its pre-test DRep state: + +**Pre-hygiene step** — checks on-chain DRep state via `GET /api/v1/drepInfo`. If the DRep is already registered (e.g. from a previous incomplete run), it proposes a `retire` tx, signs with both signers, and waits for on-chain confirmation. If the broadcast is rejected with `DRepNotRegistered` or similar errors, the credential is treated as already clean (stale Blockfrost cache false-positive) and the step succeeds silently. + +**Main test phases:** + +1. Fetch free UTxOs from the wallet, call `POST /api/v1/botDRepCertificate` with `action: "register"`, `anchorUrl`, and `anchorJson` (the parsed JSON from `CI_DREP_ANCHOR_JSON`). The API computes the anchor data hash server-side from `anchorJson` — no outbound fetch anywhere. +2. Assert the transaction appears in pending. +3. Signer 1 (`CI_MNEMONIC_2`, index 1) adds a payment-key witness, no broadcast. +4. Signer 2 (`CI_MNEMONIC_3`, index 2) adds a payment-key witness and broadcasts. +5. Assert the transaction is cleared from pending. +6. Poll `freeUtxos?fresh=true` until the spent inputs are no longer unspent on-chain (confirms block inclusion before the next phase). Up to 30 retries × 8 s = 4 minutes. +7. Repeat steps 1–6 with `action: "retire"`. + +**Why payment-key witnesses are sufficient for DRep cert:** + +- **Legacy wallet:** the DRep credential script is the same as the payment script (no separate DRep keys), so the same payment vkeys satisfy both the spending inputs and the DRep certificate. +- **SDK wallet:** the CI bootstrap sets `signersDRepKeys = paymentKeyHashes`, so the DRep certificate script is also built from payment key hashes. Payment vkeys satisfy both scripts. + +### Stake certificate scenarios (`scenario.stake-certificates`) + +Runs when the `sdk` wallet is in context. Does not require `CI_DREP_ANCHOR_URL`. 
**`CI_STAKE_POOL_ID_HEX` is required** — it is passed as `poolId` in the `register_and_delegate` body. + +The scenario runs three phases: + +**Pre-hygiene step** — before the main test, checks on-chain state via `GET /api/v1/stakeAccountInfo`. If the stake credential is already registered (e.g. from a previous incomplete run), it proposes a `deregister` tx, signs with both signers, and waits for on-chain confirmation. If the broadcast is rejected with `StakeKeyNotRegisteredDELEG` or similar errors, the credential is treated as already clean (stale Blockfrost cache false-positive) and the step succeeds silently. + +**Main test: `register_and_delegate`** — uses `register_and_delegate` rather than bare `register` because production `stakingCertificates.ts` includes `.certificateScript()` on the register cert. In Conway era a bare register cert with a script witness causes `ExtraneousScriptWitnessesUTXOW`; `register_and_delegate` avoids this because the delegate cert legitimately requires the same staking script. Each phase follows 6 steps (propose → pending → sign1 → sign2+broadcast → cleared → on-chain confirmation poll). + +**Main test: `deregister`** — restores the wallet to its pre-test staking state. Same 6-step flow. + +Each signing step uses **`runStakeCertSigningFlow`** (`scenarios/flows/certificateSigningFlow.ts`) instead of the standard `runSigningFlow`, because the staking certificate script uses **stake key hashes** (role-2 keys) rather than payment key hashes: + +1. `MeshWallet.signTx(txCbor, true)` produces both a payment vkey witness and a stake vkey witness. +2. The flow extracts the payment vkey (matched by `resolvePaymentKeyHash(signerAddress)`) and the stake vkey (matched by `resolveStakeKeyHash(ctx.signerStakeAddresses[signerIndex])`). If the stake vkey cannot be found by key-hash search, the flow falls back to BIP32 derivation at path `m/1852'/1815'/0'/2/0` and signs the tx hash manually. +3. 
Both are submitted in a **single** `POST /api/v1/signTransaction` call via the optional `stakeKey` / `stakeSignature` body fields — this avoids hitting the "address already signed" guard that would block a second call from the same signer. + +`signTransaction` validates the stake witness by checking that its key hash is present in `wallet.signersStakeKeys` (resolved to key hashes). The stake witness is merged into the transaction CBOR alongside the payment witness before the broadcast threshold check runs. + +## Environment and secrets + +Primary variables (in workflow/compose): + +- `CI_JWT_SECRET` +- `CI_MNEMONIC_1`, `CI_MNEMONIC_2`, `CI_MNEMONIC_3` +- `CI_BLOCKFROST_PREPROD_API_KEY` +- `CI_NETWORK_ID` +- `CI_WALLET_TYPES` +- `CI_NUM_REQUIRED_SIGNERS` (default `2`): minimum signature threshold written into each created wallet's native script. Passed as `requiredSigners` during bootstrap. Also used by the `nativeScript` step to assert that the decoded `atLeast` script's `required` count matches, and by `scenario.create-wallet` as the `numRequiredSigners` parameter. +- `CI_SIGN_WALLET_TYPE` (default `legacy`): which wallet type is used when `runSigningFlow` resolves a wallet for signing in ring-transfer steps. Overridden per leg in transfer scenarios. +- `SIGN_BROADCAST` +- `CI_ROUTE_SCENARIOS` (optional scenario id filter) +- `CI_TRANSFER_LOVELACE` (optional transfer amount) +- `CI_DREP_ANCHOR_URL` (required by the default run for `scenario.drep-certificates` and `scenario.proxy-full-lifecycle`): the URL string stored in the on-chain anchor — passed as-is to the API, never fetched. +- `CI_DREP_ANCHOR_JSON` (required by the default run for `scenario.drep-certificates`): the raw JSON content of the CIP-119 DRep metadata document. Parsed and sent as `anchorJson`; the API computes the anchor data hash server-side — no outbound fetch anywhere. Both vars are forwarded into the `ci-runner` container via `docker-compose.ci.yml`. 
+- `CI_STAKE_POOL_ID_HEX` (**required** for `scenario.stake-certificates`): hex stake pool id stored in bootstrap context and used as `poolId` in the `register_and_delegate` certificate body. +- `CI_HTTP_RETRIES` (default `6`), `CI_HTTP_RETRY_DELAY_MS` (default `1000`), `CI_HTTP_MAX_RETRY_DELAY_MS` (default `30000`): route-chain API retry controls for transient responses (`429`, `418`, and selected `5xx`). Defaults are long enough to ride out the app's 60-second in-process rate-limit window without changing app behavior. + +Validation notes: + +- Route-chain transfer scenarios are preprod-only; `CI_NETWORK_ID` must be `0`. +- Signer/bot/wallet addresses used in context must all be testnet-form (`addr_test` / `stake_test`). +- `CI_WALLET_TYPES` must contain only `legacy`, `hierarchical`, `sdk`; invalid values fail fast. +- The default full route-chain (including ring transfer scenario) requires all three wallet types (`legacy`, `hierarchical`, `sdk`) to be present. +- `CI_ROUTE_SCENARIOS` values must exist in `scenarios/manifest.ts`; unknown ids fail fast. +- `CI_MNEMONIC_1`, `CI_MNEMONIC_2`, and `CI_MNEMONIC_3` must derive signer addresses from bootstrap context for multi-signer route-chain signing. Signer indexes are zero-based relative to `wallet.signerAddresses`. +- `CI_STAKE_POOL_ID_HEX` must be set when running `scenario.stake-certificates`; the scenario throws at proposal time if `ctx.stakePoolIdHex` is absent. +- Proxy full lifecycle runs by default for legacy, hierarchical, and SDK wallets when present. Before new proxy setup, route-chain first recovers any chain-discoverable proxy rows, adopts historical rows for the same deterministic wallet script, and runs hygiene so stale proxy DReps/auth tokens are cleaned centrally. Those CI wallets must each have enough selectable multisig-wallet ADA for initial proxy funding, the planned proxy spend, DRep registration, and fee headroom, plus an ADA-only collateral UTxO at `bot.paymentAddress`. 
If total ADA is sufficient but the UTxO shape is not, route-chain self-splits the wallet's UTxOs before proxy preflight
Because the staking script uses **stake key hashes** (distinct from payment key hashes), `signTransaction` accepts an optional `stakeKey` / `stakeSignature` pair validated against `wallet.signersStakeKeys`. +- **DRep registration / voting:** **Legacy** wallets use a **single** script (payment-only) for both spending and DRep identity. **SDK** wallets with DRep keys use the **payment** script for inputs and a **DRep** script (`buildScript(3)`) for DRep certificates. In the CI bootstrap `signersDRepKeys` is set to the payment key hashes, so standard payment-key witnesses satisfy the DRep certificate script without any additional witness type. + +Security guarantees: + +- The context file does not store bot JWT tokens. +- The context file does not store bot secrets. +- Route steps authenticate bots on demand at runtime. +- `docker-compose.ci.yml` removes the context file after route-chain execution. +- Failure log upload applies token/secret/mnemonic/private-key redaction filters. + +Limitation: + +- If application code logs sensitive values directly, redaction can miss uncommon formats. +- Treat uploaded logs as diagnostic artifacts, not as guaranteed zero-leak outputs. + +Logging policy (required for contributors): + +- It is acceptable to log non-sensitive diagnostics: wallet IDs, transaction hashes, key hashes, and testnet addresses. +- Never log raw secrets: mnemonics, private keys/signing keys, bot auth secrets, bearer tokens, or API keys. +- Redaction is best-effort safety net; route steps and helpers must avoid printing sensitive raw values in the first place. + +Safe-to-print checklist for new route/scenario code: + +- Safe: `walletId`, `transactionId`/tx hash, `paymentAddress`/`stakeAddress` (testnet), `keyHash`, scenario ids/status. +- Forbidden: any `CI_MNEMONIC_*` value, any `xprv*`/`ed25519*_sk*` material, `Authorization` headers, `secret`/`token` payload fields. 
+ +## Report format + +`ci-route-chain-report.md` is a Markdown file structured for human triage. It contains: + +1. **Run header** — overall status, timestamp, duration, network, wallet types. +2. **Wallet balances table** — UTxO count and ADA balance per wallet type at run end. Native asset counts noted when present. +3. **Scenario summary table** — pass/fail, step pass rate, and duration per scenario. +4. **Step detail sections** — one subsection per scenario with a step table (step ID, duration, result message). Failed steps include their error and artifacts as code blocks. Passing step artifacts are intentionally omitted from Markdown, so use the step message and rerun targeted scenarios when detailed recovery diagnostics are needed. + +Balance source: direct on-chain UTxO lookup per wallet address from bootstrap context (includes UTxOs referenced by pending transactions). Lovelace values shown as ADA (2 d.p.). If balance collection fails, a warning line replaces the table. + +## Proxy Full Lifecycle UTxO Shaping + +`scenario.proxy-full-lifecycle` needs a wallet script UTxO for proxy setup/spend and a separate key-address collateral UTxO at `bot.paymentAddress` for each eligible wallet type (`legacy`, `hierarchical`, `sdk`). When a funded wallet has enough ADA but lacks the required wallet/key UTxO shape, the route-chain now performs an idempotent self-split before the proxy preflight: + +- If fresh `freeUtxos` plus fresh `bot.paymentAddress` UTxOs already satisfy the lifecycle budget and key collateral shape, the shaping step is a no-op. +- If wallet ADA is sufficient but the shape is not, the step submits a real preprod self-split through `/api/v1/addTransaction`, creating a 6 ADA collateral output at `bot.paymentAddress` and returning the rest as change to the wallet script address. The split requires the 536 ADA lifecycle budget plus the 6 ADA collateral output and a 2 ADA self-split fee buffer. 
+- The self-split is signed by signer 1 and signer 2 using the existing `CI_MNEMONIC_2` / `CI_MNEMONIC_3` route-chain signing path, then waits for the original inputs to disappear from fresh `freeUtxos`. +- Server-built proxy transactions are persisted with no initial signed addresses. Because key-address collateral lives at `bot.paymentAddress`, proxy setup and action transactions first add signer index 0 (`CI_MNEMONIC_1`) as a real collateral witness, then signer index 1 (`CI_MNEMONIC_2`) broadcasts for the default threshold-2 wallet. +- Manual funding is still required when the wallet does not have enough total ADA for the proxy lifecycle budget plus the 6 ADA collateral output and fee buffer. + +Because the self-split is an on-chain transaction, it can add one confirmation wait per wallet type, but only when the current UTxO shape needs repair. + +## How to contribute + +### Add a new route step + +1. Copy `scenarios/steps/template-route-step.ts` into a new step module under `scenarios/steps/`. +2. Set a stable `id` and route-specific `description`. +3. Implement deterministic inputs from context/env. +4. Call route(s) via `requestJson`. +5. Add strict assertions and concise artifacts for failure triage. +6. Register the step in `scenarios/manifest.ts`. + +### Add a new scenario + +1. Build a scenario factory in `scenarios/manifest.ts`. +2. Keep ordering intentional (upstream dependencies first). +3. Mark step severity correctly: + - `critical`: stop scenario/chain on failure. + - `non-critical`: continue and report. +4. Ensure artifacts are small but diagnostic. + +### Keep things maintainable + +- Do not overload bootstrap with route-specific behavior. +- Prefer reusable helpers in `framework/` or `scenarios/flows/`. +- Keep step ids stable (helps CI history and triage). +- Avoid hidden randomness in assertions; use deterministic checks. 
+- For governance scenarios, derive proposal lists via `framework/governance.ts` so payload shape and proposal selection remain deterministic across step reruns. + +## Local execution (PowerShell, CI-like) + +From repo root: + +- `C:\Users\andru\Documents\GitHub\multisig` + +Set required CI variables in your current shell: + +```powershell +$env:CI_JWT_SECRET="..." +$env:CI_MNEMONIC_1="..." +$env:CI_MNEMONIC_2="..." +$env:CI_MNEMONIC_3="..." +$env:CI_BLOCKFROST_PREPROD_API_KEY="..." +$env:CI_NETWORK_ID="0" +$env:CI_WALLET_TYPES="legacy,hierarchical,sdk" +$env:CI_TRANSFER_LOVELACE="2000000" +$env:SIGN_BROADCAST="true" +$env:CI_DREP_ANCHOR_URL="https://..." # required for the default full flow; stored as on-chain anchor URL, never fetched +$env:CI_STAKE_POOL_ID_HEX="..." # required for the default full flow (scenario.stake-certificates) +``` + +`CI_DREP_ANCHOR_JSON` contains the full CIP-119 JSON document and must be set separately using a PowerShell here-string so the double quotes are preserved: + +```powershell +$env:CI_DREP_ANCHOR_JSON = @' +{ + "@context": { + "CIP100": "https://github.com/cardano-foundation/CIPs/blob/master/CIP-0100/README.md#", + "CIP119": "https://github.com/cardano-foundation/CIPs/blob/master/CIP-0119/README.md#", + ... + }, + "hashAlgorithm": "blake2b-256", + "body": { ... } +} +'@ +``` + +In GitHub Actions, store the full JSON as a repository secret — the runner injects it verbatim, no quoting required. + +Optional (recommended for full flow): + +```powershell +Remove-Item Env:CI_ROUTE_SCENARIOS -ErrorAction SilentlyContinue +$env:CI_ROUTE_SCENARIOS="" +``` + +Start a clean CI-like stack: + +If you changed local code or Dockerfiles, rebuild `app` and `ci-runner`; otherwise you can skip the `build` command for faster reruns. 
+ +```powershell +docker compose -f docker-compose.ci.yml down -v +docker compose -f docker-compose.ci.yml build app ci-runner +docker compose -f docker-compose.ci.yml up -d postgres app +``` + +Bootstrap wallets and write host-mounted artifacts: + +```powershell +docker compose -f docker-compose.ci.yml run --rm ` + -e CI_CONTEXT_PATH=/artifacts/ci-wallet-context.json ` + ci-runner npx --yes tsx scripts/ci/cli/bootstrap.ts +``` + +Optional: confirm wallets are funded on-chain before running route-chain (uses `CI_CONTEXT_PATH` and `CI_BLOCKFROST_PREPROD_API_KEY`; same total-balance semantics as `walletBalanceSummary` in the route-chain report). Flags: `--json` (machine-readable summary only), `--strict` (exit with status 1 if balance collection fails). + +```powershell +docker compose -f docker-compose.ci.yml run --rm ` + -e CI_CONTEXT_PATH=/artifacts/ci-wallet-context.json ` + ci-runner npx --yes tsx scripts/ci/cli/wallet-status.ts +``` + +Run route-chain smoke scenarios: + +```powershell +docker compose -f docker-compose.ci.yml run --rm ` + -e CI_CONTEXT_PATH=/artifacts/ci-wallet-context.json ` + -e CI_ROUTE_CHAIN_REPORT_PATH=/artifacts/ci-route-chain-report.md ` + ci-runner npx --yes tsx scripts/ci/cli/route-chain.ts + +``` + +View generated report on host: + +```powershell +Get-Content ".\ci-artifacts\ci-route-chain-report.md" +``` + +## Local execution (Linux/Bash, CI-like) + +From repo root: + +- `/path/to/multisig` + +Set required CI variables in your current shell: + +```bash +export CI_JWT_SECRET="..." +export CI_MNEMONIC_1="..." +export CI_MNEMONIC_2="..." +export CI_MNEMONIC_3="..." +export CI_BLOCKFROST_PREPROD_API_KEY="..." +export CI_NETWORK_ID="0" +export CI_WALLET_TYPES="legacy,hierarchical,sdk" +export CI_TRANSFER_LOVELACE="2000000" +export SIGN_BROADCAST="true" +export CI_DREP_ANCHOR_URL="https://..." # required for the default full flow; stored as on-chain anchor URL, never fetched +export CI_STAKE_POOL_ID_HEX="..." 
# required for the default full flow (scenario.stake-certificates) +``` + +`CI_DREP_ANCHOR_JSON` contains the full CIP-119 JSON document and must be set separately using a heredoc so the double quotes are preserved: + +```bash +export CI_DREP_ANCHOR_JSON=$(cat <<'EOF' +{ + "@context": { + "CIP100": "https://github.com/cardano-foundation/CIPs/blob/master/CIP-0100/README.md#", + "CIP119": "https://github.com/cardano-foundation/CIPs/blob/master/CIP-0119/README.md#", + ... + }, + "hashAlgorithm": "blake2b-256", + "body": { ... } +} +EOF +) +``` + +In GitHub Actions, store the full JSON as a repository secret — the runner injects it verbatim, no quoting required. + +Optional (recommended for full flow): + +```bash +unset CI_ROUTE_SCENARIOS +export CI_ROUTE_SCENARIOS="" +``` + +Start a clean CI-like stack: + +If you changed local code or Dockerfiles, rebuild `app` and `ci-runner`; otherwise you can skip the `build` command for faster reruns. + +```bash +docker compose -f docker-compose.ci.yml down -v +docker compose -f docker-compose.ci.yml build app ci-runner +docker compose -f docker-compose.ci.yml up -d postgres app +``` + +Bootstrap wallets and write host-mounted artifacts: + +```bash +docker compose -f docker-compose.ci.yml run --rm \ + -e CI_CONTEXT_PATH=/artifacts/ci-wallet-context.json \ + ci-runner npx --yes tsx scripts/ci/cli/bootstrap.ts +``` + +Optional: confirm wallets are funded on-chain before running route-chain (uses `CI_CONTEXT_PATH` and `CI_BLOCKFROST_PREPROD_API_KEY`; same total-balance semantics as `walletBalanceSummary` in the route-chain report). Flags: `--json` (machine-readable summary only), `--strict` (exit with status 1 if balance collection fails). 
+ +```bash +docker compose -f docker-compose.ci.yml run --rm \ + -e CI_CONTEXT_PATH=/artifacts/ci-wallet-context.json \ + ci-runner npx --yes tsx scripts/ci/cli/wallet-status.ts +``` + +Run route-chain smoke scenarios: + +```bash +docker compose -f docker-compose.ci.yml run --rm \ + -e CI_CONTEXT_PATH=/artifacts/ci-wallet-context.json \ + -e CI_ROUTE_CHAIN_REPORT_PATH=/artifacts/ci-route-chain-report.md \ + ci-runner npx --yes tsx scripts/ci/cli/route-chain.ts +``` + +View generated report on host: + +```bash +cat ./ci-artifacts/ci-route-chain-report.md +``` diff --git a/scripts/ci/cli/bootstrap.ts b/scripts/ci/cli/bootstrap.ts new file mode 100644 index 00000000..0138922b --- /dev/null +++ b/scripts/ci/cli/bootstrap.ts @@ -0,0 +1,326 @@ +import { BotWalletRole, PrismaClient } from "@prisma/client"; +import { stringifyRedacted } from "../framework/redact"; +import { requireEnv, parseWalletTypesEnv } from "../framework/env"; +import { parseMnemonic } from "../framework/mnemonic"; +import { deriveCiBotSecret } from "../framework/botAuth"; +import { hashBotSecret } from "../framework/botProvision"; +import { + deserializeAddress, + resolvePaymentKeyHash, + resolveStakeKeyHash, + serializeRewardAddress, +} from "@meshsdk/core"; +import { MultisigWallet, type MultisigKey } from "../../../src/utils/multisigSDK"; + +const prisma = new PrismaClient(); + +type CIWalletType = "legacy" | "hierarchical" | "sdk"; + +type PaymentNativeScript = + | { type: "sig"; keyHash: string } + | { type: "all"; scripts: PaymentNativeScript[] } + | { type: "any"; scripts: PaymentNativeScript[] } + | { type: "atLeast"; required: number; scripts: PaymentNativeScript[] }; + +type CIBotBootstrap = { + id: string; + paymentAddress: string; + botKeyId: string; + botId: string; +}; + +function stakeAddressFromPaymentAddress(paymentAddress: string): string { + const stakeHash = deserializeAddress(paymentAddress).stakeCredentialHash; + if (!stakeHash) { + throw new Error("Expected stake credential on 
payment address for CI signer"); + } + const network = paymentAddress.includes("test") ? 0 : 1; + const stake = serializeRewardAddress(stakeHash, false, network); + if (!stake) { + throw new Error("Could not serialize stake address from payment address"); + } + return stake; +} + +function buildSdkMultisigStakeAddress(args: { + signersAddresses: string[]; + signerStakeAddresses: string[]; + signerDescriptions: string[]; + paymentKeyHashes: string[]; + numRequiredSigners: number; + networkId: 0 | 1; +}): string { + const keys: MultisigKey[] = []; + for (let i = 0; i < args.signersAddresses.length; i++) { + const addr = args.signersAddresses[i]; + if (!addr) continue; + keys.push({ + keyHash: resolvePaymentKeyHash(addr), + role: 0, + name: args.signerDescriptions[i] ?? "", + }); + } + for (let i = 0; i < args.signerStakeAddresses.length; i++) { + const sk = args.signerStakeAddresses[i]; + if (!sk) continue; + keys.push({ + keyHash: resolveStakeKeyHash(sk), + role: 2, + name: args.signerDescriptions[i] ?? "", + }); + } + for (let i = 0; i < args.paymentKeyHashes.length; i++) { + const drep = args.paymentKeyHashes[i]; + if (!drep) continue; + keys.push({ keyHash: drep, role: 3, name: args.signerDescriptions[i] ?? 
"" }); + } + const wallet = new MultisigWallet( + "ci-sdk-preview", + keys, + "", + args.numRequiredSigners, + args.networkId, + undefined, + "atLeast", + ); + if (!wallet.stakingEnabled()) { + throw new Error("CI SDK preview MultisigWallet: staking not enabled (check signer key roles)"); + } + const stakeAddr = wallet.getStakeAddress(); + if (!stakeAddr) { + throw new Error("CI SDK preview MultisigWallet: could not derive multisig stake address"); + } + return stakeAddr; +} + +async function deriveAddress(words: string[], networkId: 0 | 1): Promise { + const { MeshWallet } = await import("@meshsdk/core"); + const wallet = new MeshWallet({ + networkId, + key: { type: "mnemonic", words }, + }); + await wallet.init(); + return wallet.getChangeAddress(); +} + +async function main() { + const apiBaseUrl = (process.env.API_BASE_URL ?? "http://app:3000").trim().replace(/\/$/, ""); + const jwtSecret = requireEnv("CI_JWT_SECRET"); + const mnemonic1 = requireEnv("CI_MNEMONIC_1"); + const mnemonic2 = requireEnv("CI_MNEMONIC_2"); + const mnemonic3 = requireEnv("CI_MNEMONIC_3"); + const walletTypes = parseWalletTypesEnv( + process.env.CI_WALLET_TYPES ?? "legacy,hierarchical,sdk", + ); + const parsedNetworkId = Number(process.env.CI_NETWORK_ID ?? "0"); + const networkId: 0 | 1 = parsedNetworkId === 1 ? 1 : 0; + const requiredSigners = Math.max( + 1, + Number.isFinite(Number(process.env.CI_NUM_REQUIRED_SIGNERS ?? "2")) + ? Number(process.env.CI_NUM_REQUIRED_SIGNERS ?? "2") + : 2, + ); + const contextPath = process.env.CI_CONTEXT_PATH ?? "/tmp/ci-wallet-context.json"; + const stakePoolIdHex = + typeof process.env.CI_STAKE_POOL_ID_HEX === "string" && process.env.CI_STAKE_POOL_ID_HEX.trim() + ? 
process.env.CI_STAKE_POOL_ID_HEX.trim() + : undefined; + + const signerAddresses = await Promise.all([ + deriveAddress(parseMnemonic(mnemonic1), networkId), + deriveAddress(parseMnemonic(mnemonic2), networkId), + deriveAddress(parseMnemonic(mnemonic3), networkId), + ]); + + const signerStakeAddresses = signerAddresses.map((addr) => stakeAddressFromPaymentAddress(addr)); + + const signerBots: CIBotBootstrap[] = []; + const botAuthByAddress: Record = {}; + for (let i = 0; i < signerAddresses.length; i++) { + const paymentAddress = signerAddresses[i]; + if (!paymentAddress) { + throw new Error(`Missing signer address at index ${i}`); + } + const botSecret = deriveCiBotSecret(paymentAddress, jwtSecret); + const botKey = await prisma.botKey.create({ + data: { + ownerAddress: `ci-owner-${Date.now()}-${i}`, + name: `ci-bot-signer-${i}-${Date.now()}`, + keyHash: hashBotSecret(botSecret, jwtSecret), + scope: JSON.stringify([ + "multisig:create", + "multisig:read", + "multisig:sign", + "governance:read", + "ballot:write", + ]), + }, + }); + + const botAuthResponse = await fetch(`${apiBaseUrl}/api/v1/botAuth`, { + method: "POST", + headers: { "content-type": "application/json" }, + body: JSON.stringify({ + botKeyId: botKey.id, + secret: botSecret, + paymentAddress, + }), + }); + const botAuthBody = await botAuthResponse.json(); + if (!botAuthResponse.ok || !botAuthBody?.token || !botAuthBody?.botId) { + throw new Error( + `botAuth failed for signer index ${i} (${botAuthResponse.status}): ${stringifyRedacted(botAuthBody)}`, + ); + } + + signerBots.push({ + id: `signer${i}`, + paymentAddress, + botKeyId: botKey.id, + botId: botAuthBody.botId as string, + }); + botAuthByAddress[paymentAddress] = botAuthBody.token as string; + } + const primaryBot = signerBots[0]; + if (!primaryBot) { + throw new Error("No signer bots were provisioned"); + } + + const paymentKeyHashes = signerAddresses.map((addr) => resolvePaymentKeyHash(addr)); + + const signerDescriptions = ["CI Signer 1", "CI 
Signer 2", "CI Signer 3"]; + const numRequired = Math.min(requiredSigners, signerAddresses.length); + + const createdWallets: Array<{ + type: CIWalletType; + walletId: string; + walletAddress: string; + signerAddresses: string[]; + }> = []; + + let sdkStakeAddress: string | undefined; + + for (const walletType of walletTypes) { + const basePayload: Record = { + name: `CI ${walletType} Wallet ${Date.now()}`, + description: `CI ${walletType} wallet smoke test`, + signersAddresses: signerAddresses, + signersDescriptions: signerDescriptions, + numRequiredSigners: numRequired, + scriptType: "atLeast", + network: networkId, + }; + + if (walletType === "hierarchical") { + basePayload.scriptType = "all"; + basePayload.paymentNativeScript = { + type: "all", + scripts: [ + { + type: "atLeast", + required: Math.min(requiredSigners, paymentKeyHashes.length), + scripts: paymentKeyHashes.map((keyHash) => ({ type: "sig", keyHash })), + }, + ], + } satisfies PaymentNativeScript; + } + + if (walletType === "sdk") { + basePayload.signersDRepKeys = paymentKeyHashes; + basePayload.signersStakeKeys = signerStakeAddresses; + } + + const createWalletResponse = await fetch(`${apiBaseUrl}/api/v1/createWallet`, { + method: "POST", + headers: { + "content-type": "application/json", + authorization: `Bearer ${botAuthByAddress[primaryBot.paymentAddress]}`, + }, + body: JSON.stringify(basePayload), + }); + const createWalletBody = await createWalletResponse.json(); + if (!createWalletResponse.ok || !createWalletBody?.walletId) { + throw new Error( + `createWallet (${walletType}) failed (${createWalletResponse.status}): ${stringifyRedacted(createWalletBody)}`, + ); + } + + if (walletType === "sdk") { + sdkStakeAddress = buildSdkMultisigStakeAddress({ + signersAddresses: signerAddresses, + signerStakeAddresses, + signerDescriptions, + paymentKeyHashes, + numRequiredSigners: numRequired, + networkId, + }); + } + + for (const bot of signerBots.slice(1)) { + await prisma.walletBotAccess.upsert({ + 
where: { + walletId_botId: { + walletId: createWalletBody.walletId as string, + botId: bot.botId, + }, + }, + update: { + role: BotWalletRole.cosigner, + }, + create: { + walletId: createWalletBody.walletId as string, + botId: bot.botId, + role: BotWalletRole.cosigner, + }, + }); + } + + createdWallets.push({ + type: walletType, + walletId: createWalletBody.walletId as string, + walletAddress: createWalletBody.address as string, + signerAddresses, + }); + } + + await import("fs/promises").then((fs) => + fs.writeFile( + contextPath, + JSON.stringify( + { + schemaVersion: 3, + createdAt: new Date().toISOString(), + apiBaseUrl, + networkId, + walletTypes, + wallets: createdWallets, + bots: signerBots, + defaultBotId: primaryBot.id, + walletId: createdWallets[0]?.walletId, + walletAddress: createdWallets[0]?.walletAddress, + signerAddresses, + signerStakeAddresses, + sdkStakeAddress, + ...(stakePoolIdHex ? { stakePoolIdHex } : {}), + }, + null, + 2, + ), + "utf8", + ), + ); + + console.log( + `Created wallets: ${createdWallets.map((w) => `${w.type}:${w.walletId}`).join(", ")}`, + ); + console.log(`Saved CI context (schema 3) to ${contextPath}`); +} + +main() + .catch((error) => { + console.error("bootstrap failed:", error); + process.exit(1); + }) + .finally(async () => { + await prisma.$disconnect(); + }); diff --git a/scripts/ci/cli/inspect-context.ts b/scripts/ci/cli/inspect-context.ts new file mode 100644 index 00000000..9eeadad7 --- /dev/null +++ b/scripts/ci/cli/inspect-context.ts @@ -0,0 +1,48 @@ +import { loadBootstrapContext } from "../framework/context"; +import { getBotForAddress, getDefaultBot } from "../framework/botContext"; +import { requireEnv } from "../framework/env"; + +function maskMiddle(value: string): string { + if (value.length <= 12) { + return `${value.slice(0, 4)}...${value.slice(-2)}`; + } + return `${value.slice(0, 8)}...${value.slice(-8)}`; +} + +async function main() { + const contextPath = requireEnv("CI_CONTEXT_PATH", 
"/tmp/ci-wallet-context.json"); + const ctx = await loadBootstrapContext(contextPath); + const defaultBot = getDefaultBot(ctx); + + console.log(`Context file: ${contextPath}`); + console.log(`Schema version: ${ctx.schemaVersion}`); + console.log(`API base URL: ${ctx.apiBaseUrl}`); + console.log(`Network ID: ${ctx.networkId}`); + console.log(`Wallets: ${ctx.wallets.length}`); + console.log(`Bots: ${ctx.bots.length}`); + console.log(`Default bot: ${defaultBot.id} (${maskMiddle(defaultBot.paymentAddress)})`); + console.log( + `Signer stake addresses: ${ctx.signerStakeAddresses.map((a) => maskMiddle(a)).join(", ")}`, + ); + if (ctx.sdkStakeAddress) { + console.log(`SDK multisig reward address: ${maskMiddle(ctx.sdkStakeAddress)}`); + } + if (ctx.stakePoolIdHex) { + console.log(`Stake pool id (hex): ${maskMiddle(ctx.stakePoolIdHex)}`); + } + console.log(""); + + console.log("Signer to bot mapping:"); + for (const [walletIndex, wallet] of ctx.wallets.entries()) { + console.log(`- [${walletIndex}] ${wallet.type} wallet ${wallet.walletId}`); + wallet.signerAddresses.forEach((address, signerIndex) => { + const bot = getBotForAddress(ctx, address); + console.log(` signer[${signerIndex}] ${maskMiddle(address)} -> ${bot.id}`); + }); + } +} + +main().catch((error) => { + console.error("inspect-context failed:", error); + process.exit(1); +}); diff --git a/scripts/ci/cli/route-chain.ts b/scripts/ci/cli/route-chain.ts new file mode 100644 index 00000000..8179f979 --- /dev/null +++ b/scripts/ci/cli/route-chain.ts @@ -0,0 +1,58 @@ +import { loadBootstrapContext } from "../framework/context"; +import { runScenarios } from "../framework/runner"; +import { writeMarkdownReport } from "../framework/markdown"; +import { getScenarioManifest, ROUTE_SCENARIO_IDS } from "../scenarios/manifest"; +import { requireEnv, parseCommaList } from "../framework/env"; +import { assertPreprodContext } from "../framework/preprod"; + +async function main() { + const contextPath = 
requireEnv("CI_CONTEXT_PATH", "/tmp/ci-wallet-context.json"); + const reportPath = requireEnv("CI_ROUTE_CHAIN_REPORT_PATH", "/tmp/ci-route-chain-report.md"); + const context = await loadBootstrapContext(contextPath); + assertPreprodContext(context); + const requestedScenarioIds = parseCommaList(process.env.CI_ROUTE_SCENARIOS); + const allScenarioIds = new Set(ROUTE_SCENARIO_IDS); + const unknownScenarioIds = requestedScenarioIds.filter((id) => !allScenarioIds.has(id)); + if (unknownScenarioIds.length) { + throw new Error( + `Unknown scenario id(s) in CI_ROUTE_SCENARIOS: ${unknownScenarioIds.join(", ")}. Available: ${Array.from(allScenarioIds).join(", ")}`, + ); + } + const scenarios = getScenarioManifest(context, requestedScenarioIds); + + if (!scenarios.length) { + throw new Error( + requestedScenarioIds.length + ? `No route scenarios matched CI_ROUTE_SCENARIOS='${requestedScenarioIds.join(",")}'` + : "No route scenarios enabled in manifest", + ); + } + + const report = await runScenarios({ + scenarios, + ctx: context, + continueOnNonCriticalFailure: true, + }); + await writeMarkdownReport(report, reportPath); + + for (const scenario of report.scenarios) { + console.log(`[${scenario.status.toUpperCase()}] ${scenario.id}`); + for (const step of scenario.steps) { + if (step.status === "passed") { + console.log(` + ${step.id} (${step.durationMs}ms) - ${step.message}`); + } else { + console.log(` x ${step.id} (${step.durationMs}ms) - ${step.error ?? 
step.message}`); + } + } + } + console.log(`Route-chain report written to ${reportPath} (markdown)`); + + if (report.status !== "passed") { + throw new Error("Route-chain scenario run failed"); + } +} + +main().catch((error) => { + console.error("route-chain failed:", error); + process.exit(1); +}); diff --git a/scripts/ci/cli/wallet-status.ts b/scripts/ci/cli/wallet-status.ts new file mode 100644 index 00000000..e416a829 --- /dev/null +++ b/scripts/ci/cli/wallet-status.ts @@ -0,0 +1,78 @@ +import { loadBootstrapContext } from "../framework/context"; +import { requireEnv } from "../framework/env"; +import { assertPreprodContext } from "../framework/preprod"; +import { collectWalletBalanceSummary } from "../framework/walletBalances"; +import type { CIWalletBalanceEntry } from "../framework/types"; + +function parseArgs(argv: string[]): { json: boolean; strict: boolean } { + let json = false; + let strict = false; + for (const arg of argv) { + if (arg === "--json") { + json = true; + } else if (arg === "--strict") { + strict = true; + } + } + return { json, strict }; +} + +function lovelaceToAdaDisplay(lovelace: string): string { + const v = BigInt(lovelace); + const whole = v / 1_000_000n; + const frac = v % 1_000_000n; + if (frac === 0n) { + return whole.toString(); + } + const fracStr = frac.toString().padStart(6, "0").replace(/0+$/, ""); + return `${whole}.${fracStr}`; +} + +function printHumanTable( + wallets: { walletId: string; type: string }[], + getEntry: (walletId: string) => CIWalletBalanceEntry | undefined, +): void { + console.log("CI wallet balances (multisig script addresses, total on-chain UTxO sums)"); + console.log(""); + for (const w of wallets) { + const e = getEntry(w.walletId); + if (!e) { + console.log(`${w.type}\t${w.walletId}\t(no balance entry)`); + continue; + } + const ada = lovelaceToAdaDisplay(e.lovelace); + console.log(`${e.walletType}\t${e.walletId}`); + console.log(` address: ${e.walletAddress}`); + console.log(` utxos: 
${e.utxoCount}\tlovelace: ${e.lovelace}\t(~${ada} ADA)`); + console.log(""); + } +} + +async function main() { + const { json, strict } = parseArgs(process.argv.slice(2)); + const contextPath = requireEnv("CI_CONTEXT_PATH", "/tmp/ci-wallet-context.json"); + const ctx = await loadBootstrapContext(contextPath); + assertPreprodContext(ctx); + + const summary = await collectWalletBalanceSummary(ctx); + + if (json) { + console.log(JSON.stringify(summary, null, 2)); + } else { + printHumanTable(ctx.wallets, (id) => summary.byWalletId[id]); + if (summary.error) { + console.error(`Wallet balance collection reported an error: ${summary.error}`); + } else { + console.log(`Captured at: ${summary.capturedAt} (networkId=${summary.networkId})`); + } + } + + if (summary.error && strict) { + process.exit(1); + } +} + +main().catch((error) => { + console.error("wallet-status failed:", error); + process.exit(1); +}); diff --git a/scripts/ci/framework/botAuth.ts b/scripts/ci/framework/botAuth.ts new file mode 100644 index 00000000..d872126f --- /dev/null +++ b/scripts/ci/framework/botAuth.ts @@ -0,0 +1,94 @@ +import { createHmac } from "crypto"; +import type { CIBootstrapContext, CIBotContext } from "./types"; +import { requestJson } from "./http"; + +type CachedBotToken = { + token: string; + expiresAtMs: number; +}; + +const botTokenCache = new Map(); +const BOT_AUTH_RETRY_DELAYS_MS = [250, 500, 1000] as const; + +export function requireCiJwtSecret(): string { + const value = process.env.CI_JWT_SECRET; + if (!value || !value.trim()) { + throw new Error("Missing required environment variable: CI_JWT_SECRET"); + } + return value.trim(); +} + +// Deterministic secret lets us re-auth bots without persisting secrets to disk. 
/**
 * Derive a per-bot secret as HMAC-SHA256 over "ci-bot-secret:<paymentAddress>"
 * keyed with the CI JWT secret. Deterministic on its inputs, so the secret can
 * be re-derived at any time instead of being stored on disk.
 */
export function deriveCiBotSecret(paymentAddress: string, jwtSecret: string): string {
  return createHmac("sha256", jwtSecret)
    .update(`ci-bot-secret:${paymentAddress}`, "utf8")
    .digest("hex");
}

/** Promise-based delay helper used between 429 retries. */
function sleep(ms: number): Promise<void> {
  return new Promise((resolve) => setTimeout(resolve, ms));
}

/**
 * Best-effort decode of a JWT's `exp` claim to epoch milliseconds.
 * Returns null for malformed tokens or a missing/non-numeric `exp`;
 * the signature is NOT verified — this is only used for cache expiry.
 */
function decodeJwtExpiryMs(token: string): number | null {
  const payload = token.split(".")[1];
  if (!payload) return null;
  try {
    const decoded = JSON.parse(Buffer.from(payload, "base64url").toString("utf8")) as { exp?: unknown };
    if (typeof decoded.exp !== "number" || !Number.isFinite(decoded.exp)) return null;
    return decoded.exp * 1000;
  } catch {
    return null;
  }
}

/** Cache key combining bot id and payment address (ids alone may not be unique — TODO confirm). */
function getBotCacheKey(bot: CIBotContext): string {
  return `${bot.id}:${bot.paymentAddress}`;
}

/**
 * Authenticate a bot against POST /api/v1/botAuth and return a bearer token.
 * Tokens are cached per bot until ~10s before their decoded expiry (fallback:
 * 55 minutes when the JWT has no usable `exp`). Only HTTP 429 triggers the
 * fixed BOT_AUTH_RETRY_DELAYS_MS backoff; other statuses fail immediately.
 * @throws Error when no 200-with-token response is obtained.
 */
export async function authenticateBot(args: {
  ctx: CIBootstrapContext;
  bot: CIBotContext;
}): Promise<string> {
  const cacheKey = getBotCacheKey(args.bot);
  const now = Date.now();
  const cacheHit = botTokenCache.get(cacheKey);
  // 10s safety margin so a token is never returned right at its expiry edge.
  if (cacheHit && cacheHit.expiresAtMs - now > 10_000) {
    return cacheHit.token;
  }

  const secret = deriveCiBotSecret(args.bot.paymentAddress, requireCiJwtSecret());
  let auth: { status: number; data: { token?: string; error?: string } } | null = null;
  for (let attempt = 0; attempt <= BOT_AUTH_RETRY_DELAYS_MS.length; attempt++) {
    auth = await requestJson<{ token?: string; error?: string }>({
      url: `${args.ctx.apiBaseUrl}/api/v1/botAuth`,
      method: "POST",
      body: {
        botKeyId: args.bot.botKeyId,
        secret,
        paymentAddress: args.bot.paymentAddress,
      },
    });
    if (auth.status !== 429) {
      break;
    }
    if (attempt < BOT_AUTH_RETRY_DELAYS_MS.length) {
      const retryDelayMs = BOT_AUTH_RETRY_DELAYS_MS[attempt];
      if (retryDelayMs !== undefined) {
        await sleep(retryDelayMs);
      }
    }
  }

  if (!auth || auth.status !== 200 || !auth.data?.token) {
    const failedStatus = auth?.status ?? "no-response";
    throw new Error(`botAuth failed (${failedStatus})`);
  }

  const expiresAtMs = decodeJwtExpiryMs(auth.data.token) ?? Date.now() + 55 * 60 * 1000;
  botTokenCache.set(cacheKey, {
    token: auth.data.token,
    expiresAtMs,
  });

  return auth.data.token;
}

// ---- file: scripts/ci/framework/botContext.ts ----

import type { CIBootstrapContext, CIBotContext, CIWalletContext } from "./types";

/**
 * Resolve the context's default bot: the one matching defaultBotId when set
 * and present, otherwise the first bot in the list.
 * @throws Error when the context has no bots at all.
 */
export function getDefaultBot(ctx: CIBootstrapContext): CIBotContext {
  if (ctx.defaultBotId) {
    const matched = ctx.bots.find((bot) => bot.id === ctx.defaultBotId);
    if (matched) {
      return matched;
    }
  }

  const fallback = ctx.bots[0];
  if (!fallback) {
    throw new Error("Context has no bot credentials");
  }
  return fallback;
}

/**
 * Find the bot whose paymentAddress equals the given address (trimmed,
 * exact string match). Throws when no bot matches.
 */
export function getBotForAddress(
  ctx: CIBootstrapContext,
  paymentAddress: string,
): CIBotContext {
  const address = paymentAddress.trim();
  const matched = ctx.bots.find((bot) => bot.paymentAddress === address);
  if (matched) {
    return matched;
  }
  throw new Error(`No bot context found for paymentAddress ${address}`);
}

/**
 * Resolve the bot acting as the wallet's N-th signer, pairing it with the
 * signer address taken from wallet.signerAddresses[signerIndex].
 * @throws Error when the wallet has no address at that index or no bot owns it.
 */
export function getBotForSignerIndex(args: {
  ctx: CIBootstrapContext;
  wallet: CIWalletContext;
  signerIndex: number;
}): { bot: CIBotContext; signerAddress: string } {
  const signerAddress = args.wallet.signerAddresses[args.signerIndex];
  if (!signerAddress) {
    throw new Error(
      `Context is missing signerAddresses[${args.signerIndex}] for wallet ${args.wallet.walletId}`,
    );
  }

  return {
    bot: getBotForAddress(args.ctx, signerAddress),
    signerAddress,
  };
}

// ---- file: scripts/ci/framework/botProvision.ts ----

import { createHmac } from "crypto";

// (fragment) the declaration below continues in the next chunk.
export function
// (fragment) continuation of scripts/ci/framework/botProvision.ts — the
// `export function` keywords for this declaration sit in the previous chunk.
// Hashes a bot secret with HMAC-SHA256 keyed by the CI JWT secret (hex output).
hashBotSecret(secret: string, jwtSecret: string): string {
  return createHmac("sha256", jwtSecret).update(secret, "utf8").digest("hex");
}

// ---- file: scripts/ci/framework/context.ts ----

import { readFile } from "fs/promises";
import type { CIBootstrapContext, CIBotContext, CIWalletType } from "./types";

/** Assert `value` is a non-empty string and return it trimmed. */
function assertString(name: string, value: unknown): string {
  if (typeof value !== "string" || !value.trim()) {
    throw new Error(`Invalid context: ${name} must be a non-empty string`);
  }
  return value.trim();
}

/** Return the trimmed string, or undefined for missing/blank/non-string input. */
function optionalString(value: unknown): string | undefined {
  return typeof value === "string" && value.trim() ? value.trim() : undefined;
}

/** Assert `value` is a non-empty array of non-empty strings; returns trimmed copies. */
function assertStringArray(name: string, value: unknown): string[] {
  if (!Array.isArray(value) || value.length === 0) {
    throw new Error(`Invalid context: ${name} must be a non-empty array`);
  }
  const normalized = value.map((item, idx) => {
    if (typeof item !== "string" || !item.trim()) {
      throw new Error(`Invalid context: ${name}[${idx}] must be a non-empty string`);
    }
    return item.trim();
  });
  return normalized;
}

/** Normalize a wallet-type value (case-insensitive); rejects anything but legacy/hierarchical/sdk. */
function normalizeWalletType(value: unknown): CIWalletType {
  const v = typeof value === "string" ? value.trim().toLowerCase() : "";
  if (v === "legacy" || v === "hierarchical" || v === "sdk") return v;
  throw new Error(`Invalid context: unsupported wallet type '${String(value)}'`);
}

/**
 * Validate and normalize the `bots` array from a raw context object, and pick
 * a default bot id (explicit defaultBotId when given, else the first bot).
 */
function normalizeBots(input: Record<string, unknown>): {
  bots: CIBotContext[];
  defaultBotId?: string;
} {
  const botsRaw = input.bots;
  if (!Array.isArray(botsRaw) || botsRaw.length === 0) {
    throw new Error("Invalid context: bots must be a non-empty array");
  }
  const bots = botsRaw.map((bot, idx) => {
    if (!bot || typeof bot !== "object") {
      throw new Error(`Invalid context: bots[${idx}] must be an object`);
    }
    const b = bot as Record<string, unknown>;
    return {
      id: assertString(`bots[${idx}].id`, b.id),
      paymentAddress: assertString(`bots[${idx}].paymentAddress`, b.paymentAddress),
      botKeyId: assertString(`bots[${idx}].botKeyId`, b.botKeyId),
      botId: typeof b.botId === "string" && b.botId.trim() ? b.botId.trim() : undefined,
    } satisfies CIBotContext;
  });

  const defaultBotIdRaw = typeof input.defaultBotId === "string" ? input.defaultBotId.trim() : "";
  const defaultBotId = defaultBotIdRaw || bots[0]?.id;
  return { bots, defaultBotId };
}

/**
 * Validate a parsed bootstrap-context JSON object (schemaVersion 3 only) and
 * return the normalized CIBootstrapContext. Legacy top-level fields
 * (walletId, walletAddress, transactionId) fall back to the first wallet.
 * NOTE(review): any networkId other than 1 silently becomes 0 (preprod) —
 * confirm this coercion is intended rather than an error.
 * @throws Error with an "Invalid context: ..." message on any violation.
 */
export function validateBootstrapContext(raw: unknown): CIBootstrapContext {
  if (!raw || typeof raw !== "object") {
    throw new Error("Invalid context: expected JSON object");
  }

  const input = raw as Record<string, unknown>;
  if (Number(input.schemaVersion) !== 3) {
    throw new Error(
      `Invalid context: unsupported schemaVersion '${String(input.schemaVersion)}' (expected 3)`,
    );
  }

  const walletsRaw = input.wallets;
  if (!Array.isArray(walletsRaw) || walletsRaw.length === 0) {
    throw new Error("Invalid context: wallets must be a non-empty array");
  }

  const wallets = walletsRaw.map((wallet, idx) => {
    if (!wallet || typeof wallet !== "object") {
      throw new Error(`Invalid context: wallets[${idx}] must be an object`);
    }
    const w = wallet as Record<string, unknown>;
    return {
      type: normalizeWalletType(w.type),
      walletId: assertString(`wallets[${idx}].walletId`, w.walletId),
      walletAddress: assertString(`wallets[${idx}].walletAddress`, w.walletAddress),
      transactionId: optionalString(w.transactionId),
      signerAddresses: assertStringArray(`wallets[${idx}].signerAddresses`, w.signerAddresses),
    };
  });

  // walletTypes may be provided explicitly; otherwise derive from the wallets.
  const walletTypesRaw = Array.isArray(input.walletTypes) ? input.walletTypes : wallets.map((w) => w.type);
  const walletTypes = walletTypesRaw.map((v) => normalizeWalletType(v));
  const signerAddresses = assertStringArray("signerAddresses", input.signerAddresses);
  const signerStakeAddresses = assertStringArray("signerStakeAddresses", input.signerStakeAddresses);
  if (signerStakeAddresses.length !== signerAddresses.length) {
    throw new Error("Invalid context: signerStakeAddresses length must match signerAddresses");
  }

  const sdkStakeAddress = optionalString(input.sdkStakeAddress);
  const stakePoolIdHex = optionalString(input.stakePoolIdHex);

  const normalizedBots = normalizeBots(input);
  const defaultBot =
    normalizedBots.bots.find((bot) => bot.id === normalizedBots.defaultBotId) ??
    normalizedBots.bots[0];
  if (!defaultBot) {
    throw new Error("Invalid context: unable to resolve default bot");
  }

  return {
    schemaVersion: 3,
    createdAt: assertString("createdAt", input.createdAt ?? new Date().toISOString()),
    apiBaseUrl: assertString("apiBaseUrl", input.apiBaseUrl),
    networkId: Number(input.networkId) === 1 ? 1 : 0,
    walletTypes,
    wallets,
    bots: normalizedBots.bots,
    defaultBotId: normalizedBots.defaultBotId,
    walletId: typeof input.walletId === "string" ? input.walletId : wallets[0]?.walletId,
    walletAddress:
      typeof input.walletAddress === "string" ? input.walletAddress : wallets[0]?.walletAddress,
    signerAddresses,
    signerStakeAddresses,
    transactionId: optionalString(input.transactionId) ?? wallets[0]?.transactionId,
    // Optional fields are spread conditionally so they are absent (not undefined) in JSON output.
    ...(sdkStakeAddress !== undefined ? { sdkStakeAddress } : {}),
    ...(stakePoolIdHex !== undefined ? { stakePoolIdHex } : {}),
  };
}

/** Read a context file from disk, parse it as JSON, and validate it. */
export async function loadBootstrapContext(contextPath: string): Promise<CIBootstrapContext> {
  const raw = await readFile(contextPath, "utf8");
  return validateBootstrapContext(JSON.parse(raw));
}

// ---- file: scripts/ci/framework/datumSign.ts ----

import type { CIBootstrapContext } from "./types";
import { deriveSignerFromMnemonic } from "./walletAuth";

/**
 * Derive a signer from a mnemonic and sign an arbitrary datum string,
 * returning the signer address plus the CIP-8-style key/signature pair
 * produced by the underlying wallet's signData (see walletAuth.ts).
 */
export async function signDatumWithMnemonic(args: {
  ctx: CIBootstrapContext;
  mnemonic: string;
  datum: string;
}): Promise<{
  signerAddress: string;
  key: string;
  signature: string;
}> {
  const signer = await deriveSignerFromMnemonic({
    ctx: args.ctx,
    mnemonic: args.mnemonic,
  });
  const signature = await signer.signData(args.datum);
  return {
    signerAddress: signer.signerAddress,
    key: signature.key,
    signature: signature.signature,
  };
}

// ---- file: scripts/ci/framework/env.ts ----

import type { CIWalletType } from "./types";

/**
 * Read an environment variable, falling back to `fallback` when unset.
 * Blank values (after trim) are treated as missing and throw.
 */
export function requireEnv(name: string, fallback?: string): string {
  const value = process.env[name] ?? fallback;
  if (!value || !value.trim()) {
    throw new Error(`Missing required environment variable: ${name}`);
  }
  return value.trim();
}

/**
 * Interpret an env value as a boolean: undefined -> fallback; otherwise true
 * only for (case-insensitive) "true". Note an empty string yields false,
 * not the fallback.
 */
export function boolFromEnv(value: string | undefined, fallback: boolean): boolean {
  if (value === undefined) return fallback;
  return value.trim().toLowerCase() === "true";
}

/** Parse comma-separated non-empty tokens (e.g. CI_ROUTE_SCENARIOS). */
export function parseCommaList(raw: string | undefined): string[] {
  return (raw ??
  // (fragment) continuation of parseCommaList in scripts/ci/framework/env.ts —
  // the function signature and `return (raw ??` sit in the previous chunk.
    "")
    .split(",")
    .map((s) => s.trim())
    .filter(Boolean);
}

/**
 * Parse CI_WALLET_TYPES-style input into validated wallet types.
 * Values are case-insensitive; duplicates are preserved as given.
 * @throws Error when the list is empty or contains an unsupported value.
 */
export function parseWalletTypesEnv(raw: string): CIWalletType[] {
  const allowed = new Set(["legacy", "hierarchical", "sdk"]);
  const requested = raw
    .split(",")
    .map((s) => s.trim().toLowerCase())
    .filter(Boolean);
  if (!requested.length) {
    throw new Error("CI_WALLET_TYPES must include at least one wallet type");
  }
  const invalid = requested.filter((value) => !allowed.has(value));
  if (invalid.length) {
    throw new Error(
      `CI_WALLET_TYPES contains unsupported value(s): ${invalid.join(", ")}. Allowed: legacy,hierarchical,sdk`,
    );
  }
  return requested as CIWalletType[];
}

// ---- file: scripts/ci/framework/governance.ts ----

export type ActiveProposal = {
  proposalId: string;
  title: string;
};

// Loose shape of the governance API response; fields are validated at runtime.
type GovernanceResponse = {
  proposals?: Array<{
    proposalId?: unknown;
    title?: unknown;
  }>;
};

/**
 * Extract up to `maxItems` proposals from a governance response in a
 * deterministic order (sorted by proposalId) so repeated CI runs pick the
 * same set. Entries without a usable proposalId are dropped; a missing title
 * falls back to the proposalId. Note Math.max(1, maxItems) means at least
 * one proposal is kept even when maxItems is 0 — presumably intentional.
 */
export function getDeterministicActiveProposals(
  data: GovernanceResponse | unknown,
  maxItems = 2,
): ActiveProposal[] {
  const proposalsRaw = (data as GovernanceResponse | undefined)?.proposals;
  if (!Array.isArray(proposalsRaw)) {
    return [];
  }
  const proposals = proposalsRaw
    .map((proposal) => {
      const proposalId =
        typeof proposal?.proposalId === "string" ? proposal.proposalId.trim() : "";
      if (!proposalId) return null;
      const title =
        typeof proposal?.title === "string" && proposal.title.trim()
          ? proposal.title.trim()
          : proposalId;
      return {
        proposalId,
        title,
      };
    })
    .filter((proposal): proposal is ActiveProposal => Boolean(proposal))
    .sort((a, b) => a.proposalId.localeCompare(b.proposalId))
    .slice(0, Math.max(1, maxItems));

  return proposals;
}

/**
 * Build the ballot upsert request body. Choices alternate Yes/No by index;
 * `secondPass` inverts the alternation so an update run visibly changes every
 * vote. The rationale comment encodes run phase and proposal id for tracing.
 */
export function buildBallotUpsertPayload(args: {
  walletId: string;
  ballotName: string;
  proposals: ActiveProposal[];
  secondPass?: boolean;
}): {
  walletId: string;
  ballotName: string;
  proposals: Array<{
    proposalId: string;
    proposalTitle: string;
    choice: "Yes" | "No";
    rationaleComment: string;
  }>;
} {
  const isSecondPass = Boolean(args.secondPass);
  return {
    walletId: args.walletId,
    ballotName: args.ballotName,
    proposals: args.proposals.map((proposal, index) => ({
      proposalId: proposal.proposalId,
      proposalTitle: proposal.title,
      choice: (isSecondPass ? (index % 2 === 0 ? "No" : "Yes") : index % 2 === 0 ? "Yes" : "No"),
      rationaleComment: `ci-route-chain ${isSecondPass ? "update" : "seed"} ${proposal.proposalId}`,
    })),
  };
}

// ---- file: scripts/ci/framework/http.ts ----

type JsonRecord = Record<string, unknown>;

// Statuses considered transient and worth retrying by default.
const DEFAULT_RETRY_STATUSES = new Set([408, 418, 429, 500, 502, 503, 504]);
const DEFAULT_RETRIES = 6;
const DEFAULT_RETRY_DELAY_MS = 1000;
const DEFAULT_MAX_RETRY_DELAY_MS = 30000;

/** Promise-based delay helper used between retries. */
function sleep(ms: number): Promise<void> {
  return new Promise((resolve) => setTimeout(resolve, ms));
}

/** Parse an env override as a non-negative integer; invalid/blank -> fallback. */
function parseNonNegativeInt(value: string | undefined, fallback: number): number {
  if (!value?.trim()) return fallback;
  const parsed = Number(value);
  if (!Number.isFinite(parsed) || parsed < 0) return fallback;
  return Math.floor(parsed);
}

/**
 * Convert a Retry-After header to milliseconds: either delta-seconds or an
 * HTTP-date (both per RFC 9110). Returns null when absent or unparseable.
 */
function getRetryAfterMs(header: string | null): number | null {
  if (!header) return null;
  const seconds = Number(header);
  if (Number.isFinite(seconds) && seconds >= 0) {
    // (fragment) continuation of getRetryAfterMs in scripts/ci/framework/http.ts.
    return seconds * 1000;
  }
  const dateMs = Date.parse(header);
  if (Number.isFinite(dateMs)) {
    return Math.max(0, dateMs - Date.now());
  }
  return null;
}

/**
 * Compute the delay before the next retry: honor a server-provided
 * Retry-After when present, otherwise exponential backoff
 * (retryDelayMs * 2^(attempt-1)); both paths are capped at maxRetryDelayMs.
 */
function getRetryDelayMs(args: {
  attempt: number;
  retryDelayMs: number;
  maxRetryDelayMs: number;
  retryAfterMs?: number | null;
}): number {
  if (typeof args.retryAfterMs === "number") {
    return Math.min(args.retryAfterMs, args.maxRetryDelayMs);
  }
  const exponentialDelay = args.retryDelayMs * 2 ** Math.max(0, args.attempt - 1);
  return Math.min(exponentialDelay, args.maxRetryDelayMs);
}

/**
 * Depth-first search for a BigInt anywhere in a JSON-bound value; returns a
 * dotted/bracketed path (rooted at "body") to the first one found, or null.
 * Used to fail fast before JSON.stringify would throw on BigInt.
 */
function findBigIntPath(value: unknown, path = "body"): string | null {
  if (typeof value === "bigint") return path;
  if (Array.isArray(value)) {
    for (let index = 0; index < value.length; index += 1) {
      const childPath = findBigIntPath(value[index], `${path}[${index}]`);
      if (childPath) return childPath;
    }
    return null;
  }
  if (typeof value === "object" && value !== null) {
    for (const [key, child] of Object.entries(value)) {
      const childPath = findBigIntPath(child, `${path}.${key}`);
      if (childPath) return childPath;
    }
  }
  return null;
}

/**
 * JSON HTTP request with timeout (AbortController) and retries.
 * Retries on network/parse errors and on retryable statuses (defaults above,
 * overridable via retryStatuses or CI_HTTP_* env vars), honoring Retry-After.
 * Returns the final { status, data } — callers must still check `status`.
 * NOTE(review): a non-JSON body on an otherwise-successful response makes
 * response.json() throw and the attempt is retried — confirm that is desired.
 * @throws Error after all attempts fail with a thrown error.
 */
export async function requestJson<T>(args: {
  url: string;
  method?: "GET" | "POST";
  token?: string;
  body?: JsonRecord;
  timeoutMs?: number;
  retries?: number;
  retryDelayMs?: number;
  maxRetryDelayMs?: number;
  retryStatuses?: number[];
}): Promise<{ status: number; data: T }> {
  const {
    url,
    method = "GET",
    token,
    body,
    timeoutMs = 30000,
    retries = parseNonNegativeInt(process.env.CI_HTTP_RETRIES, DEFAULT_RETRIES),
    retryDelayMs = parseNonNegativeInt(process.env.CI_HTTP_RETRY_DELAY_MS, DEFAULT_RETRY_DELAY_MS),
    maxRetryDelayMs = parseNonNegativeInt(process.env.CI_HTTP_MAX_RETRY_DELAY_MS, DEFAULT_MAX_RETRY_DELAY_MS),
    retryStatuses,
  } = args;
  const retryableStatuses = retryStatuses ? new Set(retryStatuses) : DEFAULT_RETRY_STATUSES;
  // Reject BigInt bodies up front with a useful path instead of a generic
  // JSON.stringify TypeError.
  const bigIntPath = body ? findBigIntPath(body) : null;
  if (bigIntPath) {
    throw new Error(
      `requestJson body contains non-JSON BigInt at ${bigIntPath}; convert diagnostics to strings before sending the request`,
    );
  }

  let attempt = 0;
  let lastError: unknown = null;

  while (attempt <= retries) {
    attempt += 1;
    const controller = new AbortController();
    const timer = setTimeout(() => controller.abort(), timeoutMs);

    try {
      const response = await fetch(url, {
        method,
        headers: {
          ...(body ? { "content-type": "application/json" } : {}),
          ...(token ? { authorization: `Bearer ${token}` } : {}),
        },
        body: body ? JSON.stringify(body) : undefined,
        signal: controller.signal,
      });

      const data = (await response.json()) as T;
      clearTimeout(timer);
      if (attempt <= retries && retryableStatuses.has(response.status)) {
        await sleep(
          getRetryDelayMs({
            attempt,
            retryDelayMs,
            maxRetryDelayMs,
            retryAfterMs: getRetryAfterMs(response.headers.get("retry-after")),
          }),
        );
        continue;
      }
      return { status: response.status, data };
    } catch (error) {
      clearTimeout(timer);
      lastError = error;
      if (attempt <= retries) {
        await sleep(getRetryDelayMs({ attempt, retryDelayMs, maxRetryDelayMs }));
      }
    }
  }

  throw new Error(`HTTP request failed after ${retries + 1} attempt(s): ${String(lastError)}`);
}

// ---- file: scripts/ci/framework/markdown.ts ----

import { mkdir, writeFile } from "fs/promises";
import { dirname } from "path";
import type { RunReport, ScenarioReport, StepReport } from "./types";

/**
 * Lovelace string to ADA with 2 decimals via Number conversion — precision
 * loss is acceptable here since this feeds a human-readable report only.
 */
function lovelaceToAda(lovelace: string): string {
  return (Number(BigInt(lovelace)) / 1_000_000).toFixed(2);
}

/** Format a duration: raw ms under a second, otherwise one-decimal seconds. */
function fmtMs(ms: number): string {
  if (ms < 1000) return `${ms}ms`;
  return `${(ms / 1000).toFixed(1)}s`;
}

/** Escape a string for use inside a markdown table cell (backslash, pipe, newline). */
function escapeCell(s: string): string {
  return s.replace(/\\/g,
  // (fragment) continuation of escapeCell in scripts/ci/framework/markdown.ts.
  "\\\\").replace(/\|/g, "\\|").replace(/\n/g, " ");
}

/**
 * Render a scenario's steps as a markdown table (icon, id, duration, message).
 * Failed steps show their error text in the message column; any artifacts
 * attached to failed steps are appended afterwards as JSON code blocks.
 */
function renderSteps(steps: StepReport[]): string {
  const rows: string[] = [];
  rows.push("| Step | ms | Message |");
  rows.push("|------|----|---------|");

  const errorBlocks: string[] = [];

  for (const step of steps) {
    const icon = step.status === "passed" ? "✅" : "❌";
    const msg = escapeCell(step.status === "failed" ? (step.error ?? step.message) : step.message);
    rows.push(`| ${icon} ${step.id} | ${step.durationMs} | ${msg} |`);

    if (step.status === "failed" && step.artifacts && Object.keys(step.artifacts).length > 0) {
      errorBlocks.push(`**\`${step.id}\` artifacts:**`);
      errorBlocks.push("```json");
      errorBlocks.push(JSON.stringify(step.artifacts, null, 2));
      errorBlocks.push("```");
    }
  }

  if (errorBlocks.length > 0) {
    rows.push("", ...errorBlocks);
  }

  return rows.join("\n");
}

/** Render one scenario as a markdown section: status heading plus its step table. */
function renderScenario(scenario: ScenarioReport): string {
  const icon = scenario.status === "passed" ? "✅" : "❌";
  return [`### ${icon} ${scenario.id} — ${fmtMs(scenario.durationMs)}`, "", renderSteps(scenario.steps)].join("\n");
}

/**
 * Write the full run report as markdown: header line, wallet-balance table,
 * scenario summary table, then per-scenario step details. Parent directories
 * of outputPath are created as needed.
 */
export async function writeMarkdownReport(report: RunReport, outputPath: string): Promise<void> {
  const lines: string[] = [];
  const icon = report.status === "passed" ? "✅" : "❌";

  lines.push(`# CI Route-Chain: ${report.status.toUpperCase()} ${icon}`, "");

  const network = report.contextSummary.networkId === 0 ? "preprod" : "mainnet";
  lines.push(
    `**Run:** ${report.createdAt} · **Duration:** ${fmtMs(report.durationMs)} · **Network:** ${network} · **Wallets:** ${report.contextSummary.walletTypes.join(", ")}`,
    "",
  );

  // Wallet balances
  lines.push("## Wallet Balances", "");
  if (report.walletBalanceSummary.error) {
    lines.push(`> Balance collection failed: ${report.walletBalanceSummary.error}`);
  } else {
    lines.push("| Type | UTxOs | ADA |", "|------|-------|-----|");
    for (const [type, entry] of Object.entries(report.walletBalanceSummary.byWalletType)) {
      if (!entry) continue;
      const ada = lovelaceToAda(entry.lovelace);
      // Count native assets (everything except the lovelace pseudo-unit).
      const nativeCount = Object.keys(entry.assets).filter((k) => k !== "lovelace").length;
      const assetNote = nativeCount > 0 ? ` +${nativeCount} assets` : "";
      lines.push(`| ${type} | ${entry.utxoCount} | ${ada}${assetNote} |`);
    }
  }
  lines.push("");

  // Scenario summary
  lines.push("## Scenario Summary", "");
  lines.push("| Scenario | Status | Steps | Duration |", "|----------|--------|-------|----------|");
  for (const scenario of report.scenarios) {
    const sIcon = scenario.status === "passed" ? "✅" : "❌";
    const passed = scenario.steps.filter((s) => s.status === "passed").length;
    lines.push(`| ${scenario.id} | ${sIcon} | ${passed}/${scenario.steps.length} | ${fmtMs(scenario.durationMs)} |`);
  }
  lines.push("");

  // Step details
  lines.push("## Steps", "");
  for (const scenario of report.scenarios) {
    lines.push(renderScenario(scenario), "");
  }

  await mkdir(dirname(outputPath), { recursive: true });
  await writeFile(outputPath, lines.join("\n"), "utf8");
}

// ---- file: scripts/ci/framework/mnemonic.ts ----

/** Split a mnemonic phrase on any whitespace into its non-empty words. */
export function parseMnemonic(value: string): string[] {
  return value
    .trim()
    .split(/\s+/)
    .filter(Boolean);
}

// ---- file: scripts/ci/framework/preprod.ts ----

import type { CIBootstrapContext } from "./types";

/** Bech32 testnet addresses use the addr_test / stake_test prefixes. */
export function isTestnetAddress(address: string): boolean {
  return address.startsWith("addr_test") || address.startsWith("stake_test");
}

/**
 * Guard rail: fail fast unless both the CI configuration (CI_NETWORK_ID) and
 * the bootstrap context itself target preprod, and every address in the
 * context is a testnet address.
 */
export function assertPreprodContext(context: CIBootstrapContext): void {
  const configuredNetworkId = Number(process.env.CI_NETWORK_ID ?? "0") === 1 ? 1 : 0;
  if (configuredNetworkId !== 0) {
    throw new Error(
      `CI route-chain is configured for preprod only. CI_NETWORK_ID must be 0, got ${configuredNetworkId}`,
    );
  }
  if (context.networkId !== 0) {
    throw new Error(
      `Bootstrap context is not preprod. 
Expected context.networkId=0, got ${context.networkId}`,
    );
  }
  // (fragment above) continuation of assertPreprodContext — the throw and its
  // template literal begin in the previous chunk.

  // Gather every address the context knows about: top-level signers, bot
  // payment addresses, wallet script addresses, and per-wallet signers.
  const addresses = [
    ...context.signerAddresses,
    ...context.bots.map((bot) => bot.paymentAddress),
    ...context.wallets.map((wallet) => wallet.walletAddress),
    ...context.wallets.flatMap((wallet) => wallet.signerAddresses),
  ].map((address) => address.trim());

  const nonTestnet = Array.from(new Set(addresses.filter((address) => !isTestnetAddress(address))));
  if (nonTestnet.length) {
    // Only the first few offenders are shown to keep the message bounded.
    throw new Error(
      `Preprod invariant failed: found non-testnet address(es): ${nonTestnet.slice(0, 5).join(", ")}`,
    );
  }
}

// ---- file: scripts/ci/framework/redact.ts ----

/**
 * Decide whether an object key likely holds sensitive material (tokens,
 * secrets, mnemonics, private/signing keys, seeds) by case-insensitive
 * substring matching.
 */
function shouldRedactKey(key: string): boolean {
  const k = key.toLowerCase();
  const sensitiveKeyParts = [
    "token",
    "secret",
    "authorization",
    "api_key",
    "apikey",
    "mnemonic",
    "privatekey",
    "private_key",
    "signingkey",
    "signing_key",
    "seed",
    "xprv",
    "ed25519e_sk",
  ];

  if (sensitiveKeyParts.some((part) => k.includes(part))) {
    return true;
  }

  // Catch spaced/other-delimited variants like "private signing key".
  return (
    (k.includes("private") && k.includes("key")) ||
    (k.includes("signing") && k.includes("key"))
  );
}

/**
 * Recursively copy a value, replacing any field whose key looks sensitive
 * with "[REDACTED]". Primitives pass through; arrays/objects are rebuilt,
 * so the input is never mutated.
 */
export function redactForLogs(value: unknown): unknown {
  if (Array.isArray(value)) {
    return value.map((item) => redactForLogs(item));
  }
  if (!value || typeof value !== "object") {
    return value;
  }

  const obj = value as Record<string, unknown>;
  const out: Record<string, unknown> = {};
  for (const [key, fieldValue] of Object.entries(obj)) {
    out[key] = shouldRedactKey(key) ? "[REDACTED]" : redactForLogs(fieldValue);
  }
  return out;
}

/** JSON-stringify with redaction; falls back to String() if stringify throws (cycles, BigInt). */
export function stringifyRedacted(value: unknown): string {
  try {
    return JSON.stringify(redactForLogs(value));
  } catch {
    return String(value);
  }
}

// ---- file: scripts/ci/framework/runner.ts ----

import { mkdir, writeFile } from "fs/promises";
import { dirname } from "path";
import type { CIBootstrapContext, RunReport, Scenario, ScenarioReport, StepReport } from "./types";
import { collectWalletBalanceSummary } from "./walletBalances";

/** Thin wrapper over Date.now() used for all duration measurements below. */
function now(): number {
  return Date.now();
}

/**
 * Execute scenarios sequentially, recording a StepReport per step.
 * A failed critical step stops its scenario (and the whole run); a failed
 * non-critical step stops the scenario only when
 * continueOnNonCriticalFailure is false. A wallet-balance summary is
 * collected at the end regardless of outcome.
 */
export async function runScenarios(args: {
  scenarios: Scenario[];
  ctx: CIBootstrapContext;
  continueOnNonCriticalFailure?: boolean;
}): Promise<RunReport> {
  const start = now();
  const { scenarios, ctx, continueOnNonCriticalFailure = true } = args;
  const scenarioReports: ScenarioReport[] = [];
  let overallFailed = false;

  for (const scenario of scenarios) {
    const scenarioStart = now();
    const steps: StepReport[] = [];
    let scenarioFailed = false;

    for (const step of scenario.steps) {
      const stepStart = now();
      const severity = step.severity ?? "critical";
      try {
        const result = await step.execute(ctx);
        steps.push({
          id: step.id,
          description: step.description,
          status: "passed",
          severity,
          message: result.message,
          artifacts: result.artifacts,
          durationMs: now() - stepStart,
        });
      } catch (error) {
        const errorMessage = error instanceof Error ?
        // (fragment) continuation of runScenarios in scripts/ci/framework/runner.ts —
        // the surrounding try/catch and loops begin in the previous chunk.
        error.message : String(error);
        steps.push({
          id: step.id,
          description: step.description,
          status: "failed",
          severity,
          message: "Step failed",
          durationMs: now() - stepStart,
          error: errorMessage,
        });
        scenarioFailed = true;
        overallFailed = true;
        // Critical failures abort the scenario immediately; non-critical ones
        // abort only when the caller disabled continue-on-failure.
        if (severity === "critical") {
          break;
        }
        if (!continueOnNonCriticalFailure) {
          break;
        }
      }
    }

    scenarioReports.push({
      id: scenario.id,
      description: scenario.description,
      status: scenarioFailed ? "failed" : "passed",
      durationMs: now() - scenarioStart,
      steps,
    });

    // A critical step failure also stops the remaining scenarios.
    if (scenarioFailed) {
      const hitCritical = steps.some((s) => s.status === "failed" && s.severity === "critical");
      if (hitCritical) {
        break;
      }
    }
  }

  // Capture final balances even after failures, so reports always have them.
  const walletBalanceSummary = await collectWalletBalanceSummary(ctx);

  return {
    createdAt: new Date().toISOString(),
    scenarioIds: scenarios.map((s) => s.id),
    status: overallFailed ? "failed" : "passed",
    durationMs: now() - start,
    contextSummary: {
      apiBaseUrl: ctx.apiBaseUrl,
      networkId: ctx.networkId,
      walletCount: ctx.wallets.length,
      walletTypes: ctx.walletTypes,
    },
    walletBalanceSummary,
    scenarios: scenarioReports,
  };
}

/** Persist the run report as pretty-printed JSON, creating parent directories. */
export async function writeRunReport(report: RunReport, outputPath: string): Promise<void> {
  await mkdir(dirname(outputPath), { recursive: true });
  await writeFile(outputPath, JSON.stringify(report, null, 2), "utf8");
}

// ---- file: scripts/ci/framework/types.ts ----

export type CIWalletType = "legacy" | "hierarchical" | "sdk";

// One bootstrapped multisig wallet and its constituent signers.
export type CIWalletContext = {
  type: CIWalletType;
  walletId: string;
  walletAddress: string;
  transactionId?: string;
  signerAddresses: string[];
};

// Credentials/identity of one CI bot.
export type CIBotContext = {
  id: string;
  paymentAddress: string;
  botKeyId: string;
  botId?: string;
};

// Validated shape of the on-disk bootstrap context (schemaVersion 3).
export type CIBootstrapContext = {
  schemaVersion: 3;
  createdAt: string;
  apiBaseUrl: string;
  networkId: 0 | 1;
  walletTypes: CIWalletType[];
  wallets: CIWalletContext[];
  bots: CIBotContext[];
  defaultBotId?: string;
  // Legacy single-wallet convenience fields; mirror wallets[0] when absent.
  walletId?: string;
  walletAddress?: string;
  signerAddresses: string[];
  transactionId?: string;
  /** Per-signer stake (reward) addresses aligned with signerAddresses. */
  signerStakeAddresses: string[];
  /** Multisig reward address for the SDK wallet (from MultisigWallet.getStakeAddress); present when an SDK wallet was bootstrapped. */
  sdkStakeAddress?: string;
  /** Optional preprod stake pool id (hex) for future delegate scenarios. */
  stakePoolIdHex?: string;
};

export type StepSeverity = "critical" | "non-critical";

// What a successful step hands back to the runner.
export type StepRunResult = {
  message: string;
  artifacts?: Record<string, unknown>;
};

// A single executable step within a scenario.
export type RouteStep = {
  id: string;
  description: string;
  severity?: StepSeverity;
  execute: (ctx: CIBootstrapContext) => Promise<StepRunResult>;
};

export type Scenario = {
  id: string;
  description: string;
  steps: RouteStep[];
};

// Per-step outcome as recorded in reports.
export type StepReport = {
  id: string;
  description: string;
  status: "passed" | "failed" | "skipped";
  severity: StepSeverity;
  message: string;
  durationMs: number;
  artifacts?: Record<string, unknown>;
  error?: string;
};

export type ScenarioReport = {
  id: string;
  description: string;
  status: "passed" | "failed";
  durationMs: number;
  steps: StepReport[];
};

// One wallet's aggregated on-chain balance snapshot.
export type CIWalletBalanceEntry = {
  walletType: CIWalletType;
  walletId: string;
  walletAddress: string;
  utxoCount: number;
  lovelace: string;
  assets: Record<string, string>;
  capturedAt: string;
  networkId: 0 | 1;
};

export type CIWalletBalanceSummary = {
  capturedAt: string;
  networkId: 0 | 1;
  byWalletType: Partial<Record<CIWalletType, CIWalletBalanceEntry>>;
  byWalletId: Record<string, CIWalletBalanceEntry>;
  error?: string;
};

// Top-level result of a scenario run (serialized to JSON and markdown).
export type RunReport = {
  createdAt: string;
  scenarioIds: string[];
  status: "passed" | "failed";
  durationMs: number;
  contextSummary: {
    apiBaseUrl: string;
    networkId: 0 | 1;
    walletCount: number;
    walletTypes: CIWalletType[];
  };
  walletBalanceSummary: CIWalletBalanceSummary;
  scenarios: ScenarioReport[];
};

// ---- file: scripts/ci/framework/walletAuth.ts ----

import type { CIBootstrapContext } from "./types";
import { requestJson } from "./http";
import { stringifyRedacted } from "./redact";
import { parseMnemonic } from "./mnemonic";

/**
 * Build a signer from a mnemonic via MeshWallet (dynamic import keeps the SDK
 * out of the startup path). Exposes the wallet's change address and a
 * signData closure bound to that address.
 */
export async function deriveSignerFromMnemonic(args: {
  ctx: CIBootstrapContext;
  mnemonic: string;
}): Promise<{
  signerAddress: string;
  signData: (payload: string) => Promise<{ key: string; signature: string }>;
}> {
  const { MeshWallet } = await import("@meshsdk/core");
  const wallet = new MeshWallet({
    networkId: args.ctx.networkId,
    key: { type: "mnemonic", words: parseMnemonic(args.mnemonic) },
  });
  await wallet.init();
  const signerAddress = await wallet.getChangeAddress();
  return {
    signerAddress,
    signData: async (payload: string) => {
      const signature = await wallet.signData(payload, signerAddress);
      return {
        key: signature.key,
        signature: signature.signature,
      };
    },
  };
}

/**
 * Full nonce-based signer authentication: fetch a nonce for the derived
 * address, sign it, and exchange the signature for a bearer token via
 * /api/v1/authSigner. Error messages include the redacted response body.
 */
export async function authenticateSignerWithMnemonic(args: {
  ctx: CIBootstrapContext;
  mnemonic: string;
}): Promise<{
  token: string;
  signerAddress: string;
  nonce: string;
}> {
  const signer = await deriveSignerFromMnemonic(args);
  const nonceResponse = await requestJson<{ nonce?: string; error?: string }>({
    url: `${args.ctx.apiBaseUrl}/api/v1/getNonce?address=${encodeURIComponent(signer.signerAddress)}`,
    method: "GET",
  });
  if (nonceResponse.status !== 200 || typeof nonceResponse.data?.nonce !== "string") {
    throw new Error(
      `getNonce failed (${nonceResponse.status}): ${stringifyRedacted(nonceResponse.data)}`,
    );
  }

  const signed = await signer.signData(nonceResponse.data.nonce);
  const authResponse = await requestJson<{ token?: string; error?: string }>({
url: `${args.ctx.apiBaseUrl}/api/v1/authSigner`, + method: "POST", + body: { + address: signer.signerAddress, + signature: signed.signature, + key: signed.key, + }, + }); + if (authResponse.status !== 200 || typeof authResponse.data?.token !== "string") { + throw new Error( + `authSigner failed (${authResponse.status}): ${stringifyRedacted(authResponse.data)}`, + ); + } + + return { + token: authResponse.data.token, + signerAddress: signer.signerAddress, + nonce: nonceResponse.data.nonce, + }; +} + diff --git a/scripts/ci/framework/walletBalances.ts b/scripts/ci/framework/walletBalances.ts new file mode 100644 index 00000000..92435603 --- /dev/null +++ b/scripts/ci/framework/walletBalances.ts @@ -0,0 +1,102 @@ +import type { UTxO } from "@meshsdk/core"; +import type { + CIBootstrapContext, + CIWalletBalanceEntry, + CIWalletBalanceSummary, + CIWalletType, +} from "./types"; + +type BigIntMap = Map; + +function getBlockfrostApiKey(networkId: 0 | 1): string { + if (networkId === 0) { + const preprod = process.env.CI_BLOCKFROST_PREPROD_API_KEY?.trim(); + if (!preprod) { + throw new Error("CI_BLOCKFROST_PREPROD_API_KEY is required for wallet balance summary"); + } + return preprod; + } + + const mainnet = process.env.CI_BLOCKFROST_MAINNET_API_KEY?.trim(); + if (!mainnet) { + throw new Error("CI_BLOCKFROST_MAINNET_API_KEY is required for wallet balance summary"); + } + return mainnet; +} + +function addAssetQuantity(map: BigIntMap, unit: string, quantityRaw: string): void { + const quantity = BigInt(quantityRaw); + map.set(unit, (map.get(unit) ?? 0n) + quantity); +} + +function sumUtxoAssets(utxos: UTxO[]): BigIntMap { + const totals: BigIntMap = new Map(); + for (const utxo of utxos) { + for (const asset of utxo.output.amount ?? 
[]) { + if (!asset?.unit || asset.quantity === undefined || asset.quantity === null) { + continue; + } + addAssetQuantity(totals, asset.unit, String(asset.quantity)); + } + } + return totals; +} + +function toAssetRecord(map: BigIntMap): Record { + return Object.fromEntries( + Array.from(map.entries()).map(([unit, quantity]) => [unit, quantity.toString()]), + ); +} + +function emptySummary(networkId: 0 | 1, error?: string): CIWalletBalanceSummary { + return { + capturedAt: new Date().toISOString(), + networkId, + byWalletType: {}, + byWalletId: {}, + ...(error ? { error } : {}), + }; +} + +export async function collectWalletBalanceSummary( + ctx: CIBootstrapContext, +): Promise { + try { + const apiKey = getBlockfrostApiKey(ctx.networkId); + const { BlockfrostProvider } = await import("@meshsdk/core"); + const provider = new BlockfrostProvider(apiKey); + const capturedAt = new Date().toISOString(); + + const byWalletType: Partial> = {}; + const byWalletId: CIWalletBalanceSummary["byWalletId"] = {}; + + for (const wallet of ctx.wallets) { + const utxos = await provider.fetchAddressUTxOs(wallet.walletAddress); + const totals = sumUtxoAssets(utxos); + const assets = toAssetRecord(totals); + const entry: CIWalletBalanceEntry = { + walletType: wallet.type, + walletId: wallet.walletId, + walletAddress: wallet.walletAddress, + utxoCount: utxos.length, + lovelace: (totals.get("lovelace") ?? 0n).toString(), + assets, + capturedAt, + networkId: ctx.networkId, + }; + + byWalletType[wallet.type] = entry; + byWalletId[wallet.walletId] = entry; + } + + return { + capturedAt, + networkId: ctx.networkId, + byWalletType, + byWalletId, + }; + } catch (error) { + const message = error instanceof Error ? 
error.message : String(error); + return emptySummary(ctx.networkId, message); + } +} diff --git a/scripts/ci/framework/walletType.ts b/scripts/ci/framework/walletType.ts new file mode 100644 index 00000000..7fcc3f5d --- /dev/null +++ b/scripts/ci/framework/walletType.ts @@ -0,0 +1,8 @@ +import type { CIWalletType } from "./types"; + +/** Normalize wallet type from env/CLI strings (legacy default). */ +export function normalizeWalletTypeFromLabel(value: string): CIWalletType { + const v = value.trim().toLowerCase(); + if (v === "hierarchical" || v === "sdk") return v; + return "legacy"; +} diff --git a/scripts/ci/scenarios/flows/certificateSigningFlow.ts b/scripts/ci/scenarios/flows/certificateSigningFlow.ts new file mode 100644 index 00000000..b6722c66 --- /dev/null +++ b/scripts/ci/scenarios/flows/certificateSigningFlow.ts @@ -0,0 +1,232 @@ +import type { CIBootstrapContext, CIWalletType } from "../../framework/types"; +import { requestJson } from "../../framework/http"; +import { getBotForSignerIndex } from "../../framework/botContext"; +import { authenticateBot } from "../../framework/botAuth"; +import { stringifyRedacted } from "../../framework/redact"; +import { parseMnemonic } from "../../framework/mnemonic"; +import { + SIGN_TRANSACTION_REQUEST_OPTIONS, + selectPendingTransactionForSigning, + type PendingTransactionForSigning, +} from "./signingFlow"; + +/** + * Signs a pending certificate transaction using BOTH the signer's payment key + * (required for the spending native script) and, when available, their stake key + * (required for the staking certificate native script). + * + * Both witnesses are submitted in a single signTransaction call so that the + * address-already-signed guard is not hit on a second call. + * + * This is needed for botStakeCertificate transactions where the certificate + * script is built from role-2 (stake) key hashes that differ from the payment + * key hashes used by the spending script. 
+ */ +export async function runStakeCertSigningFlow(args: { + ctx: CIBootstrapContext; + mnemonic: string; + signerIndex?: number; + signBroadcast?: boolean; + preferredTransactionId?: string; + requireBroadcastSuccess?: boolean; +}): Promise<{ + walletType: CIWalletType; + walletId: string; + transactionId: string; + signerAddress: string; + status: number; + submitted?: boolean; + submissionError?: string; + stakeWitnessIncluded: boolean; +}> { + const { ctx, mnemonic } = args; + const signerIndex = args.signerIndex ?? 1; + const shouldBroadcast = args.signBroadcast ?? true; + const requireBroadcastSuccess = args.requireBroadcastSuccess ?? true; + + // Staking cert scenarios always target the SDK wallet. + const targetWalletType: CIWalletType = "sdk"; + const selectedWallet = ctx.wallets.find((w) => w.type === targetWalletType); + if (!selectedWallet) { + throw new Error(`Unable to find wallet context for type ${targetWalletType}`); + } + + const { bot: signerBot, signerAddress: signAddress } = getBotForSignerIndex({ + ctx, + wallet: selectedWallet, + signerIndex, + }); + + const [{ MeshWallet, resolvePaymentKeyHash, resolveStakeKeyHash }, { csl, calculateTxHash }] = await Promise.all([ + import("@meshsdk/core"), + import("@meshsdk/core-csl"), + ]); + + const signerWallet = new MeshWallet({ + networkId: ctx.networkId, + key: { type: "mnemonic", words: parseMnemonic(mnemonic) }, + }); + await signerWallet.init(); + const signerAddress = await signerWallet.getChangeAddress(); + if (signerAddress !== signAddress) { + throw new Error( + `Mnemonic does not derive signer address index ${signerIndex} from context`, + ); + } + + const signerToken = await authenticateBot({ ctx, bot: signerBot }); + + const pendingResponse = await requestJson({ + url: `${ctx.apiBaseUrl}/api/v1/pendingTransactions?walletId=${encodeURIComponent(selectedWallet.walletId)}&address=${encodeURIComponent(signerAddress)}`, + method: "GET", + token: signerToken, + }); + if (pendingResponse.status 
!== 200 || !Array.isArray(pendingResponse.data)) { + throw new Error( + `pendingTransactions lookup failed (${pendingResponse.status}): ${stringifyRedacted(pendingResponse.data)}`, + ); + } + if (!pendingResponse.data.length) { + throw new Error("No pending transactions to sign for sdk wallet"); + } + + const tx = selectPendingTransactionForSigning(pendingResponse.data, args.preferredTransactionId); + + const signedPayloadHex = await signerWallet.signTx(tx.txCbor, true); + + // Parse the full vkey witness set from the signed payload. + let vkeys: ReturnType | null = null; + try { + const signedTx = csl.Transaction.from_hex(signedPayloadHex); + vkeys = signedTx.witness_set().vkeys() ?? csl.Vkeywitnesses.new(); + } catch { + const witnessSet = csl.TransactionWitnessSet.from_hex(signedPayloadHex); + vkeys = witnessSet.vkeys() ?? csl.Vkeywitnesses.new(); + } + if (!vkeys || vkeys.len() === 0) { + throw new Error("No vkey witnesses found in signed payload"); + } + + // ── Extract payment key witness ────────────────────────────────────────── + const paymentKeyHash = resolvePaymentKeyHash(signerAddress).toLowerCase(); + let paymentVkey: typeof vkeys extends { get: (i: number) => infer V } ? V : never; + let foundPayment = false; + for (let i = 0; i < vkeys.len(); i++) { + const candidate = vkeys.get(i); + const kh = Buffer.from(candidate.vkey().public_key().hash().to_bytes()).toString("hex").toLowerCase(); + if (kh === paymentKeyHash) { + paymentVkey = candidate; + foundPayment = true; + break; + } + } + if (!foundPayment) { + // Fall back to first witness if payment key not found by hash match. + paymentVkey = vkeys.get(0); + } + + const keyHex = paymentVkey!.vkey().public_key().to_hex().toLowerCase(); + const signatureHex = paymentVkey!.signature().to_hex().toLowerCase(); + + // ── Extract stake key witness ──────────────────────────────────────────── + // MeshWallet.signTx produces only payment key witnesses for native script + // spending inputs. 
Staking certificate native scripts require role-2 (stake) + // key witnesses, derived via BIP32 path m/1852'/1815'/0'/2/0 and signed + // against the transaction hash — the same path used by bootstrap.ts. + const signerStakeAddr = ctx.signerStakeAddresses[signerIndex]; + let stakeKeyHex: string | undefined; + let stakeSignatureHex: string | undefined; + + if (signerStakeAddr) { + try { + const expectedStakeHash = resolveStakeKeyHash(signerStakeAddr).toLowerCase(); + + // Primary: check if the regular signing already included the stake witness + // (MeshWallet may sign with all required keys in some versions). + for (let i = 0; i < vkeys.len(); i++) { + const candidate = vkeys.get(i); + const kh = Buffer.from(candidate.vkey().public_key().hash().to_bytes()).toString("hex").toLowerCase(); + if (kh === expectedStakeHash) { + stakeKeyHex = candidate.vkey().public_key().to_hex().toLowerCase(); + stakeSignatureHex = candidate.signature().to_hex().toLowerCase(); + break; + } + } + + // Fallback: derive stake key directly from mnemonic via BIP32 and sign + // the tx hash manually. This is reliable regardless of wallet version. 
+ if (!stakeKeyHex) { + const { mnemonicToEntropy } = await import("bip39"); + const entropy = mnemonicToEntropy(parseMnemonic(mnemonic).join(" ")); + const rootKey = csl.Bip32PrivateKey.from_bip39_entropy( + Buffer.from(entropy, "hex"), + Buffer.from(""), + ); + const stakeRawKey = rootKey + .derive(2147483648 + 1852) + .derive(2147483648 + 1815) + .derive(2147483648 + 0) + .derive(2) + .derive(0) + .to_raw_key(); + const stakePubKey = stakeRawKey.to_public(); + const derivedHash = Buffer.from(stakePubKey.hash().to_bytes()).toString("hex").toLowerCase(); + if (derivedHash === expectedStakeHash) { + const txHashBytes = Buffer.from(calculateTxHash(tx.txCbor), "hex"); + stakeKeyHex = stakePubKey.to_hex().toLowerCase(); + stakeSignatureHex = Buffer.from(stakeRawKey.sign(txHashBytes).to_bytes()).toString("hex").toLowerCase(); + } + } + } catch { + // Cannot produce stake witness — broadcast may fail without it + } + } + + const stakeWitnessIncluded = !!(stakeKeyHex && stakeSignatureHex); + + // ── Submit to signTransaction ──────────────────────────────────────────── + const signBody: Record = { + walletId: selectedWallet.walletId, + transactionId: tx.id, + address: signerAddress, + signature: signatureHex, + key: keyHex, + broadcast: shouldBroadcast, + }; + if (stakeWitnessIncluded) { + signBody.stakeKey = stakeKeyHex; + signBody.stakeSignature = stakeSignatureHex; + } + + const signResponse = await requestJson< + { submitted?: boolean; txHash?: string; error?: string; submissionError?: string } + >({ + url: `${ctx.apiBaseUrl}/api/v1/signTransaction`, + method: "POST", + token: signerToken, + ...SIGN_TRANSACTION_REQUEST_OPTIONS, + body: signBody, + }); + + if (signResponse.status !== 200 && signResponse.status !== 502) { + throw new Error( + `signTransaction failed (${signResponse.status}): ${stringifyRedacted(signResponse.data)}`, + ); + } + if (requireBroadcastSuccess && signResponse.status === 502) { + throw new Error( + `signTransaction broadcast failed 
(${signResponse.status}): ${stringifyRedacted(signResponse.data)}`, + ); + } + + return { + walletType: selectedWallet.type, + walletId: selectedWallet.walletId, + transactionId: tx.id, + signerAddress, + status: signResponse.status, + submitted: signResponse.data?.submitted, + submissionError: signResponse.data?.submissionError, + stakeWitnessIncluded, + }; +} diff --git a/scripts/ci/scenarios/flows/signingFlow.ts b/scripts/ci/scenarios/flows/signingFlow.ts new file mode 100644 index 00000000..f52e5630 --- /dev/null +++ b/scripts/ci/scenarios/flows/signingFlow.ts @@ -0,0 +1,180 @@ +import type { CIBootstrapContext, CIWalletType } from "../../framework/types"; +import { requestJson } from "../../framework/http"; +import { getBotForSignerIndex } from "../../framework/botContext"; +import { authenticateBot } from "../../framework/botAuth"; +import { stringifyRedacted } from "../../framework/redact"; +import { parseMnemonic } from "../../framework/mnemonic"; +import { normalizeWalletTypeFromLabel } from "../../framework/walletType"; + +export type PendingTransactionForSigning = { id: string; txCbor?: string }; + +// signTransaction mutates the pending tx before broadcast. Retrying a 502 can +// turn the useful submission error into a duplicate-signature 409. 
+export const SIGN_TRANSACTION_REQUEST_OPTIONS = { + retries: 0, +} as const; + +export function selectPendingTransactionForSigning( + pendingTransactions: PendingTransactionForSigning[], + preferredTransactionId?: string, +): PendingTransactionForSigning & { txCbor: string } { + if (preferredTransactionId) { + const tx = pendingTransactions.find((p) => p.id === preferredTransactionId); + if (!tx) { + throw new Error(`Preferred pending transaction ${preferredTransactionId} was not found`); + } + if (!tx.txCbor) { + throw new Error(`Preferred pending transaction ${preferredTransactionId} does not include txCbor`); + } + return { ...tx, txCbor: tx.txCbor }; + } + + const tx = pendingTransactions.find((p) => typeof p.txCbor === "string" && p.txCbor.length > 0); + if (!tx) { + throw new Error("Pending transactions exist but none include txCbor"); + } + const txCbor = tx.txCbor; + if (!txCbor) { + throw new Error("Pending transactions exist but none include txCbor"); + } + return { ...tx, txCbor }; +} + +export async function runSigningFlow(args: { + ctx: CIBootstrapContext; + mnemonic: string; + signWalletType?: string; + signerIndex?: number; + signerLabel?: string; + signBroadcast?: boolean; + preferredTransactionId?: string; + requireBroadcastSuccess?: boolean; +}): Promise<{ + walletType: CIWalletType; + walletId: string; + transactionId: string; + signerAddress: string; + status: number; + submitted?: boolean; + txHash?: string; +}> { + const { ctx, mnemonic } = args; + const targetWalletType = normalizeWalletTypeFromLabel(args.signWalletType ?? "legacy"); + const signerIndex = args.signerIndex ?? 1; + const signerLabel = args.signerLabel ?? `signer${signerIndex}`; + const shouldBroadcast = args.signBroadcast ?? true; + const requireBroadcastSuccess = args.requireBroadcastSuccess ?? 
true; + + const selectedWallet = ctx.wallets.find((w) => w.type === targetWalletType); + if (!selectedWallet) { + throw new Error(`Unable to find wallet context for type ${targetWalletType}`); + } + + const { bot: signerBot, signerAddress: signAddress } = getBotForSignerIndex({ + ctx, + wallet: selectedWallet, + signerIndex, + }); + + const [{ MeshWallet, resolvePaymentKeyHash }, { csl }] = await Promise.all([ + import("@meshsdk/core"), + import("@meshsdk/core-csl"), + ]); + const signerWallet = new MeshWallet({ + networkId: ctx.networkId, + key: { type: "mnemonic", words: parseMnemonic(mnemonic) }, + }); + await signerWallet.init(); + const signerAddress = await signerWallet.getChangeAddress(); + if (signerAddress !== signAddress) { + throw new Error( + `${signerLabel} mnemonic does not derive signer address index ${signerIndex} from context`, + ); + } + + const signerToken = await authenticateBot({ ctx, bot: signerBot }); + + const pendingResponse = await requestJson({ + url: `${ctx.apiBaseUrl}/api/v1/pendingTransactions?walletId=${encodeURIComponent(selectedWallet.walletId)}&address=${encodeURIComponent(signerAddress)}`, + method: "GET", + token: signerToken, + }); + if (pendingResponse.status !== 200 || !Array.isArray(pendingResponse.data)) { + throw new Error( + `pendingTransactions lookup failed (${pendingResponse.status}): ${stringifyRedacted(pendingResponse.data)}`, + ); + } + if (!pendingResponse.data.length) { + throw new Error(`No pending transactions to sign for wallet type ${targetWalletType}`); + } + + const tx = selectPendingTransactionForSigning(pendingResponse.data, args.preferredTransactionId); + + const signedPayloadHex = await signerWallet.signTx(tx.txCbor, true); + + let vkeys: any = null; + try { + const signedTx = csl.Transaction.from_hex(signedPayloadHex); + vkeys = signedTx.witness_set().vkeys(); + } catch { + const witnessSet = csl.TransactionWitnessSet.from_hex(signedPayloadHex); + vkeys = witnessSet.vkeys(); + } + + if (!vkeys || 
vkeys.len() === 0) { + throw new Error("No vkey witness found in signed payload"); + } + + const addressKeyHash = resolvePaymentKeyHash(signerAddress).toLowerCase(); + let selected = vkeys.get(0); + for (let i = 0; i < vkeys.len(); i++) { + const candidate = vkeys.get(i); + const keyHash = Buffer.from(candidate.vkey().public_key().hash().to_bytes()) + .toString("hex") + .toLowerCase(); + if (keyHash === addressKeyHash) { + selected = candidate; + break; + } + } + + const keyHex = selected.vkey().public_key().to_hex().toLowerCase(); + const signatureHex = selected.signature().to_hex().toLowerCase(); + const signResponse = await requestJson< + { submitted?: boolean; txHash?: string; error?: string; submissionError?: string } + >({ + url: `${ctx.apiBaseUrl}/api/v1/signTransaction`, + method: "POST", + token: signerToken, + ...SIGN_TRANSACTION_REQUEST_OPTIONS, + body: { + walletId: selectedWallet.walletId, + transactionId: tx.id, + address: signerAddress, + signature: signatureHex, + key: keyHex, + broadcast: shouldBroadcast, + }, + }); + + if (signResponse.status !== 200 && signResponse.status !== 502) { + throw new Error( + `signTransaction failed (${signResponse.status}): ${stringifyRedacted(signResponse.data)}`, + ); + } + if (requireBroadcastSuccess && signResponse.status === 502) { + throw new Error( + `signTransaction broadcast failed (${signResponse.status}): ${stringifyRedacted(signResponse.data)}`, + ); + } + + return { + walletType: selectedWallet.type, + walletId: selectedWallet.walletId, + transactionId: tx.id, + signerAddress, + status: signResponse.status, + submitted: signResponse.data?.submitted, + txHash: signResponse.data?.txHash, + }; +} diff --git a/scripts/ci/scenarios/flows/transferFlow.ts b/scripts/ci/scenarios/flows/transferFlow.ts new file mode 100644 index 00000000..5790dff8 --- /dev/null +++ b/scripts/ci/scenarios/flows/transferFlow.ts @@ -0,0 +1,239 @@ +import type { CIBootstrapContext, CIWalletType } from "../../framework/types"; +import { 
requestJson } from "../../framework/http"; +import { getDefaultBot } from "../../framework/botContext"; +import { authenticateBot } from "../../framework/botAuth"; +import { stringifyRedacted } from "../../framework/redact"; +import { parseMnemonic } from "../../framework/mnemonic"; +import { normalizeWalletTypeFromLabel } from "../../framework/walletType"; +import { isTestnetAddress } from "../../framework/preprod"; +import { PrismaClient } from "@prisma/client"; + +const prisma = new PrismaClient(); + +type TransferSeedResult = { + fromWalletType: CIWalletType; + toWalletType: CIWalletType; + fromWalletId: string; + toWalletId: string; + transferFromAddress: string; + transferToAddress: string; + transferAmountLovelace: string; + transactionId: string; +}; + +type UTxOAmount = { + unit: string; + quantity: string; +}; + +type ScriptUtxo = { + input: { + txHash: string; + outputIndex: number; + }; + output: { + address: string; + amount: UTxOAmount[]; + }; +}; + +function parseLovelace(amounts: UTxOAmount[]): bigint { + const lovelace = amounts.find((asset) => asset.unit === "lovelace")?.quantity ?? 
"0"; + try { + return BigInt(lovelace); + } catch { + return 0n; + } +} + +async function loadScriptCbor(walletId: string): Promise { + const wallet = await prisma.wallet.findUnique({ + where: { id: walletId }, + select: { scriptCbor: true }, + }); + const scriptCbor = wallet?.scriptCbor?.trim(); + if (!scriptCbor) { + throw new Error(`Wallet ${walletId} is missing scriptCbor; cannot build multisig input transaction`); + } + return scriptCbor; +} + +export async function seedRealTransferTransaction(args: { + ctx: CIBootstrapContext; + fromMnemonic: string; + fromWalletType: string; + toWalletType: string; + transferLovelace?: string; +}): Promise { + const { ctx } = args; + const defaultBot = getDefaultBot(ctx); + const defaultBotToken = await authenticateBot({ ctx, bot: defaultBot }); + const fromWalletType = normalizeWalletTypeFromLabel(args.fromWalletType); + const toWalletType = normalizeWalletTypeFromLabel(args.toWalletType); + const fromWallet = ctx.wallets.find((w) => w.type === fromWalletType); + if (!fromWallet) { + throw new Error(`Unable to find source wallet context for type ${fromWalletType}`); + } + const toWallet = ctx.wallets.find((w) => w.type === toWalletType); + if (!toWallet) { + throw new Error(`Unable to find destination wallet context for type ${toWalletType}`); + } + + if (fromWallet.walletId === toWallet.walletId) { + throw new Error(`Source and destination wallets must differ for transfer leg ${fromWalletType}`); + } + + const transferToAddress = toWallet.walletAddress; + if (!transferToAddress) { + throw new Error(`Destination wallet ${toWallet.walletId} is missing walletAddress`); + } + const transferFromAddress = fromWallet.walletAddress; + if (!transferFromAddress) { + throw new Error(`Source wallet ${fromWallet.walletId} is missing walletAddress`); + } + + const transferAmountLovelace = (() => { + const raw = (args.transferLovelace ?? process.env.CI_TRANSFER_LOVELACE ?? 
"2000000").trim(); + const n = Number(raw); + if (!Number.isFinite(n) || n < 1_000_000) { + throw new Error("CI_TRANSFER_LOVELACE must be a number >= 1000000"); + } + return String(Math.trunc(n)); + })(); + + const apiKey = process.env.CI_BLOCKFROST_PREPROD_API_KEY?.trim(); + if (!apiKey) { + throw new Error("CI_BLOCKFROST_PREPROD_API_KEY is required for real transfer scenario"); + } + if (ctx.networkId !== 0) { + throw new Error( + `CI route-chain transfer scenario is preprod-only. Expected networkId=0, received networkId=${ctx.networkId}`, + ); + } + + for (const address of [ + transferFromAddress, + transferToAddress, + ...fromWallet.signerAddresses, + ...toWallet.signerAddresses, + ]) { + if (!isTestnetAddress(address)) { + throw new Error(`Preprod invariant failed: non-testnet address detected: ${address}`); + } + } + + const { MeshWallet, MeshTxBuilder, BlockfrostProvider } = await import("@meshsdk/core"); + const provider = new BlockfrostProvider(apiKey); + const signerWallet = new MeshWallet({ + networkId: ctx.networkId, + key: { type: "mnemonic", words: parseMnemonic(args.fromMnemonic) }, + }); + await signerWallet.init(); + const signerAddress = await signerWallet.getChangeAddress(); + const expectedFromAddress = fromWallet.signerAddresses?.[1]; + if (!expectedFromAddress || signerAddress !== expectedFromAddress) { + throw new Error( + `Transfer mnemonic does not match expected signerAddresses[1] for source wallet ${fromWalletType}`, + ); + } + if (!isTestnetAddress(signerAddress)) { + throw new Error(`Preprod invariant failed: transfer signer is not a testnet address (${signerAddress})`); + } + + const sourceWalletScriptCbor = await loadScriptCbor(fromWallet.walletId); + const freeUtxosResponse = await requestJson({ + url: `${ctx.apiBaseUrl}/api/v1/freeUtxos?walletId=${encodeURIComponent(fromWallet.walletId)}&address=${encodeURIComponent(defaultBot.paymentAddress)}`, + method: "GET", + token: defaultBotToken, + }); + if (freeUtxosResponse.status !== 200 
|| !Array.isArray(freeUtxosResponse.data)) { + throw new Error( + `freeUtxos transfer preflight failed (${freeUtxosResponse.status}): ${stringifyRedacted(freeUtxosResponse.data)}`, + ); + } + if (freeUtxosResponse.data.length === 0) { + throw new Error( + `No free UTxOs available for source wallet ${fromWalletType} (${fromWallet.walletId}) at ${transferFromAddress}`, + ); + } + + const availableLovelace = freeUtxosResponse.data.reduce((sum, utxo) => { + return sum + parseLovelace(utxo.output.amount); + }, 0n); + const transferAmount = BigInt(transferAmountLovelace); + const feeBuffer = 1_000_000n; + const minimumRequired = transferAmount + feeBuffer; + if (availableLovelace < minimumRequired) { + throw new Error( + `Insufficient multisig wallet balance for transfer: available=${availableLovelace.toString()} lovelace, required>=${minimumRequired.toString()} (amount=${transferAmountLovelace}, feeBuffer=${feeBuffer.toString()})`, + ); + } + + const txBuilder = new MeshTxBuilder({ + fetcher: provider, + evaluator: provider, + verbose: true, + }); + txBuilder.setNetwork("preprod"); + for (const utxo of freeUtxosResponse.data) { + txBuilder + .txIn( + utxo.input.txHash, + utxo.input.outputIndex, + utxo.output.amount, + utxo.output.address, + ) + .txInScript(sourceWalletScriptCbor); + } + txBuilder.txOut(transferToAddress, [ + { + unit: "lovelace", + quantity: transferAmountLovelace, + }, + ]); + txBuilder.changeAddress(transferFromAddress); + const unsignedTxHex = await txBuilder.complete(); + if (!unsignedTxHex || typeof unsignedTxHex !== "string") { + throw new Error("Failed to build unsigned transfer transaction"); + } + + const addResponse = await requestJson<{ id?: string; error?: string }>({ + url: `${ctx.apiBaseUrl}/api/v1/addTransaction`, + method: "POST", + token: defaultBotToken, + body: { + walletId: fromWallet.walletId, + address: defaultBot.paymentAddress, + txCbor: unsignedTxHex, + txJson: JSON.stringify({ + source: "ci-route-chain", + kind: 
"real-transfer", + fromWalletType, + toWalletType, + from: transferFromAddress, + to: transferToAddress, + fundingSource: "source-multisig-utxos", + amountLovelace: transferAmountLovelace, + sourceUtxoCount: freeUtxosResponse.data.length, + availableLovelace: availableLovelace.toString(), + }), + description: `CI real transfer route-chain tx (${fromWalletType} -> ${toWalletType})`, + }, + }); + if (addResponse.status !== 201 || !addResponse.data?.id) { + throw new Error( + `addTransaction real-transfer failed (${addResponse.status}): ${stringifyRedacted(addResponse.data)}`, + ); + } + + return { + fromWalletType, + toWalletType, + fromWalletId: fromWallet.walletId, + toWalletId: toWallet.walletId, + transferFromAddress, + transferToAddress, + transferAmountLovelace, + transactionId: addResponse.data.id, + }; +} diff --git a/scripts/ci/scenarios/flows/utxoShapeFlow.ts b/scripts/ci/scenarios/flows/utxoShapeFlow.ts new file mode 100644 index 00000000..d2dc9035 --- /dev/null +++ b/scripts/ci/scenarios/flows/utxoShapeFlow.ts @@ -0,0 +1,309 @@ +import { PrismaClient } from "@prisma/client"; +import type { CIBootstrapContext, CIWalletType } from "../../framework/types"; +import { authenticateBot } from "../../framework/botAuth"; +import { getDefaultBot } from "../../framework/botContext"; +import { boolFromEnv } from "../../framework/env"; +import { requestJson } from "../../framework/http"; +import { parseMnemonic } from "../../framework/mnemonic"; +import { isTestnetAddress } from "../../framework/preprod"; +import { stringifyRedacted } from "../../framework/redact"; +import { + analyzeProxyFullLifecycleUtxoShape, + assertProxyFullLifecyclePreflight, + formatAda, + key, + PROXY_LIFECYCLE_COLLATERAL_SPLIT_LOVELACE, + type ScriptUtxo, + type UtxoRef, + toRef, +} from "../proxyLifecyclePreflight"; +import { runSigningFlow } from "./signingFlow"; +import { getWalletByType } from "../steps/helpers"; + +const prisma = new PrismaClient(); + +type UtxoShapeResult = { + 
walletType: CIWalletType; + walletId: string; + status: "already-shaped" | "split"; + transactionId?: string; + spentUtxoRefs?: UtxoRef[]; + attempts?: number; + totalLovelace: string; + requiredTotalLovelace: string; + drepSelectableLovelace: string; + keyCollateralCandidates: number; +}; + +async function loadScriptCbor(walletId: string): Promise { + const wallet = await prisma.wallet.findUnique({ + where: { id: walletId }, + select: { scriptCbor: true }, + }); + const scriptCbor = wallet?.scriptCbor?.trim(); + if (!scriptCbor) { + throw new Error(`Wallet ${walletId} is missing scriptCbor; cannot build proxy lifecycle self-split`); + } + return scriptCbor; +} + +async function fetchFreshFreeUtxos(args: { + ctx: CIBootstrapContext; + walletId: string; + token: string; + address: string; +}): Promise { + const response = await requestJson({ + url: `${args.ctx.apiBaseUrl}/api/v1/freeUtxos?walletId=${encodeURIComponent(args.walletId)}&address=${encodeURIComponent(args.address)}&fresh=true`, + method: "GET", + token: args.token, + }); + if (response.status !== 200 || !Array.isArray(response.data)) { + throw new Error(`freeUtxos UTxO-shape lookup failed (${response.status}): ${stringifyRedacted(response.data)}`); + } + return response.data; +} + +async function fetchKeyAddressUtxos(args: { + ctx: CIBootstrapContext; + address: string; +}): Promise { + const apiKey = process.env.CI_BLOCKFROST_PREPROD_API_KEY?.trim(); + if (!apiKey) { + throw new Error("CI_BLOCKFROST_PREPROD_API_KEY is required to fetch proxy lifecycle key-address collateral"); + } + if (args.ctx.networkId !== 0) { + throw new Error(`Proxy lifecycle key collateral lookup is preprod-only. 
Expected networkId=0, received networkId=${args.ctx.networkId}`); + } + + const { BlockfrostProvider } = await import("@meshsdk/core"); + const provider = new BlockfrostProvider(apiKey); + const utxos = await provider.fetchAddressUTxOs(args.address); + return utxos.map((utxo) => ({ + input: utxo.input, + output: utxo.output, + })); +} + +async function pollUntilUtxosConsumed(args: { + ctx: CIBootstrapContext; + walletId: string; + token: string; + address: string; + spentUtxoRefs: UtxoRef[]; + maxRetries?: number; + retryDelayMs?: number; +}): Promise<{ attempts: number }> { + const maxRetries = args.maxRetries ?? 30; + const retryDelayMs = args.retryDelayMs ?? 8000; + const spent = new Set(args.spentUtxoRefs.map(key)); + for (let attempt = 0; attempt < maxRetries; attempt++) { + if (attempt > 0) { + await new Promise((resolve) => setTimeout(resolve, retryDelayMs)); + } + const utxos = await fetchFreshFreeUtxos(args); + if (!utxos.some((utxo) => spent.has(key(toRef(utxo))))) { + return { attempts: attempt + 1 }; + } + } + throw new Error("Timed out waiting for proxy lifecycle self-split inputs to be confirmed"); +} + +function requireProxyShapeEnvironment(ctx: CIBootstrapContext, walletAddress: string, collateralAddress: string): void { + const apiKey = process.env.CI_BLOCKFROST_PREPROD_API_KEY?.trim(); + if (!apiKey) { + throw new Error("CI_BLOCKFROST_PREPROD_API_KEY is required for proxy lifecycle UTxO shaping"); + } + if (ctx.networkId !== 0) { + throw new Error(`Proxy lifecycle UTxO shaping is preprod-only. 
Expected networkId=0, received networkId=${ctx.networkId}`); + } + if (!isTestnetAddress(walletAddress)) { + throw new Error(`Preprod invariant failed: proxy lifecycle wallet address is not testnet (${walletAddress})`); + } + if (!isTestnetAddress(collateralAddress)) { + throw new Error(`Preprod invariant failed: proxy lifecycle collateral address is not testnet (${collateralAddress})`); + } + for (const envName of ["CI_MNEMONIC_2", "CI_MNEMONIC_3"] as const) { + if (!process.env[envName]?.trim()) { + throw new Error(`${envName} is required for proxy lifecycle UTxO shaping`); + } + parseMnemonic(process.env[envName]!); + } +} + +async function buildSelfSplitTransaction(args: { + walletId: string; + walletAddress: string; + collateralAddress: string; + utxos: ScriptUtxo[]; +}): Promise { + const apiKey = process.env.CI_BLOCKFROST_PREPROD_API_KEY?.trim(); + if (!apiKey) { + throw new Error("CI_BLOCKFROST_PREPROD_API_KEY is required for proxy lifecycle UTxO shaping"); + } + const scriptCbor = await loadScriptCbor(args.walletId); + const { MeshTxBuilder, BlockfrostProvider } = await import("@meshsdk/core"); + const provider = new BlockfrostProvider(apiKey); + const txBuilder = new MeshTxBuilder({ + fetcher: provider, + evaluator: provider, + verbose: true, + }); + txBuilder.setNetwork("preprod"); + for (const utxo of args.utxos) { + txBuilder + .txIn( + utxo.input.txHash, + utxo.input.outputIndex, + utxo.output.amount, + utxo.output.address, + ) + .txInScript(scriptCbor); + } + txBuilder.txOut(args.collateralAddress, [ + { + unit: "lovelace", + quantity: PROXY_LIFECYCLE_COLLATERAL_SPLIT_LOVELACE.toString(), + }, + ]); + txBuilder.changeAddress(args.walletAddress); + const unsignedTxHex = await txBuilder.complete(); + if (!unsignedTxHex || typeof unsignedTxHex !== "string") { + throw new Error("Failed to build unsigned proxy lifecycle self-split transaction"); + } + return unsignedTxHex; +} + +export async function ensureProxyLifecycleUtxoShape(args: { + ctx: 
CIBootstrapContext; + walletType: CIWalletType; +}): Promise { + const wallet = getWalletByType(args.ctx, args.walletType); + if (!wallet) throw new Error(`Missing ${args.walletType} wallet`); + if (!wallet.walletAddress) { + throw new Error(`Wallet ${wallet.walletId} is missing walletAddress; cannot shape proxy lifecycle UTxOs`); + } + + const bot = getDefaultBot(args.ctx); + const token = await authenticateBot({ ctx: args.ctx, bot }); + const [utxos, collateralUtxos] = await Promise.all([ + fetchFreshFreeUtxos({ + ctx: args.ctx, + walletId: wallet.walletId, + token, + address: bot.paymentAddress, + }), + fetchKeyAddressUtxos({ ctx: args.ctx, address: bot.paymentAddress }), + ]); + const analysis = analyzeProxyFullLifecycleUtxoShape({ + walletUtxos: utxos, + collateralUtxos, + }); + if (analysis.status === "pass") { + return { + walletType: args.walletType, + walletId: wallet.walletId, + status: "already-shaped", + totalLovelace: analysis.totalLovelace.toString(), + requiredTotalLovelace: analysis.requiredTotalLovelace.toString(), + drepSelectableLovelace: analysis.drepSelectableLovelace.toString(), + keyCollateralCandidates: analysis.keyCollateralCandidates, + }; + } + if (analysis.status !== "needs-split") { + if (analysis.status === "insufficient-shape") { + throw new Error( + `Proxy lifecycle self-split cannot leave ${formatAda(PROXY_LIFECYCLE_COLLATERAL_SPLIT_LOVELACE)} collateral plus enough selectable ADA. ${analysis.diagnostics}. 
Add at least ${formatAda(analysis.selfSplitRequiredLovelace - analysis.totalLovelace)} plus any desired safety margin before running proxy full lifecycle.`, + ); + } + assertProxyFullLifecyclePreflight({ walletUtxos: utxos, collateralUtxos }); + } + + requireProxyShapeEnvironment(args.ctx, wallet.walletAddress, bot.paymentAddress); + const unsignedTxHex = await buildSelfSplitTransaction({ + walletId: wallet.walletId, + walletAddress: wallet.walletAddress, + collateralAddress: bot.paymentAddress, + utxos, + }); + const addResponse = await requestJson<{ id?: string; error?: string }>({ + url: `${args.ctx.apiBaseUrl}/api/v1/addTransaction`, + method: "POST", + token, + body: { + walletId: wallet.walletId, + address: bot.paymentAddress, + txCbor: unsignedTxHex, + txJson: JSON.stringify({ + source: "ci-route-chain", + kind: "proxy-lifecycle-utxo-shape", + walletType: args.walletType, + outputCollateralLovelace: PROXY_LIFECYCLE_COLLATERAL_SPLIT_LOVELACE.toString(), + outputCollateralAddress: bot.paymentAddress, + sourceUtxoCount: utxos.length, + totalLovelace: analysis.totalLovelace.toString(), + }), + description: `CI proxy lifecycle UTxO self-split (${args.walletType})`, + }, + }); + if (addResponse.status !== 201 || !addResponse.data?.id) { + throw new Error(`addTransaction proxy UTxO self-split failed (${addResponse.status}): ${stringifyRedacted(addResponse.data)}`); + } + + const transactionId = addResponse.data.id; + await runSigningFlow({ + ctx: args.ctx, + mnemonic: process.env.CI_MNEMONIC_2!, + signWalletType: args.walletType, + signerIndex: 1, + signerLabel: "signer1", + signBroadcast: false, + preferredTransactionId: transactionId, + requireBroadcastSuccess: false, + }); + await runSigningFlow({ + ctx: args.ctx, + mnemonic: process.env.CI_MNEMONIC_3!, + signWalletType: args.walletType, + signerIndex: 2, + signerLabel: "signer2", + signBroadcast: boolFromEnv(process.env.SIGN_BROADCAST, true), + preferredTransactionId: transactionId, + requireBroadcastSuccess: 
true, + }); + + const spentUtxoRefs = utxos.map(toRef); + const confirmation = await pollUntilUtxosConsumed({ + ctx: args.ctx, + walletId: wallet.walletId, + token, + address: bot.paymentAddress, + spentUtxoRefs, + }); + const [shapedUtxos, shapedCollateralUtxos] = await Promise.all([ + fetchFreshFreeUtxos({ + ctx: args.ctx, + walletId: wallet.walletId, + token, + address: bot.paymentAddress, + }), + fetchKeyAddressUtxos({ ctx: args.ctx, address: bot.paymentAddress }), + ]); + const shaped = assertProxyFullLifecyclePreflight({ + walletUtxos: shapedUtxos, + collateralUtxos: shapedCollateralUtxos, + }); + + return { + walletType: args.walletType, + walletId: wallet.walletId, + status: "split", + transactionId, + spentUtxoRefs, + attempts: confirmation.attempts, + totalLovelace: shaped.totalLovelace.toString(), + requiredTotalLovelace: shaped.requiredTotalLovelace.toString(), + drepSelectableLovelace: shaped.drepSelectableLovelace.toString(), + keyCollateralCandidates: shaped.keyCollateralCandidates, + }; +} diff --git a/scripts/ci/scenarios/manifest.ts b/scripts/ci/scenarios/manifest.ts new file mode 100644 index 00000000..21bfc7fc --- /dev/null +++ b/scripts/ci/scenarios/manifest.ts @@ -0,0 +1,99 @@ +import type { CIBootstrapContext, CIWalletType, Scenario } from "../framework/types"; +import { getRingWalletTypes } from "./steps/helpers"; +import { createScenarioPendingAndDiscovery, createScenarioAdaRouteHealth } from "./steps/discovery"; +import { createScenarioBotIdentity } from "./steps/botIdentity"; +import { createScenarioAuthPlane } from "./steps/authPlane"; +import { createScenarioSubmitDatum } from "./steps/datum"; +import { createScenarioGovernanceRoutes } from "./steps/governance"; +import { + createScenarioProxyFullLifecycle, + createScenarioProxySmoke, +} from "./steps/proxyBot"; +import { + createScenarioRealTransferAndSign, + createScenarioFinalAssertions, + type TransferLegRuntime, +} from "./steps/transferRing"; +import { + 
createScenarioDRepCertificates, + createScenarioStakeCertificates, +} from "./steps/certificates"; +import { createScenarioCreateWallet } from "./steps/walletLifecycle"; + +export const ROUTE_SCENARIO_IDS = [ + "scenario.wallet-discovery", + "scenario.ada-route-health", + "scenario.create-wallet", + "scenario.bot-identity", + "scenario.auth-plane", + "scenario.proxy-smoke", + "scenario.submit-datum", + "scenario.governance-routes", + "scenario.drep-certificates", + "scenario.stake-certificates", + "scenario.proxy-full-lifecycle", + "scenario.real-transfer-and-sign", + "scenario.final-assertions", +] as const; + +function createTransferRuntime(ctx: CIBootstrapContext): { transferLegs: TransferLegRuntime[] } { + const [legacy, hierarchical, sdk] = getRingWalletTypes(ctx); + return { + transferLegs: [ + { fromWalletType: legacy, toWalletType: hierarchical }, + { fromWalletType: hierarchical, toWalletType: sdk }, + { fromWalletType: sdk, toWalletType: legacy }, + ], + }; +} + +export function getScenarioManifest( + ctx: CIBootstrapContext, + requestedScenarioIds: string[] = [], +): Scenario[] { + const requested = new Set(requestedScenarioIds); + const shouldInclude = (id: (typeof ROUTE_SCENARIO_IDS)[number]) => + requested.size === 0 || requested.has(id); + + const hasLegacy = ctx.wallets.some((w) => w.type === "legacy"); + const hasSdk = ctx.wallets.some((w) => w.type === "sdk"); + let transferRuntime: { transferLegs: TransferLegRuntime[] } | undefined; + const getTransferRuntime = () => { + transferRuntime ??= createTransferRuntime(ctx); + return transferRuntime; + }; + + const scenarios: Scenario[] = []; + + if (shouldInclude("scenario.wallet-discovery")) scenarios.push(createScenarioPendingAndDiscovery(ctx)); + if (shouldInclude("scenario.ada-route-health")) scenarios.push(createScenarioAdaRouteHealth(ctx)); + if (shouldInclude("scenario.create-wallet")) scenarios.push(createScenarioCreateWallet(ctx)); + if (shouldInclude("scenario.bot-identity")) 
scenarios.push(createScenarioBotIdentity()); + if (shouldInclude("scenario.auth-plane")) scenarios.push(createScenarioAuthPlane(ctx)); + if (shouldInclude("scenario.proxy-smoke")) scenarios.push(createScenarioProxySmoke(ctx)); + if (shouldInclude("scenario.submit-datum")) scenarios.push(createScenarioSubmitDatum(ctx)); + if (shouldInclude("scenario.governance-routes")) scenarios.push(createScenarioGovernanceRoutes(ctx)); + + // Certificate scenarios run before the ring transfer so they use confirmed, + // unspent UTxOs. The ring transfer spends wallet UTxOs; running certs after + // it creates a race where the cert tx references UTxOs already in the mempool. + if (hasLegacy && hasSdk && shouldInclude("scenario.drep-certificates")) { + scenarios.push(createScenarioDRepCertificates()); + } + if (hasSdk && shouldInclude("scenario.stake-certificates")) { + scenarios.push(createScenarioStakeCertificates()); + } + + if (shouldInclude("scenario.proxy-full-lifecycle")) { + scenarios.push(createScenarioProxyFullLifecycle(ctx)); + } + + if (shouldInclude("scenario.real-transfer-and-sign")) { + scenarios.push(createScenarioRealTransferAndSign(getTransferRuntime())); + } + if (shouldInclude("scenario.final-assertions")) { + scenarios.push(createScenarioFinalAssertions(getTransferRuntime())); + } + + return scenarios; +} diff --git a/scripts/ci/scenarios/proxyChainRecovery.ts b/scripts/ci/scenarios/proxyChainRecovery.ts new file mode 100644 index 00000000..2b40b624 --- /dev/null +++ b/scripts/ci/scenarios/proxyChainRecovery.ts @@ -0,0 +1,369 @@ +import { PrismaClient, type Proxy as DbProxy } from "@prisma/client"; +import { BlockfrostProvider, type UTxO } from "@meshsdk/core"; +import { deriveProxyScripts } from "../../../src/lib/server/proxyTxBuilders"; +import type { UtxoRef } from "../../../src/lib/server/proxyUtxos"; +import type { CIBootstrapContext, CIWalletType } from "../framework/types"; +import { getWalletByType } from "./steps/helpers"; + +type ProxyRecoveryRow = 
Pick< + DbProxy, + "id" | "walletId" | "proxyAddress" | "authTokenId" | "paramUtxo" | "isActive" +>; + +type ProxyRecoveryCreateData = { + walletId: string; + proxyAddress: string; + authTokenId: string; + paramUtxo: string; + description: string; + isActive: true; +}; + +type ProxyRecoveryDb = { + wallet: { + findUnique: (args: { + where: { id: string }; + select: { id: true }; + }) => Promise<{ id: string } | null>; + }; + proxy: { + findFirst: (args: { + where: { authTokenId: string }; + select: Record; + }) => Promise; + create: (args: { + data: ProxyRecoveryCreateData; + select: Record; + }) => Promise; + update: (args: { + where: { id: string }; + data: { walletId: string; isActive: true }; + select: Record; + }) => Promise; + }; + $transaction?: (fn: (tx: ProxyRecoveryDb) => Promise) => Promise; +}; + +export type ProxyChainRecoveryProvider = { + fetchAddressUTxOs: (address: string) => Promise; + get: (path: string) => Promise; +}; + +type AssetHistoryEntry = { + tx_hash?: string; + action?: string; +}; + +type TxUtxoEntry = { + tx_hash?: string; + output_index?: number; +}; + +type TxUtxosResponse = { + inputs?: TxUtxoEntry[]; +}; + +export type ProxyChainRecoverySkipReason = + | "candidate-cap-exceeded" + | "asset-history-fetch-error" + | "no-mint-transaction" + | "tx-utxos-fetch-error" + | "no-derived-match" + | "already-current-active"; + +export type ProxyChainRecoveryResult = { + walletType: CIWalletType; + walletId: string; + walletAddress: string; + recovered: Array<{ + proxyId: string; + action: "created" | "reactivated" | "reattached"; + fromWalletId: string | null; + authTokenId: string; + proxyAddress: string; + paramUtxo: UtxoRef; + mintTxHash: string; + dRepId: string; + proxyUtxoCount?: number; + }>; + skipped: Array<{ + assetUnit: string; + reason: ProxyChainRecoverySkipReason; + detail?: string; + }>; +}; + +const DEFAULT_MAX_CANDIDATES = 25; +const ASSET_HISTORY_PAGE_SIZE = 100; + +const proxySelect: Record = { + id: true, + walletId: true, 
+  proxyAddress: true,
+  authTokenId: true,
+  paramUtxo: true,
+  isActive: true,
+};
+
+let defaultDb: PrismaClient | undefined;
+
+function getDefaultDb(): PrismaClient {
+  defaultDb ??= new PrismaClient();
+  return defaultDb;
+}
+
+function createDefaultProvider(networkId: 0 | 1): ProxyChainRecoveryProvider {
+  const apiKey =
+    networkId === 0
+      ? process.env.CI_BLOCKFROST_PREPROD_API_KEY?.trim() ||
+        process.env.NEXT_PUBLIC_BLOCKFROST_API_KEY_PREPROD?.trim()
+      : process.env.CI_BLOCKFROST_MAINNET_API_KEY?.trim() ||
+        process.env.NEXT_PUBLIC_BLOCKFROST_API_KEY_MAINNET?.trim();
+  if (!apiKey) {
+    throw new Error(`Missing Blockfrost API key for proxy chain recovery on network ${networkId}`);
+  }
+  return new BlockfrostProvider(apiKey) as unknown as ProxyChainRecoveryProvider;
+}
+
+async function runInTransaction<T>(
+  db: ProxyRecoveryDb,
+  fn: (tx: ProxyRecoveryDb) => Promise<T>,
+): Promise<T> {
+  if (typeof db.$transaction === "function") {
+    return db.$transaction(fn);
+  }
+  return fn(db);
+}
+
+function positiveQuantity(quantity: string | undefined): boolean {
+  try {
+    return BigInt(quantity ?? "0") > 0n;
+  } catch {
+    return false;
+  }
+}
+
+function collectAssetUnits(utxos: UTxO[]): string[] {
+  const units = new Set<string>();
+  for (const utxo of utxos) {
+    for (const asset of utxo.output.amount) {
+      if (asset.unit !== "lovelace" && positiveQuantity(asset.quantity)) {
+        units.add(asset.unit);
+      }
+    }
+  }
+  return [...units].sort();
+}
+
+function normalizeAssetHistory(value: unknown): AssetHistoryEntry[] {
+  return Array.isArray(value) ? (value as AssetHistoryEntry[]) : [];
+}
+
+function normalizeTxUtxos(value: unknown): TxUtxosResponse {
+  return typeof value === "object" && value !== null ? (value as TxUtxosResponse) : {};
+}
+
+function findMintTxHash(history: AssetHistoryEntry[]): string | null {
+  const mint = history.find(
+    (entry) => entry.action === "minted" && typeof entry.tx_hash === "string" && entry.tx_hash,
+  );
+  return mint?.tx_hash ??
null; +} + +function inputToRef(input: TxUtxoEntry): UtxoRef | null { + const txHash = typeof input.tx_hash === "string" ? input.tx_hash.trim() : ""; + const outputIndex = + typeof input.output_index === "number" && Number.isInteger(input.output_index) + ? input.output_index + : -1; + if (!txHash || outputIndex < 0) return null; + return { txHash, outputIndex }; +} + +async function inspectAssetCandidate(args: { + assetUnit: string; + provider: ProxyChainRecoveryProvider; + network: 0 | 1; +}): Promise< + | { + matched: true; + authTokenId: string; + proxyAddress: string; + paramUtxo: UtxoRef; + mintTxHash: string; + dRepId: string; + proxyUtxoCount?: number; + } + | { matched: false; reason: ProxyChainRecoverySkipReason; detail?: string } +> { + let history: AssetHistoryEntry[]; + try { + history = normalizeAssetHistory( + await args.provider.get( + `/assets/${encodeURIComponent(args.assetUnit)}/history?order=asc&count=${ASSET_HISTORY_PAGE_SIZE}`, + ), + ); + } catch (error) { + return { + matched: false, + reason: "asset-history-fetch-error", + detail: error instanceof Error ? error.message : String(error), + }; + } + + const mintTxHash = findMintTxHash(history); + if (!mintTxHash) { + return { matched: false, reason: "no-mint-transaction" }; + } + + let txUtxos: TxUtxosResponse; + try { + txUtxos = normalizeTxUtxos( + await args.provider.get(`/txs/${encodeURIComponent(mintTxHash)}/utxos`), + ); + } catch (error) { + return { + matched: false, + reason: "tx-utxos-fetch-error", + detail: error instanceof Error ? error.message : String(error), + }; + } + + const inputs = Array.isArray(txUtxos.inputs) ? 
txUtxos.inputs : []; + for (const input of inputs) { + const paramUtxo = inputToRef(input); + if (!paramUtxo) continue; + + const scripts = deriveProxyScripts({ paramUtxo, network: args.network }); + if (scripts.authTokenId !== args.assetUnit) continue; + + let proxyUtxoCount: number | undefined; + try { + proxyUtxoCount = (await args.provider.fetchAddressUTxOs(scripts.proxyAddress)).length; + } catch { + proxyUtxoCount = undefined; + } + + return { + matched: true, + authTokenId: scripts.authTokenId, + proxyAddress: scripts.proxyAddress, + paramUtxo, + mintTxHash, + dRepId: scripts.dRepId, + proxyUtxoCount, + }; + } + + return { matched: false, reason: "no-derived-match" }; +} + +export async function recoverProxyRowsFromChainForWalletType(args: { + ctx: CIBootstrapContext; + walletType: CIWalletType; + db?: ProxyRecoveryDb; + provider?: ProxyChainRecoveryProvider; + maxCandidates?: number; +}): Promise { + const wallet = getWalletByType(args.ctx, args.walletType); + if (!wallet) throw new Error(`Missing ${args.walletType} wallet`); + + const db = args.db ?? (getDefaultDb() as unknown as ProxyRecoveryDb); + const provider = args.provider ?? createDefaultProvider(args.ctx.networkId); + const currentWallet = await db.wallet.findUnique({ + where: { id: wallet.walletId }, + select: { id: true }, + }); + if (!currentWallet) { + throw new Error(`Current ${args.walletType} wallet row ${wallet.walletId} was not found`); + } + + const walletUtxos = await provider.fetchAddressUTxOs(wallet.walletAddress); + const assetUnits = collectAssetUnits(walletUtxos); + const maxCandidates = args.maxCandidates ?? 
DEFAULT_MAX_CANDIDATES; + const candidates = assetUnits.slice(0, maxCandidates); + const skipped: ProxyChainRecoveryResult["skipped"] = assetUnits + .slice(maxCandidates) + .map((assetUnit) => ({ + assetUnit, + reason: "candidate-cap-exceeded", + detail: `candidate limit ${maxCandidates} reached`, + })); + + const matches: Array<{ + assetUnit: string; + authTokenId: string; + proxyAddress: string; + paramUtxo: UtxoRef; + mintTxHash: string; + dRepId: string; + proxyUtxoCount?: number; + }> = []; + + for (const assetUnit of candidates) { + const inspected = await inspectAssetCandidate({ + assetUnit, + provider, + network: args.ctx.networkId, + }); + if (!inspected.matched) { + skipped.push({ assetUnit, reason: inspected.reason, detail: inspected.detail }); + continue; + } + matches.push({ assetUnit, ...inspected }); + } + + const recovered: ProxyChainRecoveryResult["recovered"] = []; + await runInTransaction(db, async (tx) => { + for (const match of matches) { + const existing = await tx.proxy.findFirst({ + where: { authTokenId: match.authTokenId }, + select: proxySelect, + }); + + if (existing?.walletId === wallet.walletId && existing.isActive) { + skipped.push({ assetUnit: match.assetUnit, reason: "already-current-active" }); + continue; + } + + const previousWalletId = existing?.walletId ?? null; + const row = existing + ? await tx.proxy.update({ + where: { id: existing.id }, + data: { walletId: wallet.walletId, isActive: true }, + select: proxySelect, + }) + : await tx.proxy.create({ + data: { + walletId: wallet.walletId, + proxyAddress: match.proxyAddress, + authTokenId: match.authTokenId, + paramUtxo: JSON.stringify(match.paramUtxo), + description: "Recovered CI proxy from chain", + isActive: true, + }, + select: proxySelect, + }); + + recovered.push({ + proxyId: row.id, + action: existing ? (previousWalletId === wallet.walletId ? 
"reactivated" : "reattached") : "created", + fromWalletId: previousWalletId, + authTokenId: match.authTokenId, + proxyAddress: match.proxyAddress, + paramUtxo: match.paramUtxo, + mintTxHash: match.mintTxHash, + dRepId: match.dRepId, + proxyUtxoCount: match.proxyUtxoCount, + }); + } + }); + + return { + walletType: args.walletType, + walletId: wallet.walletId, + walletAddress: wallet.walletAddress, + recovered, + skipped, + }; +} diff --git a/scripts/ci/scenarios/proxyLifecyclePreflight.ts b/scripts/ci/scenarios/proxyLifecyclePreflight.ts new file mode 100644 index 00000000..c996c337 --- /dev/null +++ b/scripts/ci/scenarios/proxyLifecyclePreflight.ts @@ -0,0 +1,181 @@ +export type UtxoRef = { txHash: string; outputIndex: number }; + +export type ScriptUtxo = { + input: UtxoRef; + output: { address: string; amount: { unit: string; quantity: string }[] }; +}; + +export type ProxyLifecycleUtxoShapeStatus = + | "pass" + | "needs-split" + | "insufficient-total" + | "insufficient-selectable" + | "insufficient-shape"; + +export const DREP_REGISTER_REQUIRED_LOVELACE = 505_000_000n; +export const LIFECYCLE_PROXY_LOVELACE = 10_000_000n; +export const FULL_LIFECYCLE_FEE_BUFFER_LOVELACE = 20_000_000n; +export const SETUP_UTXO_REQUIRED_LOVELACE = 20_000_000n; +export const COLLATERAL_REQUIRED_LOVELACE = 5_000_000n; +export const PROXY_SPEND_LOVELACE = 1_000_000n; +export const PROXY_LIFECYCLE_COLLATERAL_SPLIT_LOVELACE = 6_000_000n; +export const SELF_SPLIT_FEE_BUFFER_LOVELACE = 2_000_000n; +export const PROXY_FULL_LIFECYCLE_WALLET_TYPES = ["legacy", "hierarchical", "sdk"] as const; + +export function parseLovelace(utxo: ScriptUtxo): bigint { + return BigInt(utxo.output.amount.find((asset) => asset.unit === "lovelace")?.quantity ?? 
"0"); +} + +export function toRef(utxo: ScriptUtxo): UtxoRef { + return { txHash: utxo.input.txHash, outputIndex: utxo.input.outputIndex }; +} + +export function key(ref: UtxoRef): string { + return `${ref.txHash}:${ref.outputIndex}`; +} + +export function sameRef(left: UtxoRef, right: UtxoRef): boolean { + return key(left) === key(right); +} + +export function containsRef(refs: UtxoRef[], ref: UtxoRef): boolean { + return refs.some((existing) => sameRef(existing, ref)); +} + +export function formatAda(lovelace: bigint): string { + const ada = lovelace / 1_000_000n; + const remainder = lovelace % 1_000_000n; + if (remainder === 0n) return `${ada.toString()} ADA`; + return `${ada.toString()}.${remainder.toString().padStart(6, "0")} ADA`; +} + +export function getProxyFullLifecycleRequiredLovelace(): bigint { + return ( + DREP_REGISTER_REQUIRED_LOVELACE + + LIFECYCLE_PROXY_LOVELACE + + PROXY_SPEND_LOVELACE + + FULL_LIFECYCLE_FEE_BUFFER_LOVELACE + ); +} + +export type ProxyLifecycleUtxoShapeAnalysis = { + status: ProxyLifecycleUtxoShapeStatus; + totalLovelace: bigint; + largestUtxoLovelace: bigint; + setupCandidates: number; + keyCollateralCandidates: number; + drepSelectableLovelace: bigint; + drepRequiredLovelace: bigint; + requiredTotalLovelace: bigint; + selfSplitRequiredLovelace: bigint; + hasSetupCandidate: boolean; + hasKeyCollateral: boolean; + diagnostics: string; +}; + +export type ProxyLifecycleUtxoShapeInput = { + walletUtxos: ScriptUtxo[]; + collateralUtxos: ScriptUtxo[]; +}; + +export function analyzeProxyFullLifecycleUtxoShape(args: { + walletUtxos: ScriptUtxo[]; + collateralUtxos: ScriptUtxo[]; +}): ProxyLifecycleUtxoShapeAnalysis { + const lovelaces = args.walletUtxos.map(parseLovelace); + const totalLovelace = lovelaces.reduce((sum, value) => sum + value, 0n); + const largestUtxoLovelace = lovelaces.reduce( + (largest, value) => (value > largest ? 
value : largest),
+    0n,
+  );
+  const setupCandidates = lovelaces.filter((value) => value >= SETUP_UTXO_REQUIRED_LOVELACE).length;
+  const keyCollateralCandidates = args.collateralUtxos.filter(
+    (utxo) =>
+      parseLovelace(utxo) >= COLLATERAL_REQUIRED_LOVELACE &&
+      utxo.output.amount.every((asset) => asset.unit === "lovelace"),
+  );
+  const hasSetupCandidate = setupCandidates > 0;
+  const hasKeyCollateral = keyCollateralCandidates.length > 0;
+  const drepRequiredLovelace = getProxyFullLifecycleRequiredLovelace();
+  const drepSelectableLovelace = totalLovelace;
+  const requiredTotalLovelace = getProxyFullLifecycleRequiredLovelace();
+  const selfSplitRequiredLovelace =
+    drepRequiredLovelace + PROXY_LIFECYCLE_COLLATERAL_SPLIT_LOVELACE + SELF_SPLIT_FEE_BUFFER_LOVELACE;
+  const diagnostics =
+    `total=${formatAda(totalLovelace)}, largestUtxo=${formatAda(largestUtxoLovelace)}, ` +
+    `setupCandidates=${setupCandidates}, keyCollateralCandidates=${keyCollateralCandidates.length}, ` +
+    `drepSelectable=${formatAda(drepSelectableLovelace)}, drepRequired=${formatAda(drepRequiredLovelace)}, ` +
+    `required=${formatAda(requiredTotalLovelace)} ` +
+    `(DRep register ${formatAda(DREP_REGISTER_REQUIRED_LOVELACE)} + ` +
+    `initial proxy ${formatAda(LIFECYCLE_PROXY_LOVELACE)} + ` +
+    `proxy spend ${formatAda(PROXY_SPEND_LOVELACE)} + ` +
+    `fee buffer ${formatAda(FULL_LIFECYCLE_FEE_BUFFER_LOVELACE)})`;
+
+  let status: ProxyLifecycleUtxoShapeStatus = "pass";
+  if (totalLovelace < requiredTotalLovelace) {
+    status = "insufficient-total";
+  } else if (!hasSetupCandidate || !hasKeyCollateral) {
+    status =
+      totalLovelace >= selfSplitRequiredLovelace
+        ?
"needs-split" + : "insufficient-shape"; + } else if (drepSelectableLovelace < drepRequiredLovelace) { + status = "insufficient-selectable"; + } + + return { + status, + totalLovelace, + largestUtxoLovelace, + setupCandidates, + keyCollateralCandidates: keyCollateralCandidates.length, + drepSelectableLovelace, + drepRequiredLovelace, + requiredTotalLovelace, + selfSplitRequiredLovelace, + hasSetupCandidate, + hasKeyCollateral, + diagnostics, + }; +} + +export function assertProxyFullLifecyclePreflight(args: { + walletUtxos: ScriptUtxo[]; + collateralUtxos: ScriptUtxo[]; +}): Omit< + ProxyLifecycleUtxoShapeAnalysis, + "status" | "diagnostics" | "selfSplitRequiredLovelace" | "hasSetupCandidate" | "hasKeyCollateral" +> { + const analysis = analyzeProxyFullLifecycleUtxoShape(args); + + if (analysis.keyCollateralCandidates === 0) { + throw new Error( + `Proxy full lifecycle preflight failed: no bot payment-address UTxO has at least ${formatAda(COLLATERAL_REQUIRED_LOVELACE)} for Plutus collateral. ${analysis.diagnostics}. Run proxy lifecycle UTxO shaping or fund the bot payment address before running proxy full lifecycle.`, + ); + } + if (analysis.setupCandidates === 0) { + throw new Error( + `Proxy full lifecycle preflight failed: no wallet UTxO has at least ${formatAda(SETUP_UTXO_REQUIRED_LOVELACE)} for proxy setup. ${analysis.diagnostics}. Fund or consolidate the CI wallet before running proxy full lifecycle.`, + ); + } + if (analysis.totalLovelace < analysis.requiredTotalLovelace) { + throw new Error( + `Proxy full lifecycle preflight failed: insufficient ADA for full lifecycle. ${analysis.diagnostics}. 
Add at least ${formatAda(analysis.requiredTotalLovelace - analysis.totalLovelace)} plus any desired safety margin before running proxy full lifecycle.`, + ); + } + if (analysis.drepSelectableLovelace < analysis.drepRequiredLovelace) { + throw new Error( + `Proxy full lifecycle preflight failed: DRep register cannot select enough ADA while reserving separate collateral and accounting for prior proxy setup/spend costs. ${analysis.diagnostics}. Add at least ${formatAda(analysis.drepRequiredLovelace - analysis.drepSelectableLovelace)} plus any desired safety margin, or consolidate spendable ADA outside the collateral UTxO.`, + ); + } + + return { + totalLovelace: analysis.totalLovelace, + largestUtxoLovelace: analysis.largestUtxoLovelace, + setupCandidates: analysis.setupCandidates, + keyCollateralCandidates: analysis.keyCollateralCandidates, + drepSelectableLovelace: analysis.drepSelectableLovelace, + drepRequiredLovelace: analysis.drepRequiredLovelace, + requiredTotalLovelace: analysis.requiredTotalLovelace, + }; +} diff --git a/scripts/ci/scenarios/proxyOrphanAdoption.ts b/scripts/ci/scenarios/proxyOrphanAdoption.ts new file mode 100644 index 00000000..9754a0a7 --- /dev/null +++ b/scripts/ci/scenarios/proxyOrphanAdoption.ts @@ -0,0 +1,307 @@ +import { PrismaClient, type Proxy as DbProxy, type Wallet as DbWallet } from "@prisma/client"; +import { BlockfrostProvider, type UTxO } from "@meshsdk/core"; +import { deriveProxyScripts } from "../../../src/lib/server/proxyTxBuilders"; +import { hasAsset, type UtxoRef } from "../../../src/lib/server/proxyUtxos"; +import { resolveWalletScriptAddressSafe } from "../../../src/lib/server/walletScriptAddress"; +import type { CIBootstrapContext, CIWalletType } from "../framework/types"; +import { getWalletByType } from "./steps/helpers"; + +type ProxyAdoptionWallet = Pick< + DbWallet, + | "id" + | "name" + | "signersAddresses" + | "signersStakeKeys" + | "signersDRepKeys" + | "signersDescriptions" + | "numRequiredSigners" + | 
"scriptCbor" + | "stakeCredentialHash" + | "type" + | "rawImportBodies" +>; + +type ProxyAdoptionRow = Pick< + DbProxy, + "id" | "walletId" | "proxyAddress" | "authTokenId" | "paramUtxo" | "isActive" +>; + +type ProxyAdoptionDb = { + wallet: { + findUnique: (args: { + where: { id: string }; + select: Record; + }) => Promise; + findMany: (args: { + select: Record; + }) => Promise; + }; + proxy: { + findMany: (args: { + where: { walletId: { in: string[] } }; + select: Record; + }) => Promise; + update: (args: { + where: { id: string }; + data: { walletId: string; isActive: true }; + select: { id: true; walletId: true; isActive: true }; + }) => Promise<{ id: string; walletId: string | null; isActive: boolean }>; + }; + $transaction?: (fn: (tx: ProxyAdoptionDb) => Promise) => Promise; +}; + +export type ProxyOrphanAdoptionProvider = { + fetchAddressUTxOs: (address: string) => Promise; +}; + +export type ProxyAdoptionSkipReason = + | "already-current-active" + | "invalid-param-utxo" + | "metadata-mismatch" + | "chain-empty" + | "chain-fetch-error"; + +export type ProxyAdoptionResult = { + walletType: CIWalletType; + walletId: string; + walletAddress: string; + historicalWalletIds: string[]; + adopted: Array<{ + proxyId: string; + fromWalletId: string | null; + authTokenId: string; + proxyAddress: string; + wasActive: boolean; + }>; + skipped: Array<{ + proxyId: string; + walletId: string | null; + reason: ProxyAdoptionSkipReason; + detail?: string; + }>; +}; + +const walletSelect: Record = { + id: true, + name: true, + signersAddresses: true, + signersStakeKeys: true, + signersDRepKeys: true, + signersDescriptions: true, + numRequiredSigners: true, + scriptCbor: true, + stakeCredentialHash: true, + type: true, + rawImportBodies: true, +}; + +const proxySelect: Record = { + id: true, + walletId: true, + proxyAddress: true, + authTokenId: true, + paramUtxo: true, + isActive: true, +}; + +let defaultDb: PrismaClient | undefined; + +function getDefaultDb(): PrismaClient { + 
defaultDb ??= new PrismaClient(); + return defaultDb; +} + +function createDefaultProvider(networkId: 0 | 1): ProxyOrphanAdoptionProvider { + const apiKey = + networkId === 0 + ? process.env.CI_BLOCKFROST_PREPROD_API_KEY?.trim() || + process.env.NEXT_PUBLIC_BLOCKFROST_API_KEY_PREPROD?.trim() + : process.env.CI_BLOCKFROST_MAINNET_API_KEY?.trim() || + process.env.NEXT_PUBLIC_BLOCKFROST_API_KEY_MAINNET?.trim(); + if (!apiKey) { + throw new Error(`Missing Blockfrost API key for proxy orphan adoption on network ${networkId}`); + } + return new BlockfrostProvider(apiKey); +} + +function parseParamUtxo(value: string): UtxoRef | null { + try { + const parsed = JSON.parse(value) as Partial; + const txHash = typeof parsed.txHash === "string" ? parsed.txHash.trim() : ""; + const outputIndex = + typeof parsed.outputIndex === "number" && Number.isInteger(parsed.outputIndex) + ? parsed.outputIndex + : -1; + if (!txHash || outputIndex < 0) return null; + return { txHash, outputIndex }; + } catch { + return null; + } +} + +async function runInTransaction( + db: ProxyAdoptionDb, + fn: (tx: ProxyAdoptionDb) => Promise, +): Promise { + if (typeof db.$transaction === "function") { + return db.$transaction(fn); + } + return fn(db); +} + +function resolveMatchingWalletIds(args: { + currentWallet: ProxyAdoptionWallet; + allWallets: ProxyAdoptionWallet[]; + fallbackAddress: string; +}): string[] { + const currentAddressResult = resolveWalletScriptAddressSafe( + args.currentWallet as DbWallet, + args.fallbackAddress, + ); + if ("error" in currentAddressResult) { + throw new Error(`Unable to resolve current wallet script address: ${currentAddressResult.error}`); + } + + return args.allWallets + .filter((wallet) => { + const resolved = resolveWalletScriptAddressSafe(wallet as DbWallet, args.fallbackAddress); + return "address" in resolved && resolved.address === currentAddressResult.address; + }) + .map((wallet) => wallet.id); +} + +function hasAuthToken(utxos: UTxO[], authTokenId: string): 
boolean { + return utxos.some((utxo) => hasAsset(utxo, authTokenId)); +} + +export async function adoptProxyOrphansForWalletType(args: { + ctx: CIBootstrapContext; + walletType: CIWalletType; + db?: ProxyAdoptionDb; + provider?: ProxyOrphanAdoptionProvider; +}): Promise { + const wallet = getWalletByType(args.ctx, args.walletType); + if (!wallet) throw new Error(`Missing ${args.walletType} wallet`); + + const db = args.db ?? (getDefaultDb() as unknown as ProxyAdoptionDb); + const provider = args.provider ?? createDefaultProvider(args.ctx.networkId); + const currentWallet = await db.wallet.findUnique({ + where: { id: wallet.walletId }, + select: walletSelect, + }); + if (!currentWallet) { + throw new Error(`Current ${args.walletType} wallet row ${wallet.walletId} was not found`); + } + + const allWallets = await db.wallet.findMany({ select: walletSelect }); + const matchingWalletIds = resolveMatchingWalletIds({ + currentWallet, + allWallets, + fallbackAddress: wallet.walletAddress, + }); + const historicalWalletIds = matchingWalletIds.filter((walletId) => walletId !== wallet.walletId); + if (matchingWalletIds.length === 0) { + return { + walletType: args.walletType, + walletId: wallet.walletId, + walletAddress: wallet.walletAddress, + historicalWalletIds: [], + adopted: [], + skipped: [], + }; + } + + const candidates = await db.proxy.findMany({ + where: { walletId: { in: matchingWalletIds } }, + select: proxySelect, + }); + + const walletUtxos = await provider.fetchAddressUTxOs(wallet.walletAddress); + const adopted: ProxyAdoptionResult["adopted"] = []; + const skipped: ProxyAdoptionResult["skipped"] = []; + const updates: ProxyAdoptionRow[] = []; + + for (const proxy of candidates) { + if (proxy.walletId === wallet.walletId && proxy.isActive) { + skipped.push({ + proxyId: proxy.id, + walletId: proxy.walletId, + reason: "already-current-active", + }); + continue; + } + + const paramUtxo = parseParamUtxo(proxy.paramUtxo); + if (!paramUtxo) { + skipped.push({ + 
proxyId: proxy.id, + walletId: proxy.walletId, + reason: "invalid-param-utxo", + }); + continue; + } + + const scripts = deriveProxyScripts({ paramUtxo, network: args.ctx.networkId }); + if (scripts.authTokenId !== proxy.authTokenId || scripts.proxyAddress !== proxy.proxyAddress) { + skipped.push({ + proxyId: proxy.id, + walletId: proxy.walletId, + reason: "metadata-mismatch", + }); + continue; + } + + let proxyUtxos: UTxO[]; + try { + proxyUtxos = await provider.fetchAddressUTxOs(proxy.proxyAddress); + } catch (error) { + skipped.push({ + proxyId: proxy.id, + walletId: proxy.walletId, + reason: "chain-fetch-error", + detail: error instanceof Error ? error.message : String(error), + }); + continue; + } + + if (!hasAuthToken(walletUtxos, proxy.authTokenId)) { + skipped.push({ + proxyId: proxy.id, + walletId: proxy.walletId, + reason: "chain-empty", + detail: proxyUtxos.length + ? "proxy address has UTxOs, but auth token is not at current wallet address" + : "no auth token at current wallet address and proxy address is empty", + }); + continue; + } + + updates.push(proxy); + } + + await runInTransaction(db, async (tx) => { + for (const proxy of updates) { + await tx.proxy.update({ + where: { id: proxy.id }, + data: { walletId: wallet.walletId, isActive: true }, + select: { id: true, walletId: true, isActive: true }, + }); + adopted.push({ + proxyId: proxy.id, + fromWalletId: proxy.walletId, + authTokenId: proxy.authTokenId, + proxyAddress: proxy.proxyAddress, + wasActive: proxy.isActive, + }); + } + }); + + return { + walletType: args.walletType, + walletId: wallet.walletId, + walletAddress: wallet.walletAddress, + historicalWalletIds, + adopted, + skipped, + }; +} diff --git a/scripts/ci/scenarios/steps/authPlane.ts b/scripts/ci/scenarios/steps/authPlane.ts new file mode 100644 index 00000000..509a2071 --- /dev/null +++ b/scripts/ci/scenarios/steps/authPlane.ts @@ -0,0 +1,233 @@ +import type { CIBootstrapContext, CIBotContext, Scenario } from 
"../../framework/types"; +import { requestJson } from "../../framework/http"; +import { getDefaultBot } from "../../framework/botContext"; +import { authenticateBot } from "../../framework/botAuth"; +import { stringifyRedacted } from "../../framework/redact"; +import { authenticateSignerWithMnemonic } from "../../framework/walletAuth"; +import { getWalletByType } from "./helpers"; + +export function createScenarioAuthPlane(ctx: CIBootstrapContext): Scenario { + return { + id: "scenario.auth-plane", + description: "Wallet auth route checks and negative auth assertions", + steps: [ + { + id: "v1.authNegative.walletIds.addressMismatch", + description: "Assert /api/v1/walletIds rejects mismatched address", + severity: "critical", + execute: async (runCtx) => { + const bot = getDefaultBot(runCtx); + const token = await authenticateBot({ ctx: runCtx, bot }); + const mismatchAddress = + runCtx.bots.find((candidate) => candidate.id !== bot.id)?.paymentAddress ?? + `${bot.paymentAddress}x`; + const response = await requestJson<{ error?: string }>({ + url: `${runCtx.apiBaseUrl}/api/v1/walletIds?address=${encodeURIComponent(mismatchAddress)}`, + method: "GET", + token, + }); + if (response.status !== 403) { + throw new Error( + `walletIds address mismatch expected 403, got ${response.status}: ${stringifyRedacted(response.data)}`, + ); + } + return { + message: "walletIds address mismatch correctly rejected with 403", + }; + }, + }, + ...ctx.walletTypes.map((walletType) => ({ + id: `v1.authNegative.addTransaction.addressMismatch.${walletType}`, + description: `Assert /api/v1/addTransaction rejects mismatched address (${walletType} walletId)`, + severity: "critical" as const, + execute: async (runCtx: CIBootstrapContext) => { + const bot = getDefaultBot(runCtx); + const token = await authenticateBot({ ctx: runCtx, bot }); + const targetWallet = getWalletByType(runCtx, walletType); + if (!targetWallet) { + throw new Error(`Missing ${walletType} wallet for addTransaction negative 
check`); + } + const mismatchAddress = + runCtx.bots.find((candidate: CIBotContext) => candidate.id !== bot.id)?.paymentAddress ?? + `${bot.paymentAddress}x`; + const response = await requestJson<{ error?: string }>({ + url: `${runCtx.apiBaseUrl}/api/v1/addTransaction`, + method: "POST", + token, + body: { + walletId: targetWallet.walletId, + address: mismatchAddress, + txCbor: "00", + txJson: "{}", + description: "CI address mismatch negative check", + }, + }); + if (response.status !== 403) { + throw new Error( + `addTransaction address mismatch expected 403, got ${response.status}: ${stringifyRedacted(response.data)}`, + ); + } + return { + message: "addTransaction address mismatch correctly rejected with 403", + artifacts: { walletId: targetWallet.walletId }, + }; + }, + })), + ...ctx.walletTypes.map((walletType) => ({ + id: `v1.authNegative.pendingTransactions.missingToken.${walletType}`, + description: `Assert /api/v1/pendingTransactions rejects missing token (${walletType} wallet)`, + severity: "critical" as const, + execute: async (runCtx: CIBootstrapContext) => { + const wallet = getWalletByType(runCtx, walletType); + if (!wallet) { + throw new Error(`Missing ${walletType} wallet for pendingTransactions negative check`); + } + const signerAddress = wallet.signerAddresses[0]; + if (!signerAddress) { + throw new Error("Missing signer address for pendingTransactions negative check"); + } + const response = await requestJson<{ error?: string }>({ + url: `${runCtx.apiBaseUrl}/api/v1/pendingTransactions?walletId=${encodeURIComponent(wallet.walletId)}&address=${encodeURIComponent(signerAddress)}`, + method: "GET", + }); + if (response.status !== 401) { + throw new Error( + `pendingTransactions missing token expected 401, got ${response.status}: ${stringifyRedacted(response.data)}`, + ); + } + return { + message: "pendingTransactions missing token correctly rejected with 401", + artifacts: { walletId: wallet.walletId }, + }; + }, + })), + { + id: 
"v1.authNegative.drepInfo.missingToken", + description: "Assert /api/v1/drepInfo rejects missing token with 401", + severity: "critical", + execute: async (runCtx) => { + const wallet = runCtx.wallets[0]; + if (!wallet) { + throw new Error("drepInfo negative check: no wallet in context"); + } + const signerAddress = wallet.signerAddresses[0] ?? ""; + const response = await requestJson<{ error?: string }>({ + url: `${runCtx.apiBaseUrl}/api/v1/drepInfo?walletId=${encodeURIComponent(wallet.walletId)}&address=${encodeURIComponent(signerAddress)}`, + method: "GET", + }); + if (response.status !== 401) { + throw new Error( + `drepInfo missing token expected 401, got ${response.status}: ${stringifyRedacted(response.data)}`, + ); + } + return { + message: "drepInfo missing token correctly rejected with 401", + }; + }, + }, + { + id: "v1.authNegative.stakeAccountInfo.missingToken", + description: "Assert /api/v1/stakeAccountInfo rejects missing token with 401", + severity: "critical", + execute: async (runCtx) => { + const stakeAddress = runCtx.signerStakeAddresses[0] ?? runCtx.sdkStakeAddress ?? 
"stake_test1abc"; + const response = await requestJson<{ error?: string }>({ + url: `${runCtx.apiBaseUrl}/api/v1/stakeAccountInfo?stakeAddress=${encodeURIComponent(stakeAddress)}`, + method: "GET", + }); + if (response.status !== 401) { + throw new Error( + `stakeAccountInfo missing token expected 401, got ${response.status}: ${stringifyRedacted(response.data)}`, + ); + } + return { + message: "stakeAccountInfo missing token correctly rejected with 401", + }; + }, + }, + { + id: "v1.authNegative.createWallet.missingToken", + description: "Assert /api/v1/createWallet rejects missing token with 401", + severity: "critical", + execute: async (runCtx) => { + const response = await requestJson<{ error?: string }>({ + url: `${runCtx.apiBaseUrl}/api/v1/createWallet`, + method: "POST", + body: { name: "should-be-rejected", signersAddresses: [] }, + }); + if (response.status !== 401) { + throw new Error( + `createWallet missing token expected 401, got ${response.status}: ${stringifyRedacted(response.data)}`, + ); + } + return { + message: "createWallet missing token correctly rejected with 401", + }; + }, + }, + { + id: "v1.getNonce.authSigner.signer2", + description: "Authenticate signer via getNonce + authSigner", + severity: "critical", + execute: async (runCtx) => { + const mnemonic = process.env.CI_MNEMONIC_2; + if (!mnemonic?.trim()) { + throw new Error("CI_MNEMONIC_2 is required for authSigner scenario"); + } + const authResult = await authenticateSignerWithMnemonic({ + ctx: runCtx, + mnemonic, + }); + return { + message: "Signer wallet auth succeeded through getNonce/authSigner", + artifacts: { + signerAddress: authResult.signerAddress, + nonceLength: authResult.nonce.length, + }, + }; + }, + }, + { + id: "v1.signTransaction.badTransactionId", + description: "Assert /api/v1/signTransaction returns 404 for a non-existent transactionId", + severity: "non-critical", + execute: async (runCtx) => { + const mnemonic = process.env.CI_MNEMONIC_2; + if (!mnemonic?.trim()) { 
+ return { + message: "CI_MNEMONIC_2 not set; skipping signTransaction bad-id validation check", + artifacts: { skipped: true }, + }; + } + const wallet = getWalletByType(runCtx, runCtx.walletTypes[0] ?? "legacy"); + if (!wallet) { + throw new Error("signTransaction bad-id: no wallet in context"); + } + const authResult = await authenticateSignerWithMnemonic({ ctx: runCtx, mnemonic }); + const response = await requestJson<{ error?: string }>({ + url: `${runCtx.apiBaseUrl}/api/v1/signTransaction`, + method: "POST", + token: authResult.token, + body: { + walletId: wallet.walletId, + transactionId: "00000000-0000-0000-0000-000000000000", + address: authResult.signerAddress, + signature: "aabbccdd", + key: "eeff0011", + broadcast: false, + }, + }); + if (response.status !== 404) { + throw new Error( + `signTransaction bad transactionId expected 404, got ${response.status}: ${stringifyRedacted(response.data)}`, + ); + } + return { + message: "signTransaction non-existent transactionId correctly returns 404", + artifacts: { walletId: wallet.walletId }, + }; + }, + }, + ], + }; +} diff --git a/scripts/ci/scenarios/steps/botIdentity.ts b/scripts/ci/scenarios/steps/botIdentity.ts new file mode 100644 index 00000000..1220ae7c --- /dev/null +++ b/scripts/ci/scenarios/steps/botIdentity.ts @@ -0,0 +1,82 @@ +import type { Scenario } from "../../framework/types"; +import { requestJson } from "../../framework/http"; +import { getDefaultBot } from "../../framework/botContext"; +import { authenticateBot, deriveCiBotSecret, requireCiJwtSecret } from "../../framework/botAuth"; +import { stringifyRedacted } from "../../framework/redact"; + +export function createScenarioBotIdentity(): Scenario { + return { + id: "scenario.bot-identity", + description: "Bot profile route checks", + steps: [ + { + id: "v1.botAuth.explicitRouteCheck", + description: "Verify /api/v1/botAuth response shape directly (bypasses token cache)", + severity: "critical", + execute: async (ctx) => { + const bot = 
getDefaultBot(ctx); + const secret = deriveCiBotSecret(bot.paymentAddress, requireCiJwtSecret()); + const response = await requestJson<{ token?: string; error?: string }>({ + url: `${ctx.apiBaseUrl}/api/v1/botAuth`, + method: "POST", + body: { + botKeyId: bot.botKeyId, + secret, + paymentAddress: bot.paymentAddress, + }, + }); + if (response.status !== 200 || typeof response.data?.token !== "string") { + throw new Error( + `botAuth explicit check failed (${response.status}): ${stringifyRedacted(response.data)}`, + ); + } + const parts = response.data.token.split("."); + if (parts.length !== 3) { + throw new Error( + `botAuth: token is not a valid JWT — expected 3 dot-separated segments, got ${parts.length}`, + ); + } + return { + message: "botAuth explicit route check passed: response contains a well-formed JWT", + artifacts: { jwtSegmentCount: parts.length }, + }; + }, + }, + { + id: "v1.botMe.defaultBot", + description: "Verify default bot identity via /api/v1/botMe", + severity: "critical", + execute: async (ctx) => { + const bot = getDefaultBot(ctx); + const token = await authenticateBot({ ctx, bot }); + const response = await requestJson<{ + botId?: string; + paymentAddress?: string; + ownerAddress?: string; + error?: string; + }>({ + url: `${ctx.apiBaseUrl}/api/v1/botMe`, + method: "GET", + token, + }); + if (response.status !== 200) { + throw new Error(`botMe failed (${response.status}): ${stringifyRedacted(response.data)}`); + } + if (response.data.botId !== bot.botId) { + throw new Error("botMe returned unexpected botId"); + } + if (response.data.paymentAddress !== bot.paymentAddress) { + throw new Error("botMe returned unexpected paymentAddress"); + } + return { + message: `botMe resolved bot ${response.data.botId}`, + artifacts: { + botId: response.data.botId, + paymentAddress: response.data.paymentAddress, + }, + }; + }, + }, + ], + }; +} diff --git a/scripts/ci/scenarios/steps/certificates.ts b/scripts/ci/scenarios/steps/certificates.ts new file mode 
100644 index 00000000..b86f1d6a --- /dev/null +++ b/scripts/ci/scenarios/steps/certificates.ts @@ -0,0 +1,737 @@ +import type { CIBootstrapContext, CIWalletType, RouteStep, Scenario } from "../../framework/types"; +import { requestJson } from "../../framework/http"; +import { runSigningFlow } from "../flows/signingFlow"; +import { runStakeCertSigningFlow } from "../flows/certificateSigningFlow"; +import { getDefaultBot } from "../../framework/botContext"; +import { authenticateBot } from "../../framework/botAuth"; +import { stringifyRedacted } from "../../framework/redact"; +import { boolFromEnv } from "../../framework/env"; + +type ScriptUtxo = { + input: { txHash: string; outputIndex: number }; + output: { address: string; amount: { unit: string; quantity: string }[] }; +}; + +async function fetchUtxoRefs(args: { + ctx: CIBootstrapContext; + walletId: string; + token: string; + botAddress: string; + fresh?: boolean; +}): Promise<{ txHash: string; outputIndex: number }[]> { + const { ctx, walletId, token, botAddress } = args; + const freshParam = args.fresh ? "&fresh=true" : ""; + const response = await requestJson({ + url: `${ctx.apiBaseUrl}/api/v1/freeUtxos?walletId=${encodeURIComponent(walletId)}&address=${encodeURIComponent(botAddress)}${freshParam}`, + method: "GET", + token, + }); + if (response.status !== 200 || !Array.isArray(response.data)) { + throw new Error( + `freeUtxos preflight failed (${response.status}): ${stringifyRedacted(response.data)}`, + ); + } + if (response.data.length === 0) { + throw new Error("No free UTxOs available in wallet for certificate transaction"); + } + return response.data.map((u) => ({ txHash: u.input.txHash, outputIndex: u.input.outputIndex })); +} + +/** + * Polls freeUtxos?fresh=true until none of the given spent UTxO refs appear in + * the result. This confirms the cert tx has been included in a block and its + * inputs are no longer unspent on-chain. + * + * Preprod block time is ~20 s. 
We retry every 8 s for up to 4 minutes. + */ +async function pollUntilUtxosConsumed(args: { + ctx: CIBootstrapContext; + walletId: string; + token: string; + botAddress: string; + spentUtxoRefs: { txHash: string; outputIndex: number }[]; + maxRetries?: number; + retryDelayMs?: number; +}): Promise<{ attempts: number }> { + const { ctx, walletId, token, botAddress, spentUtxoRefs } = args; + const maxRetries = args.maxRetries ?? 30; + const retryDelayMs = args.retryDelayMs ?? 8000; + const spentKeys = new Set(spentUtxoRefs.map((r) => `${r.txHash}:${r.outputIndex}`)); + + for (let attempt = 0; attempt < maxRetries; attempt++) { + if (attempt > 0) { + await new Promise((resolve) => setTimeout(resolve, retryDelayMs)); + } + const response = await requestJson({ + url: `${ctx.apiBaseUrl}/api/v1/freeUtxos?walletId=${encodeURIComponent(walletId)}&address=${encodeURIComponent(botAddress)}&fresh=true`, + method: "GET", + token, + }); + if (response.status !== 200 || !Array.isArray(response.data)) { + continue; + } + const hasOverlap = response.data.some((u) => + spentKeys.has(`${u.input.txHash}:${u.input.outputIndex}`), + ); + if (!hasOverlap) { + return { attempts: attempt + 1 }; + } + } + throw new Error( + `Timed out after ${maxRetries} attempts (${(maxRetries * (args.retryDelayMs ?? 8000)) / 1000}s) waiting for cert tx inputs to be confirmed on-chain`, + ); +} + +function createCertSigningStep(args: { + id: string; + description: string; + signerIndex: 1 | 2; + mnemonicEnvName: "CI_MNEMONIC_2" | "CI_MNEMONIC_3"; + walletType: CIWalletType; + signBroadcast: boolean; + requireBroadcastSuccess: boolean; + getTransactionId: () => string | undefined; + /** When true, use the stake-cert signing flow that submits both payment and stake key witnesses. 
*/ + useStakeCertFlow?: boolean; +}): RouteStep { + return { + id: args.id, + description: args.description, + severity: "critical", + execute: async (ctx) => { + const mnemonic = process.env[args.mnemonicEnvName]; + if (!mnemonic?.trim()) { + throw new Error(`${args.mnemonicEnvName} is required for certificate signing`); + } + const txId = args.getTransactionId(); + if (!txId) { + throw new Error(`No transaction id available for signing step ${args.id}`); + } + const effectiveBroadcast = args.signBroadcast && boolFromEnv(process.env.SIGN_BROADCAST, true); + + if (args.useStakeCertFlow) { + const result = await runStakeCertSigningFlow({ + ctx, + mnemonic, + signerIndex: args.signerIndex, + signBroadcast: effectiveBroadcast, + preferredTransactionId: txId, + requireBroadcastSuccess: args.requireBroadcastSuccess, + }); + return { + message: `Stake cert sign (signer${args.signerIndex}) status=${result.status} submitted=${String(result.submitted)} stakeWitness=${String(result.stakeWitnessIncluded)}`, + artifacts: result as unknown as Record, + }; + } + + const result = await runSigningFlow({ + ctx, + mnemonic, + signWalletType: args.walletType, + signerIndex: args.signerIndex, + signerLabel: `signer${args.signerIndex}`, + signBroadcast: effectiveBroadcast, + preferredTransactionId: txId, + requireBroadcastSuccess: args.requireBroadcastSuccess, + }); + return { + message: `Certificate sign (${result.walletType}, signer${args.signerIndex}) status=${result.status} submitted=${String(result.submitted)}`, + artifacts: result as unknown as Record, + }; + }, + }; +} + +/** + * Builds the five steps for a single certificate action phase: + * 1. Propose tx via bot cert endpoint + * 2. Assert tx appears in pending + * 3. Signer 1 adds witness (no broadcast) + * 4. Signer 2 adds witness + broadcast + * 5. 
Assert tx cleared from pending (only when requireBroadcastSuccess=true) + * + * For staking cert (requireBroadcastSuccess=false): the staking certificate script + * uses stake key hashes (role-2), while signTransaction validates witnesses against + * the signer's payment key hash. Payment-key witnesses satisfy the spending script + * but cannot satisfy the separate stake-cert script, so broadcast will fail on-chain. + * Step 5 is omitted in that case. The test still validates that the API endpoint + * creates the pending transaction and that both signers can add witnesses. + */ +function createCertPhaseSteps(args: { + idPrefix: string; + walletType: CIWalletType; + certEndpoint: "botDRepCertificate" | "botStakeCertificate"; + action: string; + label: string; + runtime: { transactionId?: string; spentUtxoRefs?: { txHash: string; outputIndex: number }[] }; + requireBroadcastSuccess: boolean; + buildExtraBody?: (ctx: CIBootstrapContext) => Promise> | Record; + /** When true, each signing step uses the stake-cert flow (payment + stake witnesses). */ + useStakeCertFlow?: boolean; +}): RouteStep[] { + const { idPrefix, walletType, certEndpoint, action, label, runtime } = args; + + const steps: RouteStep[] = [ + // ── 1. Propose ─────────────────────────────────────────────────────────── + { + id: `${idPrefix}.propose`, + description: `Propose ${label}`, + severity: "critical", + execute: async (ctx) => { + const wallet = ctx.wallets.find((w) => w.type === walletType); + if (!wallet) { + throw new Error(`Wallet type "${walletType}" not found in CI context`); + } + const bot = getDefaultBot(ctx); + const token = await authenticateBot({ ctx, bot }); + const utxoRefs = await fetchUtxoRefs({ + ctx, + walletId: wallet.walletId, + token, + botAddress: bot.paymentAddress, + fresh: true, + }); + + const extraBody = args.buildExtraBody ? 
await args.buildExtraBody(ctx) : {}; + const body: Record = { + walletId: wallet.walletId, + address: bot.paymentAddress, + action, + utxoRefs, + description: label, + ...extraBody, + }; + + const response = await requestJson<{ id?: string; error?: string }>({ + url: `${ctx.apiBaseUrl}/api/v1/${certEndpoint}`, + method: "POST", + token, + body, + }); + if (response.status !== 201 || !response.data?.id) { + throw new Error( + `${certEndpoint} (${action}) failed (${response.status}): ${stringifyRedacted(response.data)}`, + ); + } + runtime.transactionId = response.data.id; + runtime.spentUtxoRefs = utxoRefs; + return { + message: `${label} tx created (${runtime.transactionId})`, + artifacts: { walletId: wallet.walletId, transactionId: runtime.transactionId, action }, + }; + }, + }, + + // ── 2. Assert pending ───────────────────────────────────────────────────── + { + id: `${idPrefix}.pending`, + description: `Assert ${label} tx is pending`, + severity: "critical", + execute: async (ctx) => { + const txId = runtime.transactionId; + const wallet = ctx.wallets.find((w) => w.type === walletType); + if (!txId || !wallet) { + throw new Error(`Missing transaction id or wallet context for ${idPrefix}`); + } + const bot = getDefaultBot(ctx); + const token = await authenticateBot({ ctx, bot }); + const response = await requestJson | { error?: string }>({ + url: `${ctx.apiBaseUrl}/api/v1/pendingTransactions?walletId=${encodeURIComponent(wallet.walletId)}&address=${encodeURIComponent(bot.paymentAddress)}`, + method: "GET", + token, + }); + if (response.status !== 200 || !Array.isArray(response.data)) { + throw new Error( + `pendingTransactions check failed (${response.status}): ${stringifyRedacted(response.data)}`, + ); + } + if (!response.data.some((tx) => tx.id === txId)) { + throw new Error(`Certificate tx ${txId} not found in pending transactions`); + } + return { + message: `${label} tx ${txId} is pending`, + artifacts: { transactionId: txId, pendingCount: 
response.data.length }, + }; + }, + }, + + // ── 3. Signer 1 witness (no broadcast) ─────────────────────────────────── + createCertSigningStep({ + id: `${idPrefix}.sign.signer1`, + description: `Signer 1 adds witness for ${label} (no broadcast)`, + signerIndex: 1, + mnemonicEnvName: "CI_MNEMONIC_2", + walletType, + signBroadcast: false, + requireBroadcastSuccess: false, + getTransactionId: () => runtime.transactionId, + useStakeCertFlow: args.useStakeCertFlow, + }), + + // ── 4. Signer 2 witness + broadcast ────────────────────────────────────── + createCertSigningStep({ + id: `${idPrefix}.sign.signer2`, + description: `Signer 2 signs and broadcasts ${label}`, + signerIndex: 2, + mnemonicEnvName: "CI_MNEMONIC_3", + walletType, + signBroadcast: true, + requireBroadcastSuccess: args.requireBroadcastSuccess, + getTransactionId: () => runtime.transactionId, + useStakeCertFlow: args.useStakeCertFlow, + }), + ]; + + // ── 5. Assert cleared (only when broadcast is required to succeed) ───────── + if (args.requireBroadcastSuccess) { + steps.push({ + id: `${idPrefix}.cleared`, + description: `Assert ${label} tx is cleared after broadcast`, + severity: "critical", + execute: async (ctx) => { + const txId = runtime.transactionId; + const wallet = ctx.wallets.find((w) => w.type === walletType); + if (!txId || !wallet) { + throw new Error(`Missing transaction id or wallet context for ${idPrefix}`); + } + const bot = getDefaultBot(ctx); + const token = await authenticateBot({ ctx, bot }); + const response = await requestJson | { error?: string }>({ + url: `${ctx.apiBaseUrl}/api/v1/pendingTransactions?walletId=${encodeURIComponent(wallet.walletId)}&address=${encodeURIComponent(bot.paymentAddress)}`, + method: "GET", + token, + }); + if (response.status !== 200 || !Array.isArray(response.data)) { + throw new Error( + `pendingTransactions cleared check failed (${response.status}): ${stringifyRedacted(response.data)}`, + ); + } + if (response.data.some((tx) => tx.id === txId)) { + 
throw new Error(`Certificate tx ${txId} is still pending after sign+broadcast`); + } + return { + message: `${label} tx ${txId} cleared from pending`, + artifacts: { transactionId: txId, pendingCount: response.data.length }, + }; + }, + }); + + // ── 6. Wait for on-chain confirmation ───────────────────────────────────── + // The next cert phase needs confirmed UTxOs. Poll freeUtxos?fresh=true until + // the inputs spent by this tx are no longer visible (tx included in a block). + steps.push({ + id: `${idPrefix}.onchain`, + description: `Wait for ${label} tx inputs to be confirmed on-chain`, + severity: "critical", + execute: async (ctx) => { + const wallet = ctx.wallets.find((w) => w.type === walletType); + if (!wallet) { + throw new Error(`Wallet type "${walletType}" not found in CI context`); + } + const spentRefs = runtime.spentUtxoRefs ?? []; + if (spentRefs.length === 0) { + return { message: "No spent UTxO refs recorded; skipping on-chain confirmation wait", artifacts: {} }; + } + const bot = getDefaultBot(ctx); + const token = await authenticateBot({ ctx, bot }); + const { attempts } = await pollUntilUtxosConsumed({ + ctx, + walletId: wallet.walletId, + token, + botAddress: bot.paymentAddress, + spentUtxoRefs: spentRefs, + }); + return { + message: `${label} inputs confirmed on-chain after ${attempts} poll attempt${attempts === 1 ? "" : "s"}`, + artifacts: { spentCount: spentRefs.length, attempts }, + }; + }, + }); + } + + return steps; +} + +/** + * Pre-hygiene step for a single wallet type: checks on-chain DRep state via + * GET /api/v1/drepInfo and deregisters if already registered, so the main + * register phase starts from a known clean state. + * + * Handles stale Blockfrost cache gracefully — if the broadcast is rejected + * with DRepNotRegistered or similar errors, the credential is confirmed clean + * and the step succeeds silently. 
+ */ +function createDRepHygieneStep(walletType: CIWalletType): RouteStep { + return { + id: `v1.botDRepCertificate.${walletType}.hygiene`, + description: `Ensure ${walletType} DRep is deregistered before test`, + severity: "critical", + execute: async (ctx) => { + const wallet = ctx.wallets.find((w) => w.type === walletType); + if (!wallet) { + throw new Error(`Wallet type "${walletType}" not found in CI context`); + } + + const bot = getDefaultBot(ctx); + const token = await authenticateBot({ ctx, bot }); + + // Check on-chain DRep state. + const checkResp = await requestJson<{ active?: boolean; dRepId?: string; error?: string }>({ + url: `${ctx.apiBaseUrl}/api/v1/drepInfo?walletId=${encodeURIComponent(wallet.walletId)}&address=${encodeURIComponent(bot.paymentAddress)}`, + method: "GET", + token, + }); + if (checkResp.status !== 200) { + throw new Error(`drepInfo failed (${checkResp.status}): ${stringifyRedacted(checkResp.data)}`); + } + if (!checkResp.data?.active) { + return { + message: `${walletType} DRep not registered on-chain; proceeding to main test`, + artifacts: { walletId: wallet.walletId, active: false, dRepId: checkResp.data?.dRepId }, + }; + } + + // DRep is registered — retire it. 
+ const utxoRefs = await fetchUtxoRefs({ + ctx, + walletId: wallet.walletId, + token, + botAddress: bot.paymentAddress, + fresh: true, + }); + + const proposeResp = await requestJson<{ id?: string; error?: string }>({ + url: `${ctx.apiBaseUrl}/api/v1/botDRepCertificate`, + method: "POST", + token, + body: { + walletId: wallet.walletId, + address: bot.paymentAddress, + action: "retire", + utxoRefs, + description: "DRep retirement (hygiene)", + }, + }); + if (proposeResp.status !== 201 || !proposeResp.data?.id) { + throw new Error(`botDRepCertificate (hygiene retire) failed (${proposeResp.status}): ${stringifyRedacted(proposeResp.data)}`); + } + const txId = proposeResp.data.id; + + const mnemonic1 = process.env.CI_MNEMONIC_2; + const mnemonic2 = process.env.CI_MNEMONIC_3; + if (!mnemonic1?.trim()) throw new Error("CI_MNEMONIC_2 is required for hygiene signing"); + if (!mnemonic2?.trim()) throw new Error("CI_MNEMONIC_3 is required for hygiene signing"); + + // Signer 1 — no broadcast. + const sign1Result = await runSigningFlow({ + ctx, + mnemonic: mnemonic1, + signWalletType: walletType, + signerIndex: 1, + signerLabel: "signer1", + signBroadcast: false, + preferredTransactionId: txId, + requireBroadcastSuccess: false, + }); + console.log(`[drep-hygiene:${walletType}] signer1 sign: status=${sign1Result.status}`); + + // Signer 2 — broadcast. Catch stale-cache rejections: if Blockfrost reported the DRep + // as active but it is not actually registered on-chain, the node rejects the retire cert. 
+ try { + const sign2Result = await runSigningFlow({ + ctx, + mnemonic: mnemonic2, + signWalletType: walletType, + signerIndex: 2, + signerLabel: "signer2", + signBroadcast: true, + preferredTransactionId: txId, + requireBroadcastSuccess: true, + }); + console.log(`[drep-hygiene:${walletType}] signer2 sign: status=${sign2Result.status} submitted=${String(sign2Result.submitted)}`); + } catch (err) { + const errMsg = String(err); + console.log(`[drep-hygiene:${walletType}] signer2 broadcast failed: ${errMsg.slice(0, 300)}`); + const isStaleCache = + errMsg.includes("DRepNotRegistered") || + errMsg.includes("DRepAlreadyRetired") || + errMsg.includes("VotingDRepsNotRegistered") || + errMsg.includes("ValueNotConservedUTxO") || + errMsg.includes("value is not balanced"); + if (isStaleCache) { + return { + message: `Hygiene DRep retire broadcast rejected — credential already deregistered (stale Blockfrost cache)`, + artifacts: { walletId: wallet.walletId, txId, staleCache: true }, + }; + } + throw err; + } + + // Broadcast succeeded — wait for on-chain confirmation before the register phase. + const { attempts } = await pollUntilUtxosConsumed({ + ctx, + walletId: wallet.walletId, + token, + botAddress: bot.paymentAddress, + spentUtxoRefs: utxoRefs, + }); + return { + message: `Hygiene DRep retire confirmed on-chain after ${attempts} poll attempt${attempts === 1 ? "" : "s"}`, + artifacts: { walletId: wallet.walletId, txId, attempts }, + }; + }, + }; +} + +/** + * DRep registration and retirement for legacy and SDK wallets. + * + * Legacy wallet: payment script doubles as the DRep credential script, so + * standard payment-key witnesses satisfy both spending inputs + * and the DRep certificate → full sign + broadcast. + * + * SDK wallet: the CI bootstrap sets signersDRepKeys = payment key hashes, + * so the DRep certificate script also uses payment key hashes. + * Standard payment-key witnesses satisfy both scripts + * → full sign + broadcast. 
+ * + * Pre-hygiene deregisters if already registered, then register then retire, + * leaving the wallet in its pre-test DRep state. + * Requires CI_DREP_ANCHOR_URL to be set. + */ +export function createScenarioDRepCertificates(): Scenario { + const legacyReg: { transactionId?: string; spentUtxoRefs?: { txHash: string; outputIndex: number }[] } = {}; + const legacyRetire: { transactionId?: string; spentUtxoRefs?: { txHash: string; outputIndex: number }[] } = {}; + const sdkReg: { transactionId?: string; spentUtxoRefs?: { txHash: string; outputIndex: number }[] } = {}; + const sdkRetire: { transactionId?: string; spentUtxoRefs?: { txHash: string; outputIndex: number }[] } = {}; + + function buildDRepRegBody(): Record { + const anchorUrl = process.env.CI_DREP_ANCHOR_URL?.trim(); + if (!anchorUrl) { + throw new Error("CI_DREP_ANCHOR_URL is required for DRep registration"); + } + const anchorJsonRaw = process.env.CI_DREP_ANCHOR_JSON?.trim(); + if (!anchorJsonRaw) { + throw new Error("CI_DREP_ANCHOR_JSON is required for DRep registration"); + } + let anchorJson: object; + try { + anchorJson = JSON.parse(anchorJsonRaw) as object; + } catch { + throw new Error("CI_DREP_ANCHOR_JSON is not valid JSON"); + } + return { anchorUrl, anchorJson }; + } + + return { + id: "scenario.drep-certificates", + description: + "Register and retire DRep for legacy and SDK wallets, restoring pre-test state", + steps: [ + // Legacy: hygiene (deregister if already registered) + createDRepHygieneStep("legacy"), + // Legacy: register + ...createCertPhaseSteps({ + idPrefix: "v1.botDRepCertificate.legacy.register", + walletType: "legacy", + certEndpoint: "botDRepCertificate", + action: "register", + label: "DRep registration (legacy)", + runtime: legacyReg, + requireBroadcastSuccess: true, + buildExtraBody: () => buildDRepRegBody(), + }), + // Legacy: retire + ...createCertPhaseSteps({ + idPrefix: "v1.botDRepCertificate.legacy.retire", + walletType: "legacy", + certEndpoint: "botDRepCertificate", 
+ action: "retire", + label: "DRep retirement (legacy)", + runtime: legacyRetire, + requireBroadcastSuccess: true, + }), + // SDK: hygiene (deregister if already registered) + createDRepHygieneStep("sdk"), + // SDK: register + ...createCertPhaseSteps({ + idPrefix: "v1.botDRepCertificate.sdk.register", + walletType: "sdk", + certEndpoint: "botDRepCertificate", + action: "register", + label: "DRep registration (sdk)", + runtime: sdkReg, + requireBroadcastSuccess: true, + buildExtraBody: () => buildDRepRegBody(), + }), + // SDK: retire + ...createCertPhaseSteps({ + idPrefix: "v1.botDRepCertificate.sdk.retire", + walletType: "sdk", + certEndpoint: "botDRepCertificate", + action: "retire", + label: "DRep retirement (sdk)", + runtime: sdkRetire, + requireBroadcastSuccess: true, + }), + ], + }; +} + +/** + * Stake register_and_delegate then deregister for the SDK wallet. + * + * Uses register_and_delegate rather than bare register because the production + * stakingCertificates.ts includes .certificateScript() on the register cert. + * In Conway era a bare register cert with a script witness causes + * ExtraneousScriptWitnessesUTXOW; register_and_delegate avoids this because + * the delegate cert legitimately requires the same staking script. + * + * Pre-hygiene: a single self-contained step checks on-chain state via + * stakeAccountInfo and deregisters if needed. It handles stale Blockfrost + * cache gracefully — if the broadcast is rejected with StakeKeyNotRegisteredDELEG, + * the credential is confirmed clean (the check was a false positive) and the + * step succeeds. Because freeUtxos.ts no longer blocks UTxOs for rejected txs, + * any failed deregister attempt does not block subsequent proposals. + * + * Requires ctx.stakePoolIdHex to be set (CI_STAKE_POOL_ID_HEX). 
+ */ +export function createScenarioStakeCertificates(): Scenario { + const registerAndDelegateRuntime: { transactionId?: string; spentUtxoRefs?: { txHash: string; outputIndex: number }[] } = {}; + const deregisterRuntime: { transactionId?: string; spentUtxoRefs?: { txHash: string; outputIndex: number }[] } = {}; + + return { + id: "scenario.stake-certificates", + description: + "Register-and-delegate then deregister staking for SDK wallet, restoring pre-test state", + steps: [ + // ── Pre-hygiene: ensure credential is deregistered before test ───────── + // Single self-contained step — handles Blockfrost stale-cache gracefully. + { + id: "v1.botStakeCertificate.sdk.hygiene", + description: "Ensure SDK wallet stake credential is deregistered before test", + severity: "critical", + execute: async (ctx) => { + const stakeAddress = ctx.sdkStakeAddress; + if (!stakeAddress) { + return { + message: "sdkStakeAddress not in CI context; skipping hygiene", + artifacts: { skipped: true }, + }; + } + + const bot = getDefaultBot(ctx); + const token = await authenticateBot({ ctx, bot }); + + // Check on-chain state via the app's stakeAccountInfo proxy. + const checkResp = await requestJson<{ active?: boolean; error?: string }>({ + url: `${ctx.apiBaseUrl}/api/v1/stakeAccountInfo?stakeAddress=${encodeURIComponent(stakeAddress)}`, + method: "GET", + token, + }); + if (checkResp.status !== 200) { + throw new Error(`stakeAccountInfo failed (${checkResp.status}): ${stringifyRedacted(checkResp.data)}`); + } + if (!checkResp.data?.active) { + return { + message: "Stake credential not registered on-chain; proceeding to main test", + artifacts: { stakeAddress, active: false }, + }; + } + + // Credential is registered — deregister it. 
+ const wallet = ctx.wallets.find((w) => w.type === "sdk"); + if (!wallet) throw new Error('SDK wallet not found in CI context'); + + const utxoRefs = await fetchUtxoRefs({ ctx, walletId: wallet.walletId, token, botAddress: bot.paymentAddress, fresh: true }); + + const proposeResp = await requestJson<{ id?: string; error?: string }>({ + url: `${ctx.apiBaseUrl}/api/v1/botStakeCertificate`, + method: "POST", + token, + body: { walletId: wallet.walletId, address: bot.paymentAddress, action: "deregister", utxoRefs, description: "Stake deregistration (hygiene)" }, + }); + if (proposeResp.status !== 201 || !proposeResp.data?.id) { + throw new Error(`botStakeCertificate (hygiene deregister) failed (${proposeResp.status}): ${stringifyRedacted(proposeResp.data)}`); + } + const txId = proposeResp.data.id; + + const mnemonic1 = process.env.CI_MNEMONIC_2; + const mnemonic2 = process.env.CI_MNEMONIC_3; + if (!mnemonic1?.trim()) throw new Error("CI_MNEMONIC_2 is required for hygiene signing"); + if (!mnemonic2?.trim()) throw new Error("CI_MNEMONIC_3 is required for hygiene signing"); + + // Signer 1 — no broadcast (same as main test's deregister phase). + const sign1Result = await runStakeCertSigningFlow({ ctx, mnemonic: mnemonic1, signerIndex: 1, signBroadcast: false, preferredTransactionId: txId, requireBroadcastSuccess: false }); + console.log(`[hygiene] signer1 sign: status=${sign1Result.status} stakeWitness=${String(sign1Result.stakeWitnessIncluded)}`); + + // Signer 2 — broadcast with requireBroadcastSuccess: true, matching the + // main test's deregister phase. Catch stale-cache errors (the credential + // was reported active by Blockfrost but is not actually registered on-chain: + // StakeKeyNotRegisteredDELEG + ValueNotConservedUTxO from the missing 2 ADA + // deposit refund) and treat them as "already clean". 
+ try { + const sign2Result = await runStakeCertSigningFlow({ ctx, mnemonic: mnemonic2, signerIndex: 2, signBroadcast: true, preferredTransactionId: txId, requireBroadcastSuccess: true }); + console.log(`[hygiene] signer2 sign: status=${sign2Result.status} submitted=${String(sign2Result.submitted)} stakeWitness=${String(sign2Result.stakeWitnessIncluded)}`); + } catch (err) { + const errMsg = String(err); + console.log(`[hygiene] signer2 broadcast failed: ${errMsg.slice(0, 300)}`); + const isStaleCache = + errMsg.includes("StakeKeyNotRegisteredDELEG") || + errMsg.includes("StakeKeyAlreadyDeregistered") || + errMsg.includes("StakeKeyNotRegistered") || + errMsg.includes("ValueNotConservedUTxO") || + errMsg.includes("value is not balanced"); + if (isStaleCache) { + return { + message: "Hygiene deregister broadcast rejected — credential already deregistered (stale Blockfrost cache)", + artifacts: { stakeAddress, txId, staleCache: true }, + }; + } + throw err; + } + + // Broadcast succeeded — wait for on-chain confirmation. + const { attempts } = await pollUntilUtxosConsumed({ ctx, walletId: wallet.walletId, token, botAddress: bot.paymentAddress, spentUtxoRefs: utxoRefs }); + return { + message: `Hygiene deregister confirmed on-chain after ${attempts} poll attempt${attempts === 1 ? "" : "s"}`, + artifacts: { stakeAddress, txId, attempts }, + }; + }, + }, + + // ── Main test: register_and_delegate ───────────────────────────────── + // Uses register_and_delegate so the staking script witness required by + // the delegate cert prevents ExtraneousScriptWitnessesUTXOW on the + // register cert. Requires ctx.stakePoolIdHex (CI_STAKE_POOL_ID_HEX). 
+ ...createCertPhaseSteps({ + idPrefix: "v1.botStakeCertificate.sdk.registerAndDelegate", + walletType: "sdk", + certEndpoint: "botStakeCertificate", + action: "register_and_delegate", + label: "Stake register-and-delegate (sdk)", + runtime: registerAndDelegateRuntime, + requireBroadcastSuccess: true, + useStakeCertFlow: true, + buildExtraBody: (ctx) => { + if (!ctx.stakePoolIdHex) { + throw new Error("ctx.stakePoolIdHex is required for register_and_delegate — set CI_STAKE_POOL_ID_HEX"); + } + return { poolId: ctx.stakePoolIdHex }; + }, + }), + + // ── Main test: deregister (restore pre-test state) ──────────────────── + ...createCertPhaseSteps({ + idPrefix: "v1.botStakeCertificate.sdk.deregister", + walletType: "sdk", + certEndpoint: "botStakeCertificate", + action: "deregister", + label: "Stake deregistration (sdk)", + runtime: deregisterRuntime, + requireBroadcastSuccess: true, + useStakeCertFlow: true, + }), + ], + }; +} diff --git a/scripts/ci/scenarios/steps/datum.ts b/scripts/ci/scenarios/steps/datum.ts new file mode 100644 index 00000000..9d6c2cc6 --- /dev/null +++ b/scripts/ci/scenarios/steps/datum.ts @@ -0,0 +1,74 @@ +import type { CIBootstrapContext, Scenario } from "../../framework/types"; +import { requestJson } from "../../framework/http"; +import { stringifyRedacted } from "../../framework/redact"; +import { authenticateSignerWithMnemonic } from "../../framework/walletAuth"; +import { signDatumWithMnemonic } from "../../framework/datumSign"; +import { getWalletByType } from "./helpers"; + +export function createScenarioSubmitDatum(ctx: CIBootstrapContext): Scenario { + return { + id: "scenario.submit-datum", + description: "Datum submission route checks", + steps: ctx.walletTypes.map((walletType) => ({ + id: `v1.submitDatum.${walletType}.signer2`, + description: `Submit signed datum using signer auth token (${walletType} wallet)`, + severity: "critical" as const, + execute: async (runCtx: CIBootstrapContext) => { + const mnemonic = 
process.env.CI_MNEMONIC_2; + if (!mnemonic?.trim()) { + throw new Error("CI_MNEMONIC_2 is required for submitDatum scenario"); + } + const wallet = getWalletByType(runCtx, walletType); + if (!wallet) { + throw new Error(`Missing ${walletType} wallet for submitDatum scenario`); + } + const auth = await authenticateSignerWithMnemonic({ + ctx: runCtx, + mnemonic, + }); + const datum = JSON.stringify({ + source: "ci-route-chain", + kind: "submitDatum", + walletType: wallet.type, + walletId: wallet.walletId, + createdAt: new Date().toISOString(), + }); + const signedDatum = await signDatumWithMnemonic({ + ctx: runCtx, + mnemonic, + datum, + }); + if (signedDatum.signerAddress !== auth.signerAddress) { + throw new Error("Signer address mismatch between auth and datum signing"); + } + const response = await requestJson<{ id?: string; error?: string }>({ + url: `${runCtx.apiBaseUrl}/api/v1/submitDatum`, + method: "POST", + token: auth.token, + body: { + walletId: wallet.walletId, + signature: signedDatum.signature, + key: signedDatum.key, + address: auth.signerAddress, + datum, + callbackUrl: `${runCtx.apiBaseUrl}/api/v1/og`, + description: `CI submitDatum for ${wallet.type}`, + }, + }); + if (response.status !== 201 || !response.data?.id) { + throw new Error( + `submitDatum failed (${response.status}): ${stringifyRedacted(response.data)}`, + ); + } + return { + message: `submitDatum created signable ${response.data.id}`, + artifacts: { + signableId: response.data.id, + walletId: wallet.walletId, + signerAddress: auth.signerAddress, + }, + }; + }, + })), + }; +} diff --git a/scripts/ci/scenarios/steps/discovery.ts b/scripts/ci/scenarios/steps/discovery.ts new file mode 100644 index 00000000..74a9b4b8 --- /dev/null +++ b/scripts/ci/scenarios/steps/discovery.ts @@ -0,0 +1,250 @@ +import type { CIBootstrapContext, RouteStep, Scenario } from "../../framework/types"; +import { requestJson } from "../../framework/http"; +import { getDefaultBot } from 
"../../framework/botContext"; +import { authenticateBot } from "../../framework/botAuth"; +import { stringifyRedacted } from "../../framework/redact"; +import { getWalletByType } from "./helpers"; + +function createWalletIdsStep(): RouteStep { + return { + id: "v1.walletIds.botAddress", + description: "Verify bot wallet discovery via /api/v1/walletIds", + severity: "critical", + execute: async (ctx) => { + const bot = getDefaultBot(ctx); + const token = await authenticateBot({ ctx, bot }); + const response = await requestJson | { error?: string }>({ + url: `${ctx.apiBaseUrl}/api/v1/walletIds?address=${encodeURIComponent(bot.paymentAddress)}`, + method: "GET", + token, + }); + if (response.status !== 200 || !Array.isArray(response.data)) { + throw new Error(`walletIds failed (${response.status}): ${stringifyRedacted(response.data)}`); + } + + const ids = new Set( + response.data + .map((w) => (typeof w.walletId === "string" ? w.walletId : "")) + .filter(Boolean), + ); + const missing = ctx.wallets.map((w) => w.walletId).filter((id) => !ids.has(id)); + if (missing.length) { + throw new Error(`walletIds did not include expected wallets: ${missing.join(", ")}`); + } + + return { + message: `walletIds returned ${response.data.length} wallets and includes all bootstrap wallets`, + artifacts: { returnedWallets: response.data.length }, + }; + }, + }; +} + +function createPendingTransactionsZeroStep(walletType: string): RouteStep { + return { + id: `v1.pendingTransactions.zero.${walletType}`, + description: `Assert no pending transactions at bootstrap for ${walletType} wallet`, + severity: "non-critical", + execute: async (ctx) => { + const bot = getDefaultBot(ctx); + const token = await authenticateBot({ ctx, bot }); + const wallet = getWalletByType(ctx, walletType); + if (!wallet) { + throw new Error(`Missing wallet type in context: ${walletType}`); + } + const response = await requestJson({ + url: 
`${ctx.apiBaseUrl}/api/v1/pendingTransactions?walletId=${encodeURIComponent(wallet.walletId)}&address=${encodeURIComponent(bot.paymentAddress)}`, + method: "GET", + token, + }); + if (response.status !== 200 || !Array.isArray(response.data)) { + throw new Error( + `pendingTransactions zero-check failed for ${walletType} (${response.status}): ${stringifyRedacted(response.data)}`, + ); + } + if (response.data.length !== 0) { + throw new Error( + `pendingTransactions zero-check: expected 0 pending txs for ${walletType} at bootstrap, found ${response.data.length}. A previous CI run may have left stale state.`, + ); + } + return { + message: `pendingTransactions confirmed empty for ${walletType} at bootstrap`, + artifacts: { walletId: wallet.walletId, pendingCount: 0 }, + }; + }, + }; +} + +function createProxiesListStep(walletType: string): RouteStep { + return { + id: `v1.proxies.list.${walletType}`, + description: `List confirmed proxies for ${walletType} wallet`, + severity: "non-critical", + execute: async (ctx) => { + const bot = getDefaultBot(ctx); + const token = await authenticateBot({ ctx, bot }); + const wallet = getWalletByType(ctx, walletType); + if (!wallet) { + throw new Error(`Missing wallet type in context: ${walletType}`); + } + + const response = await requestJson({ + url: `${ctx.apiBaseUrl}/api/v1/proxies?walletId=${encodeURIComponent(wallet.walletId)}&address=${encodeURIComponent(bot.paymentAddress)}`, + method: "GET", + token, + }); + if (response.status !== 200 || !Array.isArray(response.data)) { + throw new Error( + `proxies list failed for ${walletType} (${response.status}): ${stringifyRedacted(response.data)}`, + ); + } + + return { + message: `proxies returned ${response.data.length} confirmed proxies for ${walletType}`, + artifacts: { walletId: wallet.walletId, proxyCount: response.data.length }, + }; + }, + }; +} + +function createLookupMultisigWalletStep(ctx: CIBootstrapContext): RouteStep { + return { + id: 
"v1.lookupMultisigWallet.signerKeyHash", + description: "Smoke-test public /api/v1/lookupMultisigWallet with a signer key hash", + severity: "non-critical", + execute: async (runCtx) => { + const signerAddress = runCtx.signerAddresses[0]; + if (!signerAddress) { + throw new Error("lookupMultisigWallet: no signer addresses in bootstrap context"); + } + const { resolvePaymentKeyHash } = await import("@meshsdk/core"); + const keyHash = resolvePaymentKeyHash(signerAddress); + const response = await requestJson({ + url: `${runCtx.apiBaseUrl}/api/v1/lookupMultisigWallet?pubKeyHashes=${encodeURIComponent(keyHash)}&network=${runCtx.networkId}`, + method: "GET", + }); + if (response.status !== 200 || !Array.isArray(response.data)) { + throw new Error( + `lookupMultisigWallet failed (${response.status}): ${stringifyRedacted(response.data)}`, + ); + } + return { + message: `lookupMultisigWallet returned ${response.data.length} on-chain metadata entries for signer key hash`, + artifacts: { keyHash, matchCount: response.data.length }, + }; + }, + }; +} + +function createFreeUtxosStep(walletType: string): RouteStep { + return { + id: `v1.freeUtxos.${walletType}`, + description: `Probe free UTxOs route for ${walletType} wallet`, + severity: "non-critical", + execute: async (ctx) => { + const bot = getDefaultBot(ctx); + const token = await authenticateBot({ ctx, bot }); + const wallet = getWalletByType(ctx, walletType); + if (!wallet) { + throw new Error(`Missing wallet type in context: ${walletType}`); + } + const response = await requestJson({ + url: `${ctx.apiBaseUrl}/api/v1/freeUtxos?walletId=${encodeURIComponent(wallet.walletId)}&address=${encodeURIComponent(bot.paymentAddress)}`, + method: "GET", + token, + }); + if (response.status !== 200 || !Array.isArray(response.data)) { + throw new Error( + `freeUtxos failed for ${walletType} (${response.status}): ${stringifyRedacted(response.data)}`, + ); + } + return { + message: `freeUtxos returned ${response.data.length} entries 
for ${walletType}`, + artifacts: { walletId: wallet.walletId, utxoCount: response.data.length }, + }; + }, + }; +} + +function createNativeScriptStep(walletType: string): RouteStep { + return { + id: `v1.nativeScript.${walletType}`, + description: `Fetch and validate native scripts for ${walletType} wallet`, + severity: "non-critical", + execute: async (ctx) => { + const bot = getDefaultBot(ctx); + const token = await authenticateBot({ ctx, bot }); + const wallet = getWalletByType(ctx, walletType); + if (!wallet) { + throw new Error(`Missing wallet type in context: ${walletType}`); + } + const response = await requestJson | { error?: string }>({ + url: `${ctx.apiBaseUrl}/api/v1/nativeScript?walletId=${encodeURIComponent(wallet.walletId)}&address=${encodeURIComponent(bot.paymentAddress)}`, + method: "GET", + token, + }); + if (response.status !== 200 || !Array.isArray(response.data)) { + throw new Error( + `nativeScript failed for ${walletType} (${response.status}): ${stringifyRedacted(response.data)}`, + ); + } + if (response.data.length === 0) { + throw new Error(`nativeScript returned no scripts for ${walletType}`); + } + + // Assert a payment script entry is present + const paymentEntry = response.data.find((entry) => entry.type === "payment"); + if (!paymentEntry) { + throw new Error( + `nativeScript: no "payment" type entry for ${walletType}; got types: ${response.data.map((e) => e.type).join(", ")}`, + ); + } + + // If the decoded payment script is an atLeast type, validate the required count + const script = paymentEntry.script as Record | null | undefined; + if (script && typeof script === "object" && script.type === "atLeast" && typeof script.required === "number") { + const numRequired = parseInt(process.env.CI_NUM_REQUIRED_SIGNERS ?? 
"2", 10); + if (script.required !== numRequired) { + throw new Error( + `nativeScript: atLeast required=${script.required} does not match CI_NUM_REQUIRED_SIGNERS=${numRequired} for ${walletType}`, + ); + } + } + + return { + message: `nativeScript returned ${response.data.length} script entries for ${walletType} (payment script present)`, + artifacts: { + walletId: wallet.walletId, + walletType, + scriptCount: response.data.length, + scriptTypes: response.data.map((e) => e.type), + nativeScripts: response.data, + }, + }; + }, + }; +} + +export function createScenarioPendingAndDiscovery(ctx: CIBootstrapContext): Scenario { + return { + id: "scenario.wallet-discovery", + description: "Wallet discovery checks across bootstrap wallets", + steps: [ + createWalletIdsStep(), + ...ctx.walletTypes.map((walletType) => createPendingTransactionsZeroStep(walletType)), + ...ctx.walletTypes.map((walletType) => createProxiesListStep(walletType)), + createLookupMultisigWalletStep(ctx), + ], + }; +} + +export function createScenarioAdaRouteHealth(ctx: CIBootstrapContext): Scenario { + return { + id: "scenario.ada-route-health", + description: "Route chain for transfer readiness (freeUtxos + nativeScript)", + steps: [ + ...ctx.walletTypes.map((walletType) => createFreeUtxosStep(walletType)), + ...ctx.walletTypes.map((walletType) => createNativeScriptStep(walletType)), + ], + }; +} diff --git a/scripts/ci/scenarios/steps/governance.ts b/scripts/ci/scenarios/steps/governance.ts new file mode 100644 index 00000000..a2eb6f67 --- /dev/null +++ b/scripts/ci/scenarios/steps/governance.ts @@ -0,0 +1,131 @@ +import type { CIBootstrapContext, Scenario } from "../../framework/types"; +import { requestJson } from "../../framework/http"; +import { getDefaultBot } from "../../framework/botContext"; +import { authenticateBot } from "../../framework/botAuth"; +import { stringifyRedacted } from "../../framework/redact"; +import { + buildBallotUpsertPayload, + getDeterministicActiveProposals, + type 
ActiveProposal, +} from "../../framework/governance"; +import { getWalletByType } from "./helpers"; + +export function createScenarioGovernanceRoutes(ctx: CIBootstrapContext): Scenario { + const runtime: { + activeProposals: ActiveProposal[]; + } = { + activeProposals: [], + }; + return { + id: "scenario.governance-routes", + description: "Governance route checks for active proposals and ballot upsert", + steps: [ + { + id: "v1.governanceActiveProposals.preprod", + description: "Fetch active governance proposals on preprod", + severity: "critical", + execute: async (runCtx) => { + const bot = getDefaultBot(runCtx); + const token = await authenticateBot({ ctx: runCtx, bot }); + const response = await requestJson<{ + proposals?: unknown[]; + activeCount?: number; + sourceCount?: number; + error?: string; + }>({ + url: `${runCtx.apiBaseUrl}/api/v1/governanceActiveProposals?network=0&count=20&page=1&order=desc&details=false`, + method: "GET", + token, + }); + if (response.status !== 200) { + throw new Error( + `governanceActiveProposals failed (${response.status}): ${stringifyRedacted(response.data)}`, + ); + } + runtime.activeProposals = getDeterministicActiveProposals(response.data, 2); + return { + message: `governanceActiveProposals returned ${runtime.activeProposals.length} usable active proposal(s)`, + artifacts: { + activeCount: response.data?.activeCount, + sourceCount: response.data?.sourceCount, + selectedProposalIds: runtime.activeProposals.map((proposal) => proposal.proposalId), + }, + }; + }, + }, + ...ctx.walletTypes.map((walletType) => ({ + id: `v1.botBallotsUpsert.${walletType}`, + description: `Upsert governance ballots from active proposals (${walletType} wallet, idempotent update)`, + severity: "critical" as const, + execute: async (runCtx: CIBootstrapContext) => { + if (!runtime.activeProposals.length) { + return { + message: "No active proposals available on preprod; ballot upsert route skipped", + artifacts: { + skipped: true, + }, + }; + } + 
const bot = getDefaultBot(runCtx); + const token = await authenticateBot({ ctx: runCtx, bot }); + const wallet = getWalletByType(runCtx, walletType); + if (!wallet) { + throw new Error(`Missing ${walletType} wallet for governance ballot upsert`); + } + const ballotName = `CI governance ballot ${runCtx.createdAt} ${walletType}`; + const firstPayload = buildBallotUpsertPayload({ + walletId: wallet.walletId, + ballotName, + proposals: runtime.activeProposals, + }); + const firstResponse = await requestJson<{ + ballot?: { id?: string; items?: string[]; choices?: string[] }; + error?: string; + }>({ + url: `${runCtx.apiBaseUrl}/api/v1/botBallotsUpsert`, + method: "POST", + token, + body: firstPayload as unknown as Record, + }); + if (firstResponse.status !== 200 || !firstResponse.data?.ballot?.id) { + throw new Error( + `botBallotsUpsert seed failed (${firstResponse.status}): ${stringifyRedacted(firstResponse.data)}`, + ); + } + const secondPayload = buildBallotUpsertPayload({ + walletId: wallet.walletId, + ballotName, + proposals: runtime.activeProposals, + secondPass: true, + }); + const secondResponse = await requestJson<{ + ballot?: { id?: string; items?: string[]; choices?: string[] }; + error?: string; + }>({ + url: `${runCtx.apiBaseUrl}/api/v1/botBallotsUpsert`, + method: "POST", + token, + body: secondPayload as unknown as Record, + }); + if (secondResponse.status !== 200 || !secondResponse.data?.ballot?.id) { + throw new Error( + `botBallotsUpsert update failed (${secondResponse.status}): ${stringifyRedacted(secondResponse.data)}`, + ); + } + if (secondResponse.data.ballot.id !== firstResponse.data.ballot.id) { + throw new Error("botBallotsUpsert update should target the same ballot"); + } + return { + message: `botBallotsUpsert updated ballot ${secondResponse.data.ballot.id}`, + artifacts: { + walletId: wallet.walletId, + ballotId: secondResponse.data.ballot.id, + proposalCount: runtime.activeProposals.length, + choices: secondResponse.data.ballot.choices ?? 
[], + }, + }; + }, + })), + ], + }; +} diff --git a/scripts/ci/scenarios/steps/helpers.ts b/scripts/ci/scenarios/steps/helpers.ts new file mode 100644 index 00000000..4b7816d9 --- /dev/null +++ b/scripts/ci/scenarios/steps/helpers.ts @@ -0,0 +1,17 @@ +import type { CIBootstrapContext, CIWalletType } from "../../framework/types"; + +export function getWalletByType(ctx: CIBootstrapContext, typeRaw: string) { + const type = typeRaw.trim().toLowerCase(); + return ctx.wallets.find((w) => w.type === type); +} + +export function getRingWalletTypes(ctx: CIBootstrapContext): [CIWalletType, CIWalletType, CIWalletType] { + const expected: CIWalletType[] = ["legacy", "hierarchical", "sdk"]; + const missing = expected.filter((walletType) => !ctx.wallets.some((wallet) => wallet.type === walletType)); + if (missing.length) { + throw new Error( + `Ring transfer scenario requires wallet types: legacy,hierarchical,sdk; missing: ${missing.join(", ")}`, + ); + } + return ["legacy", "hierarchical", "sdk"]; +} diff --git a/scripts/ci/scenarios/steps/proxyBot.ts b/scripts/ci/scenarios/steps/proxyBot.ts new file mode 100644 index 00000000..680212ed --- /dev/null +++ b/scripts/ci/scenarios/steps/proxyBot.ts @@ -0,0 +1,1541 @@ +import type { CIBootstrapContext, CIWalletType, RouteStep, Scenario } from "../../framework/types"; +import { boolFromEnv } from "../../framework/env"; +import { requestJson } from "../../framework/http"; +import { authenticateBot } from "../../framework/botAuth"; +import { getDefaultBot } from "../../framework/botContext"; +import { stringifyRedacted } from "../../framework/redact"; +import { getDeterministicActiveProposals, type ActiveProposal } from "../../framework/governance"; +import { runSigningFlow } from "../flows/signingFlow"; +import { ensureProxyLifecycleUtxoShape } from "../flows/utxoShapeFlow"; +import { recoverProxyRowsFromChainForWalletType } from "../proxyChainRecovery"; +import { adoptProxyOrphansForWalletType } from "../proxyOrphanAdoption"; 
+import { getWalletByType } from "./helpers"; +import { + assertProxyFullLifecyclePreflight, + COLLATERAL_REQUIRED_LOVELACE, + DREP_REGISTER_REQUIRED_LOVELACE, + formatAda, + FULL_LIFECYCLE_FEE_BUFFER_LOVELACE, + key, + LIFECYCLE_PROXY_LOVELACE, + parseLovelace, + PROXY_FULL_LIFECYCLE_WALLET_TYPES, + PROXY_SPEND_LOVELACE, + sameRef, + SETUP_UTXO_REQUIRED_LOVELACE, + toRef, + type ScriptUtxo, + type UtxoRef, +} from "../proxyLifecyclePreflight"; + +export { + analyzeProxyFullLifecycleUtxoShape, + assertProxyFullLifecyclePreflight, + DREP_REGISTER_REQUIRED_LOVELACE, + FULL_LIFECYCLE_FEE_BUFFER_LOVELACE, + LIFECYCLE_PROXY_LOVELACE, + PROXY_FULL_LIFECYCLE_WALLET_TYPES, + type ProxyLifecycleUtxoShapeAnalysis, + type ProxyLifecycleUtxoShapeStatus, + type ScriptUtxo, + type UtxoRef, +} from "../proxyLifecyclePreflight"; + +type ProxyRow = { id: string; proxyAddress: string; authTokenId: string; isActive?: boolean }; +type ProxySetup = { proxyAddress: string; authTokenId: string; paramUtxo: UtxoRef }; +type ProxyActionRequestRefs = { utxoRefs: UtxoRef[]; collateralRef: UtxoRef }; +type ProxyActionSelection = ProxyActionRequestRefs & Record; +type ProxyDRepInfoResponse = { active: boolean; dRepId: string; error?: string }; +type ProxyLifecycleSignerIndex = 0 | 1 | 2; +type ProxyLifecycleMnemonicEnvName = "CI_MNEMONIC_1" | "CI_MNEMONIC_2" | "CI_MNEMONIC_3"; + +const PROXY_LIFECYCLE_COLLATERAL_SIGNER_INDEX = 0; +const PROXY_LIFECYCLE_SIGNER_INDEXES = [0, 1] as const; +export const PROXY_ACTION_REQUIRED_LOVELACE = 2_000_000n; +export const PROXY_ACTION_FEE_BUFFER_LOVELACE = 2_000_000n; + +export function getProxyDRepAnchorUrl( + env: Record = process.env, +): string { + const anchorUrl = env.CI_DREP_ANCHOR_URL?.trim(); + if (!anchorUrl) { + throw new Error("CI_DREP_ANCHOR_URL is required for proxy DRep registration"); + } + return anchorUrl; +} + +function getTransactionId(data: unknown): string | undefined { + if (typeof data === "object" && data !== null) { + const record = 
data as Record; + if (typeof record.id === "string") return record.id; + return getTransactionId(record.transaction); + } + return undefined; +} + +function getSubmittedTxHash(data: unknown): string | undefined { + if (typeof data === "string") return data; + if (typeof data === "object" && data !== null) { + const record = data as Record; + if (typeof record.txHash === "string") return record.txHash; + return getSubmittedTxHash(record.transaction); + } + return undefined; +} + +function getCleanupPhase(data: unknown): "sweep" | "burn" | undefined { + if (typeof data === "object" && data !== null) { + const record = data as Record; + const cleanup = record.cleanup; + if (typeof cleanup === "object" && cleanup !== null) { + const phase = (cleanup as Record).phase; + if (phase === "sweep" || phase === "burn") return phase; + } + } + return undefined; +} + +export function normalizeJsonArtifact(value: unknown): unknown { + if (typeof value === "bigint") return value.toString(); + if (Array.isArray(value)) return value.map(normalizeJsonArtifact); + if (typeof value === "object" && value !== null) { + return Object.fromEntries( + Object.entries(value).map(([key, child]) => [key, normalizeJsonArtifact(child)]), + ); + } + return value; +} + +export function splitProxyActionSelection(selection: ProxyActionSelection): { + requestRefs: ProxyActionRequestRefs; + selectionArtifacts: Record; +} { + const { utxoRefs, collateralRef, ...selectionArtifacts } = selection; + return { + requestRefs: { utxoRefs, collateralRef }, + selectionArtifacts: normalizeJsonArtifact(selectionArtifacts) as Record, + }; +} + +export function shouldSkipCleanupBurnPropose(runtime: { + cleanupPhase?: "sweep" | "burn"; + cleanupBurnTransactionId?: string; +}): boolean { + return runtime.cleanupPhase === "burn" && !runtime.cleanupBurnTransactionId; +} + +export function shouldSkipCleanupBurnSigning(runtime: { + cleanupBurnSkipped?: boolean; + cleanupBurnTransactionId?: string; +}): boolean { + return 
runtime.cleanupBurnSkipped === true || !runtime.cleanupBurnTransactionId; +} + +export function shouldSkipActionConfirmation(runtime: { + actionTransactionId?: string; + actionUtxoRefs?: UtxoRef[]; +}): boolean { + return !runtime.actionTransactionId || !runtime.actionUtxoRefs?.length; +} + +async function fetchFreeUtxos(args: { + ctx: CIBootstrapContext; + walletId: string; + token: string; + address: string; + fresh?: boolean; +}): Promise { + const fresh = args.fresh ? "&fresh=true" : ""; + const response = await requestJson({ + url: `${args.ctx.apiBaseUrl}/api/v1/freeUtxos?walletId=${encodeURIComponent(args.walletId)}&address=${encodeURIComponent(args.address)}${fresh}`, + method: "GET", + token: args.token, + }); + if (response.status !== 200 || !Array.isArray(response.data)) { + throw new Error(`freeUtxos failed (${response.status}): ${stringifyRedacted(response.data)}`); + } + return response.data; +} + +async function fetchKeyAddressUtxos(args: { + ctx: CIBootstrapContext; + address: string; +}): Promise { + const apiKey = process.env.CI_BLOCKFROST_PREPROD_API_KEY?.trim(); + if (!apiKey) { + throw new Error("CI_BLOCKFROST_PREPROD_API_KEY is required to fetch proxy lifecycle key-address collateral"); + } + if (args.ctx.networkId !== 0) { + throw new Error(`Proxy lifecycle key collateral lookup is preprod-only. 
Expected networkId=0, received networkId=${args.ctx.networkId}`); + } + + const { BlockfrostProvider } = await import("@meshsdk/core"); + const provider = new BlockfrostProvider(apiKey); + const utxos = await provider.fetchAddressUTxOs(args.address); + return utxos.map((utxo) => ({ + input: utxo.input, + output: utxo.output, + })); +} + +function isAdaOnlyCollateral(utxo: ScriptUtxo): boolean { + return ( + parseLovelace(utxo) >= COLLATERAL_REQUIRED_LOVELACE && + utxo.output.amount.every((asset) => asset.unit === "lovelace") + ); +} + +function selectSeparateCollateral( + utxos: ScriptUtxo[], + context: string, +): ScriptUtxo { + const collateral = [...utxos] + .filter(isAdaOnlyCollateral) + .sort((left, right) => { + const leftLovelace = parseLovelace(left); + const rightLovelace = parseLovelace(right); + if (leftLovelace < rightLovelace) return -1; + if (leftLovelace > rightLovelace) return 1; + return 0; + })[0]; + if (!collateral) { + throw new Error( + `${context} requires an ADA-only bot payment-address collateral UTxO with at least ${formatAda(COLLATERAL_REQUIRED_LOVELACE)}`, + ); + } + return collateral; +} + +export function selectSetupRefs(args: { + walletUtxos: ScriptUtxo[]; + collateralUtxos: ScriptUtxo[]; +}): { utxoRefs: UtxoRef[]; collateralRef: UtxoRef } { + const setupUtxo = args.walletUtxos.find((utxo) => parseLovelace(utxo) >= SETUP_UTXO_REQUIRED_LOVELACE); + if (!setupUtxo) { + throw new Error(`proxy setup requires a wallet UTxO with at least ${formatAda(SETUP_UTXO_REQUIRED_LOVELACE)}`); + } + const setupRef = toRef(setupUtxo); + const collateral = selectSeparateCollateral(args.collateralUtxos, "proxy setup"); + return { utxoRefs: [setupRef], collateralRef: toRef(collateral) }; +} + +export function selectAuthTokenRefs(args: { + walletUtxos: ScriptUtxo[]; + collateralUtxos: ScriptUtxo[]; + authTokenId: string; + includeAllAuthTokens?: boolean; +}): { utxoRefs: UtxoRef[]; collateralRef: UtxoRef } { + const authTokenUtxos = 
args.walletUtxos.filter((utxo) =>
    utxo.output.amount.some((asset) => asset.unit === args.authTokenId && BigInt(asset.quantity) > 0n),
  );
  if (!authTokenUtxos.length) {
    throw new Error("No proxy auth-token UTxO found in freeUtxos response");
  }
  const spendUtxos = args.includeAllAuthTokens ? authTokenUtxos : [authTokenUtxos[0]!];
  const refs = spendUtxos.map(toRef);
  const collateral = selectSeparateCollateral(args.collateralUtxos, "proxy action");
  return { utxoRefs: refs, collateralRef: toRef(collateral) };
}

/**
 * Selects inputs for the proxy DRep registration transaction.
 *
 * Fix: this used to be a verbatim ~40-line copy of
 * selectAuthTokenRefsWithMinLovelace (same selection order, same error text).
 * It now delegates to that shared selector with context "proxy DRep register",
 * which produces byte-identical behavior: first auth-token UTxO is mandatory,
 * remaining wallet UTxOs are added largest-first until requiredLovelace is
 * covered, collateral is reserved separately, and the same shortfall error is
 * thrown. (Function declarations hoist, so the forward reference is safe.)
 */
export function selectDRepRegisterRefs(args: {
  walletUtxos: ScriptUtxo[];
  collateralUtxos: ScriptUtxo[];
  authTokenId: string;
  requiredLovelace?: bigint;
}): { utxoRefs: UtxoRef[]; collateralRef: UtxoRef; selectedLovelace: bigint; requiredLovelace: bigint } {
  return selectAuthTokenRefsWithMinLovelace({
    walletUtxos: args.walletUtxos,
    collateralUtxos: args.collateralUtxos,
    authTokenId: args.authTokenId,
    requiredLovelace: args.requiredLovelace ?? DREP_REGISTER_REQUIRED_LOVELACE,
    context: "proxy DRep register",
  });
}

/**
 * Selects the auth-token UTxO plus enough additional wallet inputs to reach
 * requiredLovelace, reserving a separate ADA-only collateral UTxO.
 *
 * Selection order: the auth-token UTxO is always first; other wallet UTxOs are
 * appended largest-lovelace-first until the target is met.
 *
 * @throws when no auth-token UTxO exists, when no qualifying collateral
 *   exists, or when the selectable total falls short of requiredLovelace.
 */
export function selectAuthTokenRefsWithMinLovelace(args: {
  walletUtxos: ScriptUtxo[];
  collateralUtxos: ScriptUtxo[];
  authTokenId: string;
  requiredLovelace: bigint;
  context: string;
}): { utxoRefs: UtxoRef[]; collateralRef: UtxoRef; selectedLovelace: bigint; requiredLovelace: bigint } {
  const authTokenUtxo = args.walletUtxos.find((utxo) =>
    utxo.output.amount.some((asset) => asset.unit === args.authTokenId && BigInt(asset.quantity) > 0n),
  );
  if (!authTokenUtxo) {
    throw new Error("No proxy auth-token UTxO found in freeUtxos response");
  }

  const authRef = toRef(authTokenUtxo);
  const collateral = selectSeparateCollateral(args.collateralUtxos, args.context);
  const collateralRef = toRef(collateral);
  const selectedRefs = [authRef];
  let selectedLovelace = parseLovelace(authTokenUtxo);
  // Largest-first so the fewest extra inputs are consumed.
  const fundingCandidates = [...args.walletUtxos]
    .filter((utxo) => {
      const ref = toRef(utxo);
      return !sameRef(ref, authRef);
    })
    .sort((left, right) => {
      const leftLovelace = parseLovelace(left);
      const rightLovelace = parseLovelace(right);
      if (leftLovelace > rightLovelace) return -1;
      if (leftLovelace < rightLovelace) return 1;
      return 0;
    });

  for (const utxo of fundingCandidates) {
    if (selectedLovelace >= args.requiredLovelace) break;
    selectedRefs.push(toRef(utxo));
    selectedLovelace += parseLovelace(utxo);
  }

  if (selectedLovelace < args.requiredLovelace) {
    throw new Error(
      `${args.context} requires ${formatAda(args.requiredLovelace)} in selected wallet inputs but only ${formatAda(selectedLovelace)} is available after reserving separate collateral. Fund or consolidate the CI wallet before running scenario.proxy-full-lifecycle.`,
    );
  }

  return {
    utxoRefs: selectedRefs,
    collateralRef,
    selectedLovelace,
    requiredLovelace: args.requiredLovelace,
  };
}

/**
 * Polls freeUtxos (fresh) until none of the given spent refs is still listed,
 * i.e. the transaction that consumed them has been observed. Waits
 * retryDelayMs between attempts (default 8s, 30 attempts).
 *
 * @returns the number of attempts taken (1-based).
 * @throws on timeout.
 */
async function pollUntilUtxosConsumed(args: {
  ctx: CIBootstrapContext;
  walletId: string;
  token: string;
  address: string;
  spentUtxoRefs: UtxoRef[];
  maxRetries?: number;
  retryDelayMs?: number;
}): Promise<{ attempts: number }> {
  const maxRetries = args.maxRetries ?? 30;
  const retryDelayMs = args.retryDelayMs ?? 8000;
  const spent = new Set(args.spentUtxoRefs.map(key));
  for (let attempt = 0; attempt < maxRetries; attempt++) {
    if (attempt > 0) {
      await new Promise((resolve) => setTimeout(resolve, retryDelayMs));
    }
    const utxos = await fetchFreeUtxos({ ...args, fresh: true });
    if (!utxos.some((utxo) => spent.has(key(toRef(utxo))))) {
      return { attempts: attempt + 1 };
    }
  }
  throw new Error(`Timed out waiting for proxy transaction inputs to be confirmed`);
}

// Injectable dependency bag so runProxyFullLifecycleHygiene is unit-testable.
type ProxyLifecycleHygieneDeps = {
  requestJson: typeof requestJson;
  authenticateBot: typeof authenticateBot;
  getDefaultBot: typeof getDefaultBot;
  fetchFreeUtxos: typeof fetchFreeUtxos;
  fetchKeyAddressUtxos: typeof fetchKeyAddressUtxos;
  runSigningFlow: typeof runSigningFlow;
  pollUntilUtxosConsumed: typeof pollUntilUtxosConsumed;
  // NOTE(review): generic parameters appear stripped in transit — presumably
  // Record<string, string | undefined>; confirm against the original file.
  env: Record;
};

const defaultProxyLifecycleHygieneDeps: ProxyLifecycleHygieneDeps = {
  requestJson,
  authenticateBot,
  getDefaultBot,
  fetchFreeUtxos,
  fetchKeyAddressUtxos,
  runSigningFlow,
  pollUntilUtxosConsumed,
  env: process.env,
};

/** Lists the wallet's active proxies via the API; throws on non-200/non-array. */
async function listActiveProxies(args: {
  ctx: CIBootstrapContext;
  walletId: string;
  address: string;
  token: string;
  requestJsonFn: typeof requestJson;
}): Promise {
  const response = await args.requestJsonFn({
    url:
`${args.ctx.apiBaseUrl}/api/v1/proxies?walletId=${encodeURIComponent(args.walletId)}&address=${encodeURIComponent(args.address)}`, + method: "GET", + token: args.token, + }); + if (response.status !== 200 || !Array.isArray(response.data)) { + throw new Error(`proxies list failed (${response.status}): ${stringifyRedacted(response.data)}`); + } + return response.data; +} + +async function fetchProxyDRepInfo(args: { + ctx: CIBootstrapContext; + walletId: string; + address: string; + proxyId: string; + token: string; + requestJsonFn: typeof requestJson; +}): Promise { + const response = await args.requestJsonFn({ + url: `${args.ctx.apiBaseUrl}/api/v1/proxyDRepInfo?walletId=${encodeURIComponent(args.walletId)}&address=${encodeURIComponent(args.address)}&proxyId=${encodeURIComponent(args.proxyId)}`, + method: "GET", + token: args.token, + }); + if (response.status !== 200 || typeof response.data?.active !== "boolean" || typeof response.data?.dRepId !== "string") { + throw new Error(`proxyDRepInfo failed (${response.status}): ${stringifyRedacted(response.data)}`); + } + return response.data; +} + +export async function runProxyFullLifecycleHygiene(args: { + ctx: CIBootstrapContext; + walletType: CIWalletType; + deps?: Partial; +}): Promise<{ message: string; artifacts: Record }> { + const deps = { ...defaultProxyLifecycleHygieneDeps, ...args.deps }; + const wallet = getWalletByType(args.ctx, args.walletType); + if (!wallet) throw new Error(`Missing ${args.walletType} wallet`); + const bot = deps.getDefaultBot(args.ctx); + const token = await deps.authenticateBot({ ctx: args.ctx, bot }); + const initialProxies = await listActiveProxies({ + ctx: args.ctx, + walletId: wallet.walletId, + address: bot.paymentAddress, + token, + requestJsonFn: deps.requestJson, + }); + + if (!initialProxies.length) { + return { + message: `proxy full lifecycle hygiene found no active proxies for ${args.walletType}`, + artifacts: { walletId: wallet.walletId, cleaned: [], noOp: true }, + }; + } + 
+ const cleaned: Record[] = []; + const signer0Mnemonic = deps.env.CI_MNEMONIC_1; + const signer1Mnemonic = deps.env.CI_MNEMONIC_2; + if (!signer0Mnemonic?.trim()) throw new Error("CI_MNEMONIC_1 is required for proxy lifecycle hygiene signing"); + if (!signer1Mnemonic?.trim()) throw new Error("CI_MNEMONIC_2 is required for proxy lifecycle hygiene signing"); + + for (const proxy of initialProxies) { + let finalTxHash: string | undefined; + let finalTransactionId: string | undefined; + let finalPhase: "sweep" | "burn" | undefined; + const cleanupTransactions: Record[] = []; + let dRepDeregisterTransaction: Record | undefined; + + const dRepInfo = await fetchProxyDRepInfo({ + ctx: args.ctx, + walletId: wallet.walletId, + address: bot.paymentAddress, + proxyId: proxy.id, + token, + requestJsonFn: deps.requestJson, + }); + if (dRepInfo.active) { + const [walletUtxos, collateralUtxos] = await Promise.all([ + deps.fetchFreeUtxos({ + ctx: args.ctx, + walletId: wallet.walletId, + token, + address: bot.paymentAddress, + fresh: true, + }), + deps.fetchKeyAddressUtxos({ ctx: args.ctx, address: bot.paymentAddress }), + ]); + const selection = selectAuthTokenRefsWithMinLovelace({ + walletUtxos, + collateralUtxos, + authTokenId: proxy.authTokenId, + requiredLovelace: PROXY_ACTION_REQUIRED_LOVELACE + PROXY_ACTION_FEE_BUFFER_LOVELACE, + context: "proxy hygiene DRep deregister", + }); + const { requestRefs, selectionArtifacts } = splitProxyActionSelection(selection); + const response = await deps.requestJson({ + url: `${args.ctx.apiBaseUrl}/api/v1/proxyDRepCertificate`, + method: "POST", + token, + body: { + walletId: wallet.walletId, + address: bot.paymentAddress, + proxyId: proxy.id, + ...requestRefs, + action: "deregister", + description: `CI proxy full lifecycle hygiene DRep deregister (${args.walletType})`, + }, + }); + if (response.status !== 201) { + throw new Error(`proxyDRepCertificate hygiene failed (${response.status}): ${stringifyRedacted(response.data)}`); + } + + const 
txId = getTransactionId(response.data); + if (!txId) { + throw new Error(`proxyDRepCertificate hygiene response did not include a transaction id: ${stringifyRedacted(response.data)}`); + } + let txHash = getSubmittedTxHash(response.data); + + const signer0Result = await deps.runSigningFlow({ + ctx: args.ctx, + mnemonic: signer0Mnemonic, + signWalletType: args.walletType, + signerIndex: 0, + signBroadcast: false, + preferredTransactionId: txId, + requireBroadcastSuccess: false, + }); + const signer1Result = await deps.runSigningFlow({ + ctx: args.ctx, + mnemonic: signer1Mnemonic, + signWalletType: args.walletType, + signerIndex: 1, + signBroadcast: true, + preferredTransactionId: txId, + requireBroadcastSuccess: true, + }); + txHash = signer1Result.txHash ?? txHash; + + const confirmation = await deps.pollUntilUtxosConsumed({ + ctx: args.ctx, + walletId: wallet.walletId, + token, + address: bot.paymentAddress, + spentUtxoRefs: requestRefs.utxoRefs, + }); + dRepDeregisterTransaction = { + dRepId: dRepInfo.dRepId, + transactionId: txId, + txHash, + selectedUtxoRefs: requestRefs.utxoRefs, + selectionArtifacts, + confirmationAttempts: confirmation.attempts, + signer0Status: signer0Result.status, + signer1Status: signer1Result.status, + }; + } + + for (let pass = 0; pass < 2; pass += 1) { + const [walletUtxos, collateralUtxos] = await Promise.all([ + deps.fetchFreeUtxos({ + ctx: args.ctx, + walletId: wallet.walletId, + token, + address: bot.paymentAddress, + fresh: true, + }), + deps.fetchKeyAddressUtxos({ ctx: args.ctx, address: bot.paymentAddress }), + ]); + const selection = selectAuthTokenRefs({ + walletUtxos, + collateralUtxos, + authTokenId: proxy.authTokenId, + includeAllAuthTokens: true, + }); + const response = await deps.requestJson({ + url: `${args.ctx.apiBaseUrl}/api/v1/proxyCleanup`, + method: "POST", + token, + body: { + walletId: wallet.walletId, + address: bot.paymentAddress, + proxyId: proxy.id, + ...selection, + deactivateProxy: true, + description: `CI 
proxy full lifecycle hygiene (${args.walletType})`, + }, + }); + if (response.status !== 201) { + throw new Error(`proxyCleanup hygiene failed (${response.status}): ${stringifyRedacted(response.data)}`); + } + + const txId = getTransactionId(response.data); + if (!txId) { + throw new Error(`proxyCleanup hygiene response did not include a transaction id: ${stringifyRedacted(response.data)}`); + } + finalTransactionId = txId; + finalTxHash = getSubmittedTxHash(response.data); + finalPhase = getCleanupPhase(response.data); + + const signer0Result = await deps.runSigningFlow({ + ctx: args.ctx, + mnemonic: signer0Mnemonic, + signWalletType: args.walletType, + signerIndex: 0, + signBroadcast: false, + preferredTransactionId: txId, + requireBroadcastSuccess: false, + }); + const signer1Result = await deps.runSigningFlow({ + ctx: args.ctx, + mnemonic: signer1Mnemonic, + signWalletType: args.walletType, + signerIndex: 1, + signBroadcast: true, + preferredTransactionId: txId, + requireBroadcastSuccess: true, + }); + finalTxHash = signer1Result.txHash ?? finalTxHash; + + const confirmation = await deps.pollUntilUtxosConsumed({ + ctx: args.ctx, + walletId: wallet.walletId, + token, + address: bot.paymentAddress, + spentUtxoRefs: selection.utxoRefs, + }); + cleanupTransactions.push({ + phase: finalPhase, + transactionId: txId, + txHash: finalTxHash, + selectedUtxoRefs: selection.utxoRefs, + confirmationAttempts: confirmation.attempts, + signer0Status: signer0Result.status, + signer1Status: signer1Result.status, + }); + + if (finalPhase === "burn") break; + } + + if (finalPhase !== "burn") { + throw new Error(`proxy hygiene could not reach burn phase for active proxy ${proxy.id}`); + } + + const finalizeResponse = await deps.requestJson<{ proxy?: ProxyRow; error?: string }>({ + url: `${args.ctx.apiBaseUrl}/api/v1/proxyCleanupFinalize`, + method: "POST", + token, + body: { + walletId: wallet.walletId, + address: bot.paymentAddress, + proxyId: proxy.id, + txHash: finalTxHash ?? 
finalTransactionId ?? "submitted", + }, + retries: 3, + }); + if (finalizeResponse.status !== 201 || finalizeResponse.data?.proxy?.isActive !== false) { + throw new Error(`proxyCleanupFinalize hygiene failed (${finalizeResponse.status}): ${stringifyRedacted(finalizeResponse.data)}`); + } + + const remainingProxies = await listActiveProxies({ + ctx: args.ctx, + walletId: wallet.walletId, + address: bot.paymentAddress, + token, + requestJsonFn: deps.requestJson, + }); + if (remainingProxies.some((candidate) => candidate.id === proxy.id)) { + throw new Error(`hygiene-cleaned proxy ${proxy.id} is still listed as active`); + } + + cleaned.push({ + proxyId: proxy.id, + authTokenId: proxy.authTokenId, + proxyAddress: proxy.proxyAddress, + dRep: { + dRepId: dRepInfo.dRepId, + wasActive: dRepInfo.active, + deregisterTransaction: dRepDeregisterTransaction, + }, + finalTxHash, + cleanupTransactions, + }); + } + + return { + message: `proxy full lifecycle hygiene cleaned ${cleaned.length} active proxy/proxies for ${args.walletType}`, + artifacts: normalizeJsonArtifact({ walletId: wallet.walletId, cleaned, noOp: false }) as Record, + }; +} + +function createExpectedStatusStep(args: { + id: string; + description: string; + method: "GET" | "POST"; + url: (ctx: CIBootstrapContext) => string; + token?: (ctx: CIBootstrapContext) => Promise; + body?: (ctx: CIBootstrapContext) => Record; + expectedStatus: number; + validate?: (data: unknown) => void; +}): RouteStep { + return { + id: args.id, + description: args.description, + severity: "critical", + execute: async (ctx) => { + const token = args.token ? 
await args.token(ctx) : undefined; + const response = await requestJson<{ error?: string }>({ + url: args.url(ctx), + method: args.method, + token, + body: args.body?.(ctx), + }); + if (response.status !== args.expectedStatus) { + throw new Error( + `${args.id} expected ${args.expectedStatus}, got ${response.status}: ${stringifyRedacted(response.data)}`, + ); + } + args.validate?.(response.data); + return { message: `${args.id} returned expected ${args.expectedStatus}` }; + }, + }; +} + +export function createScenarioProxySmoke(ctx: CIBootstrapContext): Scenario { + return { + id: "scenario.proxy-smoke", + description: "Proxy bot API smoke and negative validation checks", + steps: [ + ...ctx.walletTypes.map((walletType) => { + const wallet = getWalletByType(ctx, walletType); + return createExpectedStatusStep({ + id: `v1.proxies.missingToken.${walletType}`, + description: `Assert /api/v1/proxies rejects missing token (${walletType})`, + method: "GET", + url: (runCtx) => { + const target = wallet ?? getWalletByType(runCtx, walletType); + if (!target) throw new Error(`Missing ${walletType} wallet`); + const address = target.signerAddresses[0] ?? runCtx.signerAddresses[0] ?? 
""; + return `${runCtx.apiBaseUrl}/api/v1/proxies?walletId=${encodeURIComponent(target.walletId)}&address=${encodeURIComponent(address)}`; + }, + expectedStatus: 401, + }); + }), + ...ctx.walletTypes.map((walletType) => + createExpectedStatusStep({ + id: `v1.proxies.list.${walletType}`, + description: `Assert /api/v1/proxies returns active proxy list (${walletType})`, + method: "GET", + token: async (runCtx) => authenticateBot({ ctx: runCtx, bot: getDefaultBot(runCtx) }), + url: (runCtx) => { + const wallet = getWalletByType(runCtx, walletType); + if (!wallet) throw new Error(`Missing ${walletType} wallet`); + const bot = getDefaultBot(runCtx); + return `${runCtx.apiBaseUrl}/api/v1/proxies?walletId=${encodeURIComponent(wallet.walletId)}&address=${encodeURIComponent(bot.paymentAddress)}`; + }, + expectedStatus: 200, + validate: (data) => { + if (!Array.isArray(data)) { + throw new Error(`v1.proxies.list.${walletType} expected array response: ${stringifyRedacted(data)}`); + } + }, + }), + ), + ...ctx.walletTypes.map((walletType) => + createExpectedStatusStep({ + id: `v1.proxies.addressMismatch.${walletType}`, + description: `Assert /api/v1/proxies rejects address mismatch (${walletType})`, + method: "GET", + token: async (runCtx) => authenticateBot({ ctx: runCtx, bot: getDefaultBot(runCtx) }), + url: (runCtx) => { + const wallet = getWalletByType(runCtx, walletType); + if (!wallet) throw new Error(`Missing ${walletType} wallet`); + const bot = getDefaultBot(runCtx); + const mismatch = runCtx.bots.find((candidate) => candidate.id !== bot.id)?.paymentAddress ?? 
`${bot.paymentAddress}x`; + return `${runCtx.apiBaseUrl}/api/v1/proxies?walletId=${encodeURIComponent(wallet.walletId)}&address=${encodeURIComponent(mismatch)}`; + }, + expectedStatus: 403, + }), + ), + ...[ + "proxySetup", + "proxySetupFinalize", + "proxySpend", + "proxyDRepCertificate", + "proxyVote", + "proxyCleanup", + "proxyCleanupFinalize", + ].map((route) => + createExpectedStatusStep({ + id: `v1.${route}.malformedBody`, + description: `Assert /api/v1/${route} rejects malformed body before chain work`, + method: "POST", + token: async (runCtx) => authenticateBot({ ctx: runCtx, bot: getDefaultBot(runCtx) }), + url: (runCtx) => `${runCtx.apiBaseUrl}/api/v1/${route}`, + body: (runCtx) => ({ + walletId: runCtx.wallets[0]?.walletId ?? "missing-wallet", + address: getDefaultBot(runCtx).paymentAddress, + ...(route === "proxySetup" ? { initialProxyLovelace: "0" } : {}), + }), + expectedStatus: 400, + }), + ), + ], + }; +} + +function createSignStep(args: { + id: string; + description: string; + walletType: CIWalletType; + signerIndex: ProxyLifecycleSignerIndex; + mnemonicEnvName: ProxyLifecycleMnemonicEnvName; + signBroadcast: boolean; + getTransactionId: () => string | undefined; + setTxHash?: (txHash: string | undefined) => void; + shouldSkip?: () => boolean; +}): RouteStep { + return { + id: args.id, + description: args.description, + severity: "critical", + execute: async (ctx) => { + if (args.shouldSkip?.()) { + return { message: "Signing skipped", artifacts: { skipped: true } }; + } + const txId = args.getTransactionId(); + if (!txId) { + return { message: "No pending transaction id; signing skipped", artifacts: { skipped: true } }; + } + const mnemonic = process.env[args.mnemonicEnvName]; + if (!mnemonic?.trim()) { + throw new Error(`${args.mnemonicEnvName} is required for proxy lifecycle signing`); + } + const result = await runSigningFlow({ + ctx, + mnemonic, + signWalletType: args.walletType, + signerIndex: args.signerIndex, + signBroadcast: 
args.signBroadcast && boolFromEnv(process.env.SIGN_BROADCAST, true),
        preferredTransactionId: txId,
        requireBroadcastSuccess: args.signBroadcast,
      });
      args.setTxHash?.(result.txHash);
      return {
        message: `Proxy lifecycle sign signerIndex=${args.signerIndex} status=${result.status} submitted=${String(result.submitted)}`,
        artifacts: result as unknown as Record,
      };
    },
  };
}

/**
 * Returns the trimmed tx hash recorded after the proxy setup broadcast.
 * Throws (naming the pending transaction id when known) if no non-blank hash
 * was recorded, i.e. the signer step never actually broadcast the setup.
 */
export function requireSetupTxHash(runtime: {
  setupTransactionId?: string;
  setupTxHash?: string;
}): string {
  const txHash = runtime.setupTxHash?.trim();
  if (txHash) return txHash;

  throw new Error(
    `proxy setup was not broadcast; signer step returned submitted=false for transaction ${runtime.setupTransactionId ?? "unknown"}`,
  );
}

/**
 * Builds the ordered setup phase of the proxy lifecycle scenario:
 *   1. propose  — select refs, POST /proxySetup, record ids in `runtime`
 *   2. signer0  — collateral witness (CI_MNEMONIC_1, no broadcast)
 *   3. signer1  — final witness + broadcast (CI_MNEMONIC_2)
 *   4. finalize — wait for inputs to be consumed, POST /proxySetupFinalize
 *   5. assert the finalized proxy appears in /proxies
 * Steps communicate exclusively through the shared mutable `runtime` object.
 */
function createSetupLifecycleSteps(args: {
  walletType: CIWalletType;
  runtime: {
    setup?: ProxySetup;
    proxyId?: string;
    setupTransactionId?: string;
    setupTxHash?: string;
    setupUtxoRefs?: UtxoRef[];
  };
}): RouteStep[] {
  const { walletType, runtime } = args;
  return [
    {
      id: `v1.proxy.lifecycle.setup.propose.${walletType}`,
      description: `Build proxy setup transaction (${walletType})`,
      severity: "critical",
      execute: async (ctx) => {
        const wallet = getWalletByType(ctx, walletType);
        if (!wallet) throw new Error(`Missing ${walletType} wallet`);
        const bot = getDefaultBot(ctx);
        const token = await authenticateBot({ ctx, bot });
        // Funding UTxOs come from the API; collateral comes straight from chain.
        const [walletUtxos, collateralUtxos] = await Promise.all([
          fetchFreeUtxos({ ctx, walletId: wallet.walletId, token, address: bot.paymentAddress, fresh: true }),
          fetchKeyAddressUtxos({ ctx, address: bot.paymentAddress }),
        ]);
        const refs = selectSetupRefs({ walletUtxos, collateralUtxos });
        const response = await requestJson<{ transaction?: unknown; setup?: ProxySetup; error?: string }>({
          url: `${ctx.apiBaseUrl}/api/v1/proxySetup`,
          method: "POST",
          token,
          body: {
            walletId: wallet.walletId,
            address: bot.paymentAddress,
            ...refs,
            initialProxyLovelace: LIFECYCLE_PROXY_LOVELACE.toString(),
            description: `CI proxy setup (${walletType})`,
          },
        });
        if (response.status !== 201 || !response.data?.setup) {
          throw new Error(`proxySetup failed (${response.status}): ${stringifyRedacted(response.data)}`);
        }
        // Stash everything later steps need on the shared runtime object.
        runtime.setup = response.data.setup;
        runtime.setupUtxoRefs = refs.utxoRefs;
        runtime.setupTransactionId = getTransactionId(response.data);
        runtime.setupTxHash = getSubmittedTxHash(response.data);
        return {
          message: `proxySetup created setup for ${walletType}`,
          artifacts: {
            walletId: wallet.walletId,
            setup: runtime.setup,
            transactionId: runtime.setupTransactionId,
            txHash: runtime.setupTxHash,
            collateralRef: refs.collateralRef,
            collateralOwnerSignerIndex: PROXY_LIFECYCLE_COLLATERAL_SIGNER_INDEX,
            signerIndexes: [...PROXY_LIFECYCLE_SIGNER_INDEXES],
          },
        };
      },
    },
    createSignStep({
      id: `v1.proxy.lifecycle.setup.signer0.${walletType}`,
      description: `Signer index 0 adds collateral witness for proxy setup (${walletType})`,
      walletType,
      signerIndex: 0,
      mnemonicEnvName: "CI_MNEMONIC_1",
      signBroadcast: false,
      getTransactionId: () => runtime.setupTransactionId,
    }),
    createSignStep({
      id: `v1.proxy.lifecycle.setup.signer1.${walletType}`,
      description: `Signer index 1 broadcasts proxy setup (${walletType})`,
      walletType,
      signerIndex: 1,
      mnemonicEnvName: "CI_MNEMONIC_2",
      signBroadcast: true,
      getTransactionId: () => runtime.setupTransactionId,
      // Keep the propose-time hash if the signer did not return one.
      setTxHash: (txHash) => {
        runtime.setupTxHash = txHash ?? runtime.setupTxHash;
      },
    }),
    {
      id: `v1.proxy.lifecycle.setup.finalize.${walletType}`,
      description: `Finalize confirmed proxy setup (${walletType})`,
      severity: "critical",
      execute: async (ctx) => {
        const wallet = getWalletByType(ctx, walletType);
        if (!wallet || !runtime.setup) throw new Error("Missing wallet or proxy setup metadata");
        const bot = getDefaultBot(ctx);
        const token = await authenticateBot({ ctx, bot });
        const setupTxHash = requireSetupTxHash(runtime);
        // Wait until the setup inputs disappear from freeUtxos before finalizing.
        if (runtime.setupUtxoRefs?.length && runtime.setupTransactionId) {
          await pollUntilUtxosConsumed({ ctx, walletId: wallet.walletId, token, address: bot.paymentAddress, spentUtxoRefs: runtime.setupUtxoRefs });
        }
        const response = await requestJson<{ proxy?: ProxyRow; error?: string }>({
          url: `${ctx.apiBaseUrl}/api/v1/proxySetupFinalize`,
          method: "POST",
          token,
          body: {
            walletId: wallet.walletId,
            address: bot.paymentAddress,
            txHash: setupTxHash,
            ...runtime.setup,
            description: `CI proxy setup (${walletType})`,
          },
          // NOTE(review): retries presumably cover indexing lag after
          // broadcast — confirm requestJson retry semantics.
          retries: 3,
        });
        if (response.status !== 201 || !response.data?.proxy?.id) {
          throw new Error(`proxySetupFinalize failed (${response.status}): ${stringifyRedacted(response.data)}`);
        }
        runtime.proxyId = response.data.proxy.id;
        return { message: `proxySetupFinalize created proxy ${runtime.proxyId}`, artifacts: { proxy: response.data.proxy } };
      },
    },
    {
      id: `v1.proxy.lifecycle.proxies.active.${walletType}`,
      description: `Assert finalized proxy is listed (${walletType})`,
      severity: "critical",
      execute: async (ctx) => {
        const wallet = getWalletByType(ctx, walletType);
        if (!wallet || !runtime.proxyId) throw new Error("Missing wallet or proxy id");
        const bot = getDefaultBot(ctx);
        const token = await authenticateBot({ ctx, bot });
        const response = await requestJson({
          url: `${ctx.apiBaseUrl}/api/v1/proxies?walletId=${encodeURIComponent(wallet.walletId)}&address=${encodeURIComponent(bot.paymentAddress)}`,
          method: "GET",
token, + }); + if (response.status !== 200 || !Array.isArray(response.data) || !response.data.some((proxy) => proxy.id === runtime.proxyId)) { + throw new Error(`proxies did not include finalized proxy (${response.status}): ${stringifyRedacted(response.data)}`); + } + return { message: `proxies includes active proxy ${runtime.proxyId}`, artifacts: { proxyId: runtime.proxyId } }; + }, + }, + ]; +} + +function createProxyActionStep(args: { + id: string; + description: string; + walletType: CIWalletType; + endpoint: "proxySpend" | "proxyDRepCertificate" | "proxyVote" | "proxyCleanup"; + runtime: { + setup?: ProxySetup; + proxyId?: string; + activeProposals?: ActiveProposal[]; + actionTransactionId?: string; + actionTxHash?: string; + actionUtxoRefs?: UtxoRef[]; + cleanupPhase?: "sweep" | "burn"; + cleanupBurnSkipped?: boolean; + cleanupBurnTransactionId?: string; + }; + buildBody: (ctx: CIBootstrapContext, refs: ProxyActionRequestRefs) => Record | null; + selectRefs?: (args: { walletUtxos: ScriptUtxo[]; collateralUtxos: ScriptUtxo[]; authTokenId: string }) => ProxyActionSelection; + includeAllAuthTokens?: boolean; + shouldSkip?: () => boolean; + onSkip?: () => void; + onSuccess?: () => void; + beforeResolveRefs?: (ctx: CIBootstrapContext) => Promise; +}): RouteStep { + return { + id: args.id, + description: args.description, + severity: "critical", + execute: async (ctx) => { + const wallet = getWalletByType(ctx, args.walletType); + if (!wallet || !args.runtime.proxyId || !args.runtime.setup) throw new Error("Missing proxy lifecycle runtime"); + if (args.shouldSkip?.()) { + args.onSkip?.(); + return { message: `${args.endpoint} skipped`, artifacts: { skipped: true } }; + } + const bot = getDefaultBot(ctx); + const token = await authenticateBot({ ctx, bot }); + await args.beforeResolveRefs?.(ctx); + const [walletUtxos, collateralUtxos] = await Promise.all([ + fetchFreeUtxos({ ctx, walletId: wallet.walletId, token, address: bot.paymentAddress, fresh: true }), + 
fetchKeyAddressUtxos({ ctx, address: bot.paymentAddress }), + ]); + const selection = + args.selectRefs?.({ walletUtxos, collateralUtxos, authTokenId: args.runtime.setup.authTokenId }) ?? + selectAuthTokenRefs({ + walletUtxos, + collateralUtxos, + authTokenId: args.runtime.setup.authTokenId, + includeAllAuthTokens: args.includeAllAuthTokens, + }); + const { requestRefs, selectionArtifacts } = splitProxyActionSelection(selection); + args.runtime.actionTransactionId = undefined; + args.runtime.actionTxHash = undefined; + args.runtime.actionUtxoRefs = undefined; + const extraBody = args.buildBody(ctx, requestRefs); + if (!extraBody) { + return { message: `${args.endpoint} skipped`, artifacts: { skipped: true } }; + } + const response = await requestJson({ + url: `${ctx.apiBaseUrl}/api/v1/${args.endpoint}`, + method: "POST", + token, + body: { + walletId: wallet.walletId, + address: bot.paymentAddress, + proxyId: args.runtime.proxyId, + ...requestRefs, + ...extraBody, + }, + }); + if (response.status !== 201) { + throw new Error(`${args.endpoint} failed (${response.status}): ${stringifyRedacted(response.data)}`); + } + args.runtime.actionTransactionId = getTransactionId(response.data); + args.runtime.actionTxHash = getSubmittedTxHash(response.data); + args.runtime.actionUtxoRefs = requestRefs.utxoRefs; + if (args.endpoint === "proxyCleanup") { + args.runtime.cleanupPhase = getCleanupPhase(response.data); + } + args.onSuccess?.(); + const hasSelectionArtifacts = Object.keys(selectionArtifacts).length > 0; + return { + message: `${args.endpoint} transaction created`, + artifacts: { + transactionId: args.runtime.actionTransactionId, + txHash: args.runtime.actionTxHash, + cleanupPhase: args.runtime.cleanupPhase, + collateralRef: requestRefs.collateralRef, + collateralOwnerSignerIndex: PROXY_LIFECYCLE_COLLATERAL_SIGNER_INDEX, + signerIndexes: [...PROXY_LIFECYCLE_SIGNER_INDEXES], + ...(hasSelectionArtifacts ? 
{ selectionArtifacts } : {}), + }, + }; + }, + }; +} + +function createActionSigningSteps(args: { + prefix: string; + walletType: CIWalletType; + runtime: { actionTransactionId?: string; actionTxHash?: string }; + shouldSkip?: () => boolean; +}): RouteStep[] { + return [ + createSignStep({ + id: `${args.prefix}.signer0`, + description: `${args.prefix} signer index 0 collateral witness`, + walletType: args.walletType, + signerIndex: 0, + mnemonicEnvName: "CI_MNEMONIC_1", + signBroadcast: false, + getTransactionId: () => args.runtime.actionTransactionId, + shouldSkip: args.shouldSkip, + }), + createSignStep({ + id: `${args.prefix}.signer1`, + description: `${args.prefix} signer index 1 broadcast`, + walletType: args.walletType, + signerIndex: 1, + mnemonicEnvName: "CI_MNEMONIC_2", + signBroadcast: true, + getTransactionId: () => args.runtime.actionTransactionId, + shouldSkip: args.shouldSkip, + setTxHash: (txHash) => { + args.runtime.actionTxHash = txHash ?? args.runtime.actionTxHash; + }, + }), + ]; +} + +function createWaitForActionConfirmationStep(args: { + id: string; + description: string; + walletType: CIWalletType; + runtime: { actionTransactionId?: string; actionUtxoRefs?: UtxoRef[] }; + shouldSkip?: () => boolean; +}): RouteStep { + return { + id: args.id, + description: args.description, + severity: "critical", + execute: async (ctx) => { + if (args.shouldSkip?.() || shouldSkipActionConfirmation(args.runtime)) { + return { message: "Confirmation wait skipped", artifacts: { skipped: true } }; + } + const wallet = getWalletByType(ctx, args.walletType); + if (!wallet) throw new Error(`Missing ${args.walletType} wallet`); + const bot = getDefaultBot(ctx); + const token = await authenticateBot({ ctx, bot }); + const result = await pollUntilUtxosConsumed({ + ctx, + walletId: wallet.walletId, + token, + address: bot.paymentAddress, + spentUtxoRefs: args.runtime.actionUtxoRefs!, + }); + return { + message: `Confirmed proxy action inputs consumed after 
${result.attempts} attempt(s)`,
        artifacts: {
          transactionId: args.runtime.actionTransactionId,
          attempts: result.attempts,
        },
      };
    },
  };
}

/** Step factory: run the pre-scenario hygiene pass that cleans stale proxies. */
function createProxyFullLifecycleHygieneStep(walletType: CIWalletType): RouteStep {
  return {
    id: `v1.proxy.full.hygiene.${walletType}`,
    description: "Clean stale active proxy lifecycle rows before starting",
    severity: "critical",
    execute: async (ctx) => runProxyFullLifecycleHygiene({ ctx, walletType }),
  };
}

/** Step factory: rebuild missing proxy DB rows from on-chain wallet evidence. */
function createProxyFullLifecycleChainRecoveryStep(walletType: CIWalletType): RouteStep {
  return {
    id: `v1.proxy.full.recoverFromChain.${walletType}`,
    description: "Recover stale proxy rows from on-chain CI wallet evidence",
    severity: "critical",
    execute: async (ctx) => {
      const result = await recoverProxyRowsFromChainForWalletType({ ctx, walletType });
      return {
        message: result.recovered.length
          ? `recovered ${result.recovered.length} proxy row(s) from chain for ${walletType}`
          : `no proxy rows recovered from chain for ${walletType}`,
        artifacts: normalizeJsonArtifact(result) as Record,
      };
    },
  };
}

/** Step factory: adopt orphaned proxy rows left by historical CI wallets. */
function createProxyFullLifecycleAdoptionStep(walletType: CIWalletType): RouteStep {
  return {
    id: `v1.proxy.full.adoptOrphans.${walletType}`,
    description: "Adopt stale proxy rows from historical deterministic CI wallets",
    severity: "critical",
    execute: async (ctx) => {
      const result = await adoptProxyOrphansForWalletType({ ctx, walletType });
      return {
        message: result.adopted.length
          ? `adopted ${result.adopted.length} orphan proxy row(s) for ${walletType}`
          : `no orphan proxy rows adopted for ${walletType}`,
        artifacts: normalizeJsonArtifact(result) as Record,
      };
    },
  };
}

/**
 * Assembles the full proxy lifecycle scenario for one wallet type.
 * All steps share this single mutable `runtime` object; recovery, adoption,
 * and hygiene run first so the scenario starts from a clean slate.
 */
function createProxyFullLifecycleSteps(walletType: CIWalletType): RouteStep[] {
  // Shared cross-step state; populated progressively as steps execute.
  const runtime: {
    setup?: ProxySetup;
    proxyId?: string;
    setupTransactionId?: string;
    setupTxHash?: string;
    setupUtxoRefs?: UtxoRef[];
    actionTransactionId?: string;
    actionTxHash?: string;
    actionUtxoRefs?: UtxoRef[];
    activeProposals?: ActiveProposal[];
    attemptedVote?: boolean;
    cleanupPhase?: "sweep" | "burn";
    cleanupBurnSkipped?: boolean;
    cleanupBurnTransactionId?: string;
  } = {};

  return [
    createProxyFullLifecycleChainRecoveryStep(walletType),
    createProxyFullLifecycleAdoptionStep(walletType),
    createProxyFullLifecycleHygieneStep(walletType),
    {
      id: `v1.proxy.full.utxoShape.${walletType}`,
      description: "Ensure proxy full-lifecycle wallet has separate setup and collateral UTxOs",
      severity: "critical",
      execute: async (runCtx) => {
        const result = await ensureProxyLifecycleUtxoShape({ ctx: runCtx, walletType });
        return {
          message:
            result.status === "already-shaped"
              ?
`proxy full lifecycle UTxO shape already satisfied for ${walletType}` + : `proxy full lifecycle UTxO self-split confirmed for ${walletType}`, + artifacts: result as unknown as Record, + }; + }, + }, + { + id: `v1.proxy.full.preflight.${walletType}`, + description: "Verify proxy full-lifecycle ADA budget and UTxO shape", + severity: "critical", + execute: async (runCtx) => { + const wallet = getWalletByType(runCtx, walletType); + if (!wallet) throw new Error(`Missing ${walletType} wallet`); + const bot = getDefaultBot(runCtx); + const token = await authenticateBot({ ctx: runCtx, bot }); + const [walletUtxos, collateralUtxos] = await Promise.all([ + fetchFreeUtxos({ + ctx: runCtx, + walletId: wallet.walletId, + token, + address: bot.paymentAddress, + fresh: true, + }), + fetchKeyAddressUtxos({ ctx: runCtx, address: bot.paymentAddress }), + ]); + const result = assertProxyFullLifecyclePreflight({ + walletUtxos, + collateralUtxos, + }); + return { + message: `proxy full lifecycle preflight passed with ${formatAda(result.totalLovelace)} available and ${formatAda(result.requiredTotalLovelace)} required`, + artifacts: { + totalLovelace: result.totalLovelace.toString(), + largestUtxoLovelace: result.largestUtxoLovelace.toString(), + setupCandidates: result.setupCandidates, + keyCollateralCandidates: result.keyCollateralCandidates, + drepSelectableLovelace: result.drepSelectableLovelace.toString(), + drepRequiredLovelace: result.drepRequiredLovelace.toString(), + requiredTotalLovelace: result.requiredTotalLovelace.toString(), + }, + }; + }, + }, + ...createSetupLifecycleSteps({ walletType, runtime }), + createProxyActionStep({ + id: `v1.proxy.full.spend.propose.${walletType}`, + description: "Build proxy spend transaction", + walletType, + endpoint: "proxySpend", + runtime, + buildBody: (runCtx) => ({ + outputs: [{ address: getWalletByType(runCtx, walletType)?.walletAddress ?? 
"", unit: "lovelace", amount: PROXY_SPEND_LOVELACE.toString() }], + description: "CI proxy spend", + }), + }), + ...createActionSigningSteps({ prefix: `v1.proxy.full.spend.${walletType}`, walletType, runtime }), + createWaitForActionConfirmationStep({ + id: `v1.proxy.full.spend.confirmed.${walletType}`, + description: "Wait for proxy spend inputs to be confirmed consumed", + walletType, + runtime, + }), + createProxyActionStep({ + id: `v1.proxy.full.drepRegister.propose.${walletType}`, + description: "Build proxy DRep register transaction", + walletType, + endpoint: "proxyDRepCertificate", + runtime, + selectRefs: ({ walletUtxos, collateralUtxos, authTokenId }) => { + return selectDRepRegisterRefs({ + walletUtxos, + collateralUtxos, + authTokenId, + requiredLovelace: DREP_REGISTER_REQUIRED_LOVELACE + FULL_LIFECYCLE_FEE_BUFFER_LOVELACE, + }); + }, + buildBody: () => ({ + action: "register", + anchorUrl: getProxyDRepAnchorUrl(), + anchorJson: { name: "CI Proxy DRep", purpose: "route-chain" }, + description: "CI proxy DRep register", + }), + }), + ...createActionSigningSteps({ prefix: `v1.proxy.full.drepRegister.${walletType}`, walletType, runtime }), + createWaitForActionConfirmationStep({ + id: `v1.proxy.full.drepRegister.confirmed.${walletType}`, + description: "Wait for proxy DRep register inputs to be confirmed consumed", + walletType, + runtime, + }), + { + id: `v1.proxy.full.activeProposals.${walletType}`, + description: "Fetch active proposals for optional proxy vote", + severity: "critical", + execute: async (runCtx) => { + const bot = getDefaultBot(runCtx); + const token = await authenticateBot({ ctx: runCtx, bot }); + const response = await requestJson<{ proposals?: unknown[]; activeCount?: number; sourceCount?: number; error?: string }>({ + url: `${runCtx.apiBaseUrl}/api/v1/governanceActiveProposals?network=0&count=20&page=1&order=desc&details=false`, + method: "GET", + token, + }); + if (response.status !== 200) { + throw new 
Error(`governanceActiveProposals failed (${response.status}): ${stringifyRedacted(response.data)}`); + } + runtime.activeProposals = getDeterministicActiveProposals(response.data, 1); + return { + message: `selected ${runtime.activeProposals.length} active proposal(s) for optional proxy vote`, + artifacts: { selectedProposalIds: runtime.activeProposals.map((proposal) => proposal.proposalId) }, + }; + }, + }, + createProxyActionStep({ + id: `v1.proxy.full.vote.propose.${walletType}`, + description: "Build proxy vote transaction when proposals exist", + walletType, + endpoint: "proxyVote", + runtime, + selectRefs: ({ walletUtxos, collateralUtxos, authTokenId }) => + selectAuthTokenRefsWithMinLovelace({ + walletUtxos, + collateralUtxos, + authTokenId, + requiredLovelace: PROXY_ACTION_REQUIRED_LOVELACE + PROXY_ACTION_FEE_BUFFER_LOVELACE, + context: "proxy vote", + }), + buildBody: () => { + const proposal = runtime.activeProposals?.[0]; + if (!proposal) return null; + runtime.attemptedVote = true; + return { + votes: [{ proposalId: proposal.proposalId, voteKind: "Abstain" }], + description: "CI proxy vote", + }; + }, + }), + ...createActionSigningSteps({ prefix: `v1.proxy.full.vote.${walletType}`, walletType, runtime }), + createWaitForActionConfirmationStep({ + id: `v1.proxy.full.vote.confirmed.${walletType}`, + description: "Wait for proxy vote inputs to be confirmed consumed", + walletType, + runtime, + }), + createProxyActionStep({ + id: `v1.proxy.full.drepDeregister.propose.${walletType}`, + description: "Build proxy DRep deregister transaction", + walletType, + endpoint: "proxyDRepCertificate", + runtime, + selectRefs: ({ walletUtxos, collateralUtxos, authTokenId }) => + selectAuthTokenRefsWithMinLovelace({ + walletUtxos, + collateralUtxos, + authTokenId, + requiredLovelace: PROXY_ACTION_REQUIRED_LOVELACE + PROXY_ACTION_FEE_BUFFER_LOVELACE, + context: "proxy DRep deregister", + }), + buildBody: () => ({ + action: "deregister", + description: "CI proxy DRep 
deregister", + }), + }), + ...createActionSigningSteps({ prefix: `v1.proxy.full.drepDeregister.${walletType}`, walletType, runtime }), + createWaitForActionConfirmationStep({ + id: `v1.proxy.full.drepDeregister.confirmed.${walletType}`, + description: "Wait for proxy DRep deregister inputs to be confirmed consumed", + walletType, + runtime, + }), + createProxyActionStep({ + id: `v1.proxy.full.cleanup.initial.propose.${walletType}`, + description: "Build initial proxy cleanup transaction", + walletType, + endpoint: "proxyCleanup", + runtime, + includeAllAuthTokens: true, + buildBody: () => ({ + deactivateProxy: true, + description: "CI proxy cleanup", + }), + }), + ...createActionSigningSteps({ prefix: `v1.proxy.full.cleanup.initial.${walletType}`, walletType, runtime }), + createWaitForActionConfirmationStep({ + id: `v1.proxy.full.cleanup.initial.confirmed.${walletType}`, + description: "Wait for initial proxy cleanup inputs to be confirmed consumed", + walletType, + runtime, + }), + createProxyActionStep({ + id: `v1.proxy.full.cleanup.burn.propose.${walletType}`, + description: "Build proxy cleanup burn transaction after sweep", + walletType, + endpoint: "proxyCleanup", + runtime, + includeAllAuthTokens: true, + shouldSkip: () => shouldSkipCleanupBurnPropose(runtime), + onSkip: () => { + runtime.cleanupBurnSkipped = true; + runtime.cleanupBurnTransactionId = undefined; + }, + onSuccess: () => { + runtime.cleanupBurnSkipped = false; + runtime.cleanupBurnTransactionId = runtime.actionTransactionId; + }, + buildBody: () => ({ + deactivateProxy: true, + description: "CI proxy cleanup burn", + }), + }), + ...createActionSigningSteps({ + prefix: `v1.proxy.full.cleanup.burn.${walletType}`, + walletType, + runtime, + shouldSkip: () => shouldSkipCleanupBurnSigning(runtime), + }), + createWaitForActionConfirmationStep({ + id: `v1.proxy.full.cleanup.burn.confirmed.${walletType}`, + description: "Wait for proxy cleanup burn inputs to be confirmed consumed", + walletType, + 
runtime, + shouldSkip: () => shouldSkipCleanupBurnSigning(runtime), + }), + { + id: `v1.proxy.full.cleanup.finalize.${walletType}`, + description: "Finalize proxy cleanup and deactivate proxy", + severity: "critical", + execute: async (runCtx) => { + const wallet = getWalletByType(runCtx, walletType); + if (!wallet || !runtime.proxyId) throw new Error("Missing wallet or proxy id for cleanup finalize"); + const bot = getDefaultBot(runCtx); + const token = await authenticateBot({ ctx: runCtx, bot }); + if (runtime.actionUtxoRefs?.length && runtime.actionTransactionId) { + await pollUntilUtxosConsumed({ ctx: runCtx, walletId: wallet.walletId, token, address: bot.paymentAddress, spentUtxoRefs: runtime.actionUtxoRefs }); + } + const response = await requestJson<{ proxy?: ProxyRow; error?: string }>({ + url: `${runCtx.apiBaseUrl}/api/v1/proxyCleanupFinalize`, + method: "POST", + token, + body: { + walletId: wallet.walletId, + address: bot.paymentAddress, + proxyId: runtime.proxyId, + txHash: runtime.actionTxHash ?? runtime.actionTransactionId ?? 
"submitted", + }, + retries: 3, + }); + if (response.status !== 201 || response.data?.proxy?.isActive !== false) { + throw new Error(`proxyCleanupFinalize failed (${response.status}): ${stringifyRedacted(response.data)}`); + } + return { message: `proxy ${runtime.proxyId} deactivated after cleanup`, artifacts: { proxy: response.data.proxy } }; + }, + }, + { + id: `v1.proxy.full.cleanup.proxies.inactive.${walletType}`, + description: "Assert cleaned proxy is no longer listed as active", + severity: "critical", + execute: async (runCtx) => { + const wallet = getWalletByType(runCtx, walletType); + if (!wallet || !runtime.proxyId) throw new Error("Missing wallet or proxy id after cleanup"); + const bot = getDefaultBot(runCtx); + const token = await authenticateBot({ ctx: runCtx, bot }); + const response = await requestJson({ + url: `${runCtx.apiBaseUrl}/api/v1/proxies?walletId=${encodeURIComponent(wallet.walletId)}&address=${encodeURIComponent(bot.paymentAddress)}`, + method: "GET", + token, + }); + if (response.status !== 200 || !Array.isArray(response.data)) { + throw new Error(`proxies list failed after cleanup (${response.status}): ${stringifyRedacted(response.data)}`); + } + if (response.data.some((proxy) => proxy.id === runtime.proxyId)) { + throw new Error(`cleaned proxy ${runtime.proxyId} is still listed as active`); + } + return { message: `proxy ${runtime.proxyId} is no longer listed as active` }; + }, + }, + ]; +} + +export function createScenarioProxyFullLifecycle(ctx: CIBootstrapContext): Scenario { + const eligibleWalletTypes = PROXY_FULL_LIFECYCLE_WALLET_TYPES.filter( + (walletType) => + ctx.walletTypes.includes(walletType) && + ctx.wallets.some((wallet) => wallet.type === walletType), + ); + + const steps: RouteStep[] = eligibleWalletTypes.length + ? 
eligibleWalletTypes.flatMap((walletType) => createProxyFullLifecycleSteps(walletType)) + : [ + { + id: "v1.proxy.full.precondition", + description: "Assert proxy full lifecycle has an eligible wallet type", + severity: "critical", + execute: async () => { + throw new Error( + `scenario.proxy-full-lifecycle requires at least one of ${PROXY_FULL_LIFECYCLE_WALLET_TYPES.join(", ")} in CI_WALLET_TYPES`, + ); + }, + }, + ]; + + return { + id: "scenario.proxy-full-lifecycle", + description: "Proxy spend, governance, and cleanup lifecycle for legacy, hierarchical, and SDK wallets", + steps, + }; +} diff --git a/scripts/ci/scenarios/steps/template-route-step.ts b/scripts/ci/scenarios/steps/template-route-step.ts new file mode 100644 index 00000000..00374812 --- /dev/null +++ b/scripts/ci/scenarios/steps/template-route-step.ts @@ -0,0 +1,51 @@ +import type { CIBootstrapContext, RouteStep, StepRunResult } from "../../framework/types"; +import { requestJson } from "../../framework/http"; +import { getDefaultBot } from "../../framework/botContext"; +import { authenticateBot } from "../../framework/botAuth"; +import { stringifyRedacted } from "../../framework/redact"; + +/** + * Copy this file when adding a new route step. + * + * Suggested flow: + * 1) Rename the exported factory function. + * 2) Replace `id` and `description` with route-specific values. + * 3) Define deterministic inputs from context/env. + * 4) Perform request(s) with requestJson(). + * 5) Add strict assertions and return concise artifacts. 
+ */ +export function createTemplateRouteStep(): RouteStep { + return { + id: "template.route.step", + description: "Template step - replace with real route behavior", + severity: "critical", + execute: async (ctx: CIBootstrapContext): Promise => { + const bot = getDefaultBot(ctx); + const token = await authenticateBot({ ctx, bot }); + const wallet = ctx.wallets[0]; + if (!wallet) { + throw new Error("No wallets available in CI bootstrap context"); + } + + const response = await requestJson({ + url: `${ctx.apiBaseUrl}/api/v1/pendingTransactions?walletId=${encodeURIComponent(wallet.walletId)}&address=${encodeURIComponent(bot.paymentAddress)}`, + method: "GET", + token, + }); + + if (response.status !== 200) { + throw new Error( + `Template step expected 200, got ${response.status}: ${stringifyRedacted(response.data)}`, + ); + } + + return { + message: "Template route step passed", + artifacts: { + walletId: wallet.walletId, + status: response.status, + }, + }; + }, + }; +} diff --git a/scripts/ci/scenarios/steps/transferRing.ts b/scripts/ci/scenarios/steps/transferRing.ts new file mode 100644 index 00000000..17b00f8b --- /dev/null +++ b/scripts/ci/scenarios/steps/transferRing.ts @@ -0,0 +1,243 @@ +import type { CIBootstrapContext, CIWalletType, RouteStep, Scenario } from "../../framework/types"; +import { requestJson } from "../../framework/http"; +import { runSigningFlow } from "../flows/signingFlow"; +import { seedRealTransferTransaction } from "../flows/transferFlow"; +import { getDefaultBot } from "../../framework/botContext"; +import { authenticateBot } from "../../framework/botAuth"; +import { stringifyRedacted } from "../../framework/redact"; +import { boolFromEnv } from "../../framework/env"; + +export type TransferLegRuntime = { + fromWalletType: CIWalletType; + toWalletType: CIWalletType; + fromWalletId?: string; + transferTxId?: string; +}; + +function createSigningStep(args: { + id: string; + description: string; + signerIndex: number; + mnemonicEnvName: 
"CI_MNEMONIC_1" | "CI_MNEMONIC_2" | "CI_MNEMONIC_3"; + signWalletType?: string; + signBroadcast: boolean; + requireBroadcastSuccess: boolean; + preferredTransactionId?: () => string | undefined; +}): RouteStep { + return { + id: args.id, + description: args.description, + severity: "critical", + execute: async (ctx) => { + const mnemonic = process.env[args.mnemonicEnvName]; + if (!mnemonic || !mnemonic.trim()) { + throw new Error(`${args.mnemonicEnvName} is required for signing scenario`); + } + const result = await runSigningFlow({ + ctx, + mnemonic, + signWalletType: args.signWalletType ?? process.env.CI_SIGN_WALLET_TYPE ?? "legacy", + signerIndex: args.signerIndex, + signerLabel: `signer${args.signerIndex}`, + signBroadcast: args.signBroadcast && boolFromEnv(process.env.SIGN_BROADCAST, true), + preferredTransactionId: args.preferredTransactionId?.(), + requireBroadcastSuccess: args.requireBroadcastSuccess, + }); + return { + message: `signTransaction completed for ${result.walletType} (status=${result.status}, submitted=${String(result.submitted)})`, + artifacts: result as unknown as Record, + }; + }, + }; +} + +export function createScenarioRealTransferAndSign(runtime: { transferLegs: TransferLegRuntime[] }): Scenario { + return { + id: "scenario.real-transfer-and-sign", + description: "Build ring transfer txs across multisig wallets and sign+broadcast each leg", + steps: runtime.transferLegs.flatMap((leg, index) => { + const legName = `${leg.fromWalletType}To${leg.toWalletType}`; + const legOrdinal = index + 1; + return [ + { + id: `v1.addTransaction.realTransfer.${legName}`, + description: `Create ring leg ${legOrdinal} transfer (${leg.fromWalletType} -> ${leg.toWalletType})`, + severity: "critical" as const, + execute: async (ctx: CIBootstrapContext) => { + const mnemonic = process.env.CI_MNEMONIC_2; + if (!mnemonic || !mnemonic.trim()) { + throw new Error("CI_MNEMONIC_2 is required for transfer scenario"); + } + const transferResult = await 
seedRealTransferTransaction({ + ctx, + fromMnemonic: mnemonic, + fromWalletType: leg.fromWalletType, + toWalletType: leg.toWalletType, + transferLovelace: process.env.CI_TRANSFER_LOVELACE, + }); + leg.transferTxId = transferResult.transactionId; + leg.fromWalletId = transferResult.fromWalletId; + return { + message: `Real transfer tx created (${transferResult.transactionId}) for ${leg.fromWalletType} -> ${leg.toWalletType}`, + artifacts: transferResult as unknown as Record, + }; + }, + }, + { + id: `v1.pendingTransactions.ringTransfer.present.${legName}`, + description: `Assert ring leg ${legOrdinal} transaction is pending in source wallet`, + severity: "critical" as const, + execute: async (ctx: CIBootstrapContext) => { + const txId = leg.transferTxId; + const walletId = leg.fromWalletId; + if (!txId || !walletId) { + throw new Error(`Transfer runtime context missing for ring leg ${legName}`); + } + const bot = getDefaultBot(ctx); + const token = await authenticateBot({ ctx, bot }); + const response = await requestJson | { error?: string }>({ + url: `${ctx.apiBaseUrl}/api/v1/pendingTransactions?walletId=${encodeURIComponent(walletId)}&address=${encodeURIComponent(bot.paymentAddress)}`, + method: "GET", + token, + }); + if (response.status !== 200 || !Array.isArray(response.data)) { + throw new Error( + `pendingTransactions ring leg present check failed (${response.status}): ${stringifyRedacted(response.data)}`, + ); + } + const found = response.data.some((tx) => tx.id === txId); + if (!found) { + throw new Error(`Transfer tx ${txId} not found in pending for wallet ${walletId}`); + } + return { + message: `Transfer tx ${txId} is present in pending transactions`, + artifacts: { walletId, transactionId: txId, pendingCount: response.data.length }, + }; + }, + }, + createSigningStep({ + id: `v1.signTransaction.ringTransfer.signer1.${legName}`, + description: `Signer 1 adds witness without broadcast for ring leg ${legOrdinal}`, + signerIndex: 1, + mnemonicEnvName: 
"CI_MNEMONIC_2", + signWalletType: leg.fromWalletType, + signBroadcast: false, + requireBroadcastSuccess: false, + preferredTransactionId: () => leg.transferTxId, + }), + createSigningStep({ + id: `v1.signTransaction.ringTransfer.signer2.${legName}`, + description: `Signer 2 signs and broadcasts ring leg ${legOrdinal}`, + signerIndex: 2, + mnemonicEnvName: "CI_MNEMONIC_3", + signWalletType: leg.fromWalletType, + signBroadcast: true, + requireBroadcastSuccess: true, + preferredTransactionId: () => leg.transferTxId, + }), + { + id: `v1.pendingTransactions.ringTransfer.removed.${legName}`, + description: `Assert ring leg ${legOrdinal} transaction is cleared from pending`, + severity: "critical" as const, + execute: async (ctx: CIBootstrapContext) => { + const txId = leg.transferTxId; + const walletId = leg.fromWalletId; + if (!txId || !walletId) { + throw new Error(`Transfer runtime context missing for ring leg ${legName}`); + } + const bot = getDefaultBot(ctx); + const token = await authenticateBot({ ctx, bot }); + const response = await requestJson | { error?: string }>({ + url: `${ctx.apiBaseUrl}/api/v1/pendingTransactions?walletId=${encodeURIComponent(walletId)}&address=${encodeURIComponent(bot.paymentAddress)}`, + method: "GET", + token, + }); + if (response.status !== 200 || !Array.isArray(response.data)) { + throw new Error( + `pendingTransactions ring leg removed check failed (${response.status}): ${stringifyRedacted(response.data)}`, + ); + } + const stillPending = response.data.some((tx) => tx.id === txId); + if (stillPending) { + throw new Error(`Transfer tx ${txId} is still pending after sign+broadcast`); + } + return { + message: `Transfer tx ${txId} removed from pending transactions`, + artifacts: { walletId, transactionId: txId, pendingCount: response.data.length }, + }; + }, + }, + ]; + }), + }; +} + +export function createScenarioFinalAssertions(runtime: { transferLegs: TransferLegRuntime[] }): Scenario { + return { + id: "scenario.final-assertions", 
+ description: "Validate final state after transfer/sign route chain", + steps: [ + { + id: "v1.pendingTransactions.allRingTransfersRemoved", + description: "Assert all signed ring transfer transactions are no longer pending", + severity: "critical", + execute: async (ctx) => { + const bot = getDefaultBot(ctx); + const token = await authenticateBot({ ctx, bot }); + const checked: Array<{ walletId: string; transactionId: string; pendingCount: number }> = []; + for (const leg of runtime.transferLegs) { + const txId = leg.transferTxId; + const walletId = leg.fromWalletId; + if (!txId || !walletId) { + throw new Error( + `Transfer runtime context missing transaction/wallet id for ${leg.fromWalletType} -> ${leg.toWalletType}`, + ); + } + const response = await requestJson | { error?: string }>({ + url: `${ctx.apiBaseUrl}/api/v1/pendingTransactions?walletId=${encodeURIComponent(walletId)}&address=${encodeURIComponent(bot.paymentAddress)}`, + method: "GET", + token, + }); + if (response.status !== 200 || !Array.isArray(response.data)) { + throw new Error( + `pendingTransactions final assertion failed (${response.status}): ${stringifyRedacted(response.data)}`, + ); + } + const stillPending = response.data.some((tx) => tx.id === txId); + if (stillPending) { + throw new Error(`Transfer tx ${txId} is still pending after sign+broadcast`); + } + checked.push({ walletId, transactionId: txId, pendingCount: response.data.length }); + } + return { + message: `All ${checked.length} ring transfer txs are no longer present in pending transactions`, + artifacts: { checked }, + }; + }, + }, + { + id: "v1.walletIds.postTransfer", + description: "Assert wallet discovery remains consistent after transfer flow", + severity: "non-critical", + execute: async (ctx) => { + const bot = getDefaultBot(ctx); + const token = await authenticateBot({ ctx, bot }); + const response = await requestJson | { error?: string }>({ + url: 
`${ctx.apiBaseUrl}/api/v1/walletIds?address=${encodeURIComponent(bot.paymentAddress)}`, + method: "GET", + token, + }); + if (response.status !== 200 || !Array.isArray(response.data)) { + throw new Error( + `walletIds post-transfer failed (${response.status}): ${stringifyRedacted(response.data)}`, + ); + } + return { + message: `walletIds remains healthy after transfer (${response.data.length} wallets)`, + artifacts: { walletCount: response.data.length }, + }; + }, + }, + ], + }; +} diff --git a/scripts/ci/scenarios/steps/walletLifecycle.ts b/scripts/ci/scenarios/steps/walletLifecycle.ts new file mode 100644 index 00000000..4781844f --- /dev/null +++ b/scripts/ci/scenarios/steps/walletLifecycle.ts @@ -0,0 +1,129 @@ +import type { CIBootstrapContext, Scenario } from "../../framework/types"; +import { requestJson } from "../../framework/http"; +import { getDefaultBot } from "../../framework/botContext"; +import { authenticateBot } from "../../framework/botAuth"; +import { stringifyRedacted } from "../../framework/redact"; + +export function createScenarioCreateWallet(ctx: CIBootstrapContext): Scenario { + const runtime: { createdWalletId?: string } = {}; + return { + id: "scenario.create-wallet", + description: "Verify POST /api/v1/createWallet creates a wallet via the bot API", + steps: [ + { + id: "v1.createWallet.botCreate", + description: "Create a new multisig wallet via /api/v1/createWallet (bot-authenticated)", + severity: "critical", + execute: async (runCtx) => { + const bot = getDefaultBot(runCtx); + const token = await authenticateBot({ ctx: runCtx, bot }); + const signerAddresses = runCtx.signerAddresses.slice(0, 3).filter(Boolean); + if (signerAddresses.length < 1) { + throw new Error("createWallet: no signer addresses in bootstrap context"); + } + const numRequiredSigners = Math.min( + parseInt(process.env.CI_NUM_REQUIRED_SIGNERS ?? 
"2", 10), + signerAddresses.length, + ); + const response = await requestJson<{ + walletId?: string; + address?: string; + name?: string; + error?: string; + }>({ + url: `${runCtx.apiBaseUrl}/api/v1/createWallet`, + method: "POST", + token, + body: { + name: `CI create-wallet ${runCtx.createdAt}`, + signersAddresses: signerAddresses, + numRequiredSigners, + scriptType: "atLeast", + network: runCtx.networkId, + }, + }); + if (response.status !== 201) { + throw new Error( + `createWallet expected 201, got ${response.status}: ${stringifyRedacted(response.data)}`, + ); + } + if (typeof response.data.walletId !== "string" || !response.data.walletId) { + throw new Error("createWallet: response missing walletId"); + } + if (typeof response.data.address !== "string" || !response.data.address) { + throw new Error("createWallet: response missing address"); + } + runtime.createdWalletId = response.data.walletId; + return { + message: `createWallet succeeded: walletId=${response.data.walletId}`, + artifacts: { + walletId: response.data.walletId, + address: response.data.address, + name: response.data.name, + }, + }; + }, + }, + { + id: "v1.createWallet.appearsInWalletIds", + description: "Confirm created wallet appears in /api/v1/walletIds for the bot", + severity: "critical", + execute: async (runCtx) => { + if (!runtime.createdWalletId) { + throw new Error("createWallet.appearsInWalletIds: no walletId from prior step"); + } + const bot = getDefaultBot(runCtx); + const token = await authenticateBot({ ctx: runCtx, bot }); + const response = await requestJson | { error?: string }>({ + url: `${runCtx.apiBaseUrl}/api/v1/walletIds?address=${encodeURIComponent(bot.paymentAddress)}`, + method: "GET", + token, + }); + if (response.status !== 200 || !Array.isArray(response.data)) { + throw new Error( + `walletIds check after createWallet failed (${response.status}): ${stringifyRedacted(response.data)}`, + ); + } + const found = response.data.some( + (w) => w.walletId === 
runtime.createdWalletId, + ); + if (!found) { + throw new Error( + `createWallet: walletId ${runtime.createdWalletId} not found in walletIds after creation`, + ); + } + return { + message: `Created wallet ${runtime.createdWalletId} confirmed in walletIds`, + artifacts: { + walletId: runtime.createdWalletId, + totalWallets: response.data.length, + }, + }; + }, + }, + { + id: "v1.createWallet.cleanup", + description: "Delete the CI test wallet from the database (WalletBotAccess then Wallet)", + severity: "non-critical", + execute: async () => { + if (!runtime.createdWalletId) { + return { message: "createWallet.cleanup: no walletId to clean up; skipping" }; + } + const { PrismaClient } = await import("@prisma/client"); + const prisma = new PrismaClient(); + try { + // WalletBotAccess has no cascade relation — must be deleted before the Wallet row. + await prisma.walletBotAccess.deleteMany({ where: { walletId: runtime.createdWalletId } }); + await prisma.wallet.delete({ where: { id: runtime.createdWalletId } }); + return { + message: `createWallet cleanup: deleted wallet ${runtime.createdWalletId}`, + artifacts: { walletId: runtime.createdWalletId }, + }; + } finally { + await prisma.$disconnect(); + } + }, + }, + ], + }; +} diff --git a/skills-lock.json b/skills-lock.json new file mode 100644 index 00000000..eae98e16 --- /dev/null +++ b/skills-lock.json @@ -0,0 +1,10 @@ +{ + "version": 1, + "skills": { + "supabase-postgres-best-practices": { + "source": "supabase/agent-skills", + "sourceType": "github", + "computedHash": "9c87c315aed143ee3b34bec8117100f5035e0df09e6b23e1ecc772cff434c9ad" + } + } +} diff --git a/src/__tests__/addTransaction.bot.test.ts b/src/__tests__/addTransaction.bot.test.ts new file mode 100644 index 00000000..484f7cf3 --- /dev/null +++ b/src/__tests__/addTransaction.bot.test.ts @@ -0,0 +1,119 @@ +import { beforeAll, beforeEach, describe, expect, it, jest } from "@jest/globals"; +import type { NextApiRequest, NextApiResponse } from "next"; +import 
{ BOT_TEST_ADDRESS, createMockResponse, makeBearerAuth, makeBotJwtPayload } from "./apiTestUtils"; + +const addCorsHeadersMock = jest.fn<(res: NextApiResponse) => void>(); +const corsMock = jest.fn<(req: NextApiRequest, res: NextApiResponse) => Promise>(); +const applyRateLimitMock = jest.fn<(req: NextApiRequest, res: NextApiResponse) => boolean>(); +const applyBotRateLimitMock = jest.fn<(req: NextApiRequest, res: NextApiResponse, botId: string) => boolean>(); +const enforceBodySizeMock = jest.fn<(req: NextApiRequest, res: NextApiResponse, maxBytes: number) => boolean>(); +const verifyJwtMock: jest.Mock = jest.fn(); +const isBotJwtMock: jest.Mock = jest.fn(); +const assertBotWalletAccessMock: jest.Mock = jest.fn(); +const createTransactionMock: jest.Mock = jest.fn(); +const transactionFromHexMock: jest.Mock = jest.fn(); + +jest.mock("@/lib/cors", () => ({ + __esModule: true, + addCorsCacheBustingHeaders: addCorsHeadersMock, + cors: corsMock, +}), { virtual: true }); + +jest.mock("@/lib/security/requestGuards", () => ({ + __esModule: true, + applyRateLimit: applyRateLimitMock, + applyBotRateLimit: applyBotRateLimitMock, + enforceBodySize: enforceBodySizeMock, +}), { virtual: true }); + +jest.mock("@/lib/verifyJwt", () => ({ + __esModule: true, + verifyJwt: verifyJwtMock, + isBotJwt: isBotJwtMock, +}), { virtual: true }); + +jest.mock("@/lib/auth/botAccess", () => ({ + __esModule: true, + assertBotWalletAccess: assertBotWalletAccessMock, +}), { virtual: true }); + +jest.mock("@/utils/get-provider", () => ({ + __esModule: true, + getProvider: () => ({ submitTx: jest.fn() }), +}), { virtual: true }); + +jest.mock("@meshsdk/core-csl", () => ({ + __esModule: true, + csl: { + Transaction: { + from_hex: transactionFromHexMock, + }, + }, +}), { virtual: true }); + +jest.mock("@/server/db", () => ({ + __esModule: true, + db: { + transaction: { create: createTransactionMock }, + wallet: { findUnique: jest.fn() }, + }, +}), { virtual: true }); + +let handler: (req: 
NextApiRequest, res: NextApiResponse) => Promise; + +beforeAll(async () => { + ({ default: handler } = await import("../pages/api/v1/addTransaction")); +}); + +beforeEach(() => { + jest.clearAllMocks(); + applyRateLimitMock.mockReturnValue(true); + applyBotRateLimitMock.mockReturnValue(true); + enforceBodySizeMock.mockReturnValue(true); + corsMock.mockResolvedValue(undefined); + verifyJwtMock.mockReturnValue(makeBotJwtPayload()); + isBotJwtMock.mockReturnValue(true); + transactionFromHexMock.mockReturnValue({}); + (assertBotWalletAccessMock as any).mockResolvedValue({ + wallet: { id: "wallet-1", signersAddresses: [BOT_TEST_ADDRESS], numRequiredSigners: 2, type: "atLeast" }, + role: "cosigner", + }); + (createTransactionMock as any).mockResolvedValue({ id: "tx-1" }); +}); + +describe("addTransaction bot API", () => { + it("returns 403 when bot wallet access fails", async () => { + (assertBotWalletAccessMock as any).mockRejectedValue(new Error("no access")); + const req = { + method: "POST", + headers: makeBearerAuth(), + body: { + walletId: "wallet-1", + address: BOT_TEST_ADDRESS, + txCbor: "deadbeef", + txJson: "{}", + }, + } as unknown as NextApiRequest; + const res = createMockResponse(); + await handler(req, res); + expect(res.status).toHaveBeenCalledWith(403); + }); + + it("creates pending transaction for authorized bot", async () => { + const req = { + method: "POST", + headers: makeBearerAuth(), + body: { + walletId: "wallet-1", + address: BOT_TEST_ADDRESS, + txCbor: "deadbeef", + txJson: "{}", + }, + } as unknown as NextApiRequest; + const res = createMockResponse(); + await handler(req, res); + expect(createTransactionMock).toHaveBeenCalled(); + expect(res.status).toHaveBeenCalledWith(201); + expect(res.json).toHaveBeenCalledWith({ id: "tx-1" }); + }); +}); diff --git a/src/__tests__/addTransaction.test.ts b/src/__tests__/addTransaction.test.ts new file mode 100644 index 00000000..651b05b1 --- /dev/null +++ b/src/__tests__/addTransaction.test.ts @@ -0,0 
+1,246 @@ +import { beforeAll, beforeEach, describe, expect, it, jest } from '@jest/globals'; +import type { NextApiRequest, NextApiResponse } from 'next'; + +// --- mocks --------------------------------------------------------------- + +const addCorsCacheBustingHeadersMock = jest.fn<(res: NextApiResponse) => void>(); +const corsMock = jest.fn<(req: NextApiRequest, res: NextApiResponse) => Promise>(); + +jest.mock( + '@/lib/cors', + () => ({ + __esModule: true, + addCorsCacheBustingHeaders: addCorsCacheBustingHeadersMock, + cors: corsMock, + }), + { virtual: true }, +); + +const verifyJwtMock = jest.fn<(token: string | undefined) => { address: string } | null>(); +const isBotJwtMock = jest.fn<(payload: unknown) => boolean>(); + +jest.mock( + '@/lib/verifyJwt', + () => ({ + __esModule: true, + verifyJwt: verifyJwtMock, + isBotJwt: isBotJwtMock, + }), + { virtual: true }, +); + +const applyRateLimitMock = jest.fn< + (req: NextApiRequest, res: NextApiResponse, options?: unknown) => boolean +>(); +const applyBotRateLimitMock = jest.fn< + (req: NextApiRequest, res: NextApiResponse, botId: string) => boolean +>(); +const enforceBodySizeMock = jest.fn< + (req: NextApiRequest, res: NextApiResponse, maxBytes: number) => boolean +>(); + +jest.mock( + '@/lib/security/requestGuards', + () => ({ + __esModule: true, + applyRateLimit: applyRateLimitMock, + applyBotRateLimit: applyBotRateLimitMock, + enforceBodySize: enforceBodySizeMock, + }), + { virtual: true }, +); + +const assertBotWalletAccessMock = jest.fn< + (db: unknown, walletId: string, payload: unknown, ...rest: unknown[]) => Promise<{ wallet: unknown }> +>(); + +jest.mock( + '@/lib/auth/botAccess', + () => ({ + __esModule: true, + assertBotWalletAccess: assertBotWalletAccessMock, + }), + { virtual: true }, +); + +const dbTransactionCreateMock = jest.fn<(args: unknown) => Promise>(); +const dbWalletFindUniqueMock = jest.fn<(args: unknown) => Promise>(); + +const dbMock = { + transaction: { create: 
dbTransactionCreateMock }, + wallet: { findUnique: dbWalletFindUniqueMock }, +}; + +jest.mock( + '@/server/db', + () => ({ + __esModule: true, + db: dbMock, + }), + { virtual: true }, +); + +const getProviderMock = jest.fn<(network: number) => { submitTx: (cbor: string) => unknown }>(); + +jest.mock( + '@/utils/get-provider', + () => ({ + __esModule: true, + getProvider: getProviderMock, + }), + { virtual: true }, +); + +const transactionFromHexMock = jest.fn<(hex: string) => { _parsed: true }>(); + +jest.mock( + '@meshsdk/core-csl', + () => ({ + __esModule: true, + csl: { + Transaction: { from_hex: transactionFromHexMock }, + }, + }), + { virtual: true }, +); + +// --- helpers ------------------------------------------------------------- + +type ResponseMock = NextApiResponse & { statusCode?: number }; + +function createMockResponse(): ResponseMock { + const res = { + statusCode: undefined as number | undefined, + status: jest.fn<(code: number) => NextApiResponse>(), + json: jest.fn<(payload: unknown) => unknown>(), + end: jest.fn<() => void>(), + setHeader: jest.fn<(name: string, value: string) => void>(), + }; + + res.status.mockImplementation((code: number) => { + res.statusCode = code; + return res as unknown as NextApiResponse; + }); + + res.json.mockImplementation((payload: unknown) => payload); + + return res as unknown as ResponseMock; +} + +const VALID_CBOR = '84a3'.padEnd(64, '0'); +const ADDRESS = 'addr_test1qpcallerexample'; +const WALLET_ID = 'wallet-id-1'; +const TOKEN = 'caller-token'; + +function baseBody(overrides: Record = {}) { + return { + walletId: WALLET_ID, + address: ADDRESS, + txCbor: VALID_CBOR, + txJson: JSON.stringify({ outputs: [] }), + description: 'test tx', + ...overrides, + }; +} + +function buildReq(body: Record): NextApiRequest { + return { + method: 'POST', + headers: { authorization: `Bearer ${TOKEN}` }, + body, + } as unknown as NextApiRequest; +} + +let handler: (req: NextApiRequest, res: NextApiResponse) => Promise; + 
+beforeAll(async () => { + ({ default: handler } = await import('../pages/api/v1/addTransaction')); +}); + +beforeEach(() => { + jest.clearAllMocks(); + + corsMock.mockResolvedValue(undefined); + addCorsCacheBustingHeadersMock.mockImplementation(() => undefined); + applyRateLimitMock.mockReturnValue(true); + applyBotRateLimitMock.mockReturnValue(true); + enforceBodySizeMock.mockReturnValue(true); + verifyJwtMock.mockReturnValue({ address: ADDRESS }); + isBotJwtMock.mockReturnValue(false); + transactionFromHexMock.mockReturnValue({ _parsed: true }); + dbWalletFindUniqueMock.mockResolvedValue({ + id: WALLET_ID, + type: 'atLeast', + numRequiredSigners: 2, + signersAddresses: [ADDRESS], + }); + dbTransactionCreateMock.mockResolvedValue({ id: 'new-tx-id' }); +}); + +// --- tests --------------------------------------------------------------- + +describe('addTransaction API route validation', () => { + it('rejects malformed CBOR with 400 and does not write to the DB', async () => { + transactionFromHexMock.mockImplementation(() => { + throw new Error('cbor deserialization failed'); + }); + + const res = createMockResponse(); + await handler(buildReq(baseBody({ txCbor: 'deadbeef' })), res); + + expect(res.status).toHaveBeenCalledWith(400); + expect(res.json).toHaveBeenCalledWith( + expect.objectContaining({ + error: expect.stringContaining('Invalid transaction CBOR'), + }), + ); + expect(dbTransactionCreateMock).not.toHaveBeenCalled(); + }); + + it('rejects non-string txCbor with 400', async () => { + const res = createMockResponse(); + await handler(buildReq(baseBody({ txCbor: 12345 })), res); + + expect(res.status).toHaveBeenCalledWith(400); + expect(res.json).toHaveBeenCalledWith( + expect.objectContaining({ + error: expect.stringContaining('Invalid txCbor'), + }), + ); + expect(transactionFromHexMock).not.toHaveBeenCalled(); + expect(dbTransactionCreateMock).not.toHaveBeenCalled(); + }); + + it('rejects unparseable txJson string with 400', async () => { + const res = 
createMockResponse(); + await handler(buildReq(baseBody({ txJson: '{not json' })), res); + + expect(res.status).toHaveBeenCalledWith(400); + expect(res.json).toHaveBeenCalledWith( + expect.objectContaining({ + error: expect.stringContaining('Invalid txJson'), + }), + ); + expect(dbTransactionCreateMock).not.toHaveBeenCalled(); + }); + + it('persists the transaction when CBOR and JSON are both valid', async () => { + const res = createMockResponse(); + await handler(buildReq(baseBody()), res); + + expect(transactionFromHexMock).toHaveBeenCalledWith(VALID_CBOR); + expect(dbTransactionCreateMock).toHaveBeenCalledTimes(1); + expect(res.status).toHaveBeenCalledWith(201); + }); + + it('accepts a txJson that is already an object', async () => { + const res = createMockResponse(); + await handler( + buildReq(baseBody({ txJson: { outputs: [], certificates: [] } })), + res, + ); + + expect(dbTransactionCreateMock).toHaveBeenCalledTimes(1); + expect(res.status).toHaveBeenCalledWith(201); + }); +}); diff --git a/src/__tests__/apiSecurity.test.ts b/src/__tests__/apiSecurity.test.ts index 667c6959..56843542 100644 --- a/src/__tests__/apiSecurity.test.ts +++ b/src/__tests__/apiSecurity.test.ts @@ -67,6 +67,8 @@ describe("wallet router authorization", () => { db: baseDb as any, session: null, sessionAddress: null, + sessionWallets: [], + primaryWallet: null, ip: "3.3.3.3", }); @@ -99,6 +101,8 @@ describe("wallet router authorization", () => { db: baseDb as any, session: { user: { id: "addr1" }, expires: new Date().toISOString() } as any, sessionAddress: "addr1", + sessionWallets: [], + primaryWallet: null, ip: "4.4.4.4", }); @@ -132,6 +136,8 @@ describe("wallet router authorization", () => { db: baseDb as any, session: { user: { id: "addr1" }, expires: new Date().toISOString() } as any, sessionAddress: "addr1", + sessionWallets: [], + primaryWallet: null, ip: "5.5.5.5", }); diff --git a/src/__tests__/apiTestUtils.ts b/src/__tests__/apiTestUtils.ts new file mode 100644 index 
00000000..4070cf12 --- /dev/null +++ b/src/__tests__/apiTestUtils.ts @@ -0,0 +1,52 @@ +import { jest } from "@jest/globals"; +import type { NextApiRequest, NextApiResponse } from "next"; + +export type BotJwtPayload = { + address: string; + botId: string; + type: "bot"; +}; + +export const BOT_TEST_ADDRESS = "addr_test1qpbotintegrationfixture000000000000000000000000"; +export const BOT_TEST_ID = "bot-test-id"; + +export function makeBotJwtPayload( + overrides: Partial = {}, +): BotJwtPayload { + return { + address: BOT_TEST_ADDRESS, + botId: BOT_TEST_ID, + type: "bot", + ...overrides, + }; +} + +export type ResponseMock = NextApiResponse & { statusCode?: number }; + +export function createMockResponse(): ResponseMock { + const res = { + statusCode: undefined as number | undefined, + status: jest.fn<(code: number) => NextApiResponse>(), + json: jest.fn<(payload: unknown) => unknown>(), + end: jest.fn<() => void>(), + setHeader: jest.fn<(name: string, value: string) => void>(), + }; + + res.status.mockImplementation((code: number) => { + res.statusCode = code; + return res as unknown as NextApiResponse; + }); + res.json.mockImplementation((payload: unknown) => payload); + + return res as unknown as ResponseMock; +} + +export function makeBearerAuth(token = "bot-token"): Record { + return { authorization: `Bearer ${token}` }; +} + +export function makeApiRequest( + request: Partial, +): NextApiRequest { + return request as NextApiRequest; +} diff --git a/src/__tests__/bot-api-testing.md b/src/__tests__/bot-api-testing.md new file mode 100644 index 00000000..85e45955 --- /dev/null +++ b/src/__tests__/bot-api-testing.md @@ -0,0 +1,111 @@ +# Bot API Testing Guide + +## Bot-Runnable Route Matrix + +| Route | Unit Test File | Happy Path | Auth/Access Failure | +| --- | --- | --- | --- | +| `/api/v1/botAuth` | `src/__tests__/botAuth.test.ts` | token + bot id returned | invalid secret rejected | +| `/api/v1/botMe` | `src/__tests__/botMe.test.ts` | profile payload returned | 
non-bot token rejected | +| `/api/v1/createWallet` | `src/__tests__/createWallet.bot.test.ts` | wallet created + bot access upserted | invalid signer address rejected | +| `/api/v1/walletIds` | `src/__tests__/walletIds.bot.test.ts` | wallet ids returned | address mismatch rejected | +| `/api/v1/pendingTransactions` | `src/__tests__/pendingTransactions.bot.test.ts` | pending tx list returned | wallet access denied | +| `/api/v1/freeUtxos` | `src/__tests__/freeUtxos.bot.test.ts` | free UTxOs returned | wallet access denied | +| `/api/v1/addTransaction` | `src/__tests__/addTransaction.bot.test.ts` | tx record created | bot wallet access denied | +| `/api/v1/nativeScript` | `src/__tests__/nativeScript.bot.test.ts` | script response returned | address mismatch rejected | +| `/api/v1/governanceActiveProposals` | `src/__tests__/governanceActiveProposals.test.ts` | active proposals returned | missing/invalid token rejected | +| `/api/v1/botBallotsUpsert` | `src/__tests__/botBallotsUpsert.test.ts` | ballot upsert paths covered | input and conflict errors covered | +| `/api/v1/signTransaction` | `src/__tests__/signTransaction.bot.test.ts` | witness recorded for bot cosigner | non-cosigner role rejected | +| `/api/v1/submitDatum` | `src/__tests__/submitDatum.bot.test.ts` | signable datum created | invalid signature rejected | + +## New Bot Route Test Checklist + +- Add a `*.bot.test.ts` file in `src/__tests__/` with the route name. +- Use `createMockResponse()` and bot payload defaults from `src/__tests__/apiTestUtils.ts`. +- Cover at least: + - one success response with expected JSON shape, + - one auth/scope/access failure branch, + - one method/validation branch when route-specific risk is high. +- Keep network and chain helpers mocked; keep route logic and DB interactions under test. 
+ +## Integration Smoke Tests + +- File: `src/__tests__/botApi.integration.test.ts` +- Default behavior: skipped unless `RUN_BOT_API_INTEGRATION=true` +- Purpose: exercise real Prisma DB writes/reads for bot auth, wallet access reads, mutating routes, and one signature-heavy route with mocked signature validator. + +### Required env for integration run + +- `RUN_BOT_API_INTEGRATION=true` +- `DATABASE_URL=` +- `JWT_SECRET=<32+ char secret>` +- `SKIP_ENV_VALIDATION=true` (recommended for test-only runs) + +## PR Workflow: Containers + CI Wallet Smoke + +- Workflow: `.github/workflows/pr-multisig-v1-smoke.yml` +- Triggers: `pull_request` and `workflow_dispatch` (manual test runs) +- Compose stack: `docker-compose.ci.yml` +- CI scripts: + - `scripts/ci/cli/bootstrap.ts` + - `scripts/ci/cli/route-chain.ts` (route-chain runner; filter with `CI_ROUTE_SCENARIOS`) + - `scripts/ci/cli/sign-transaction.ts` (ad-hoc sign helper) + - `scripts/ci/scenarios/manifest.ts` (scenario registry) + +### Required GitHub repository secrets + +- `CI_JWT_SECRET` (32+ chars) +- `CI_MNEMONIC_1` (space-separated words) +- `CI_MNEMONIC_2` (space-separated words) +- `CI_MNEMONIC_3` (space-separated words) +- `CI_BLOCKFROST_PREPROD_API_KEY` (required; transfer and signing scenarios use live preprod data) +- `CI_BLOCKFROST_MAINNET_API_KEY` (optional; only needed if smoke coverage is expanded to mainnet-dependent routes) + +### Runtime flags used by the workflow + +- `CI_NETWORK_ID` (default `0` for preprod/testnet) +- `CI_NUM_REQUIRED_SIGNERS` (default `2`; controls `numRequiredSigners` and hierarchical inner `atLeast.required`) +- `CI_WALLET_TYPES` (default `legacy,hierarchical,sdk`) +- `CI_SIGN_WALLET_TYPE` (which wallet type signing smoke targets: `legacy` | `hierarchical` | `sdk`) +- `SIGN_BROADCAST` (`true`; broadcast is always enabled for CI route-chain signing) +- `CI_TRANSFER_LOVELACE` (optional transfer amount for real-transfer scenario, default `2000000`) +- `CI_ROUTE_SCENARIOS` (optional 
comma-separated scenario ids for targeted route-chain runs) + +Validation behavior: + +- Invalid values in `CI_WALLET_TYPES` now fail fast (must be `legacy`, `hierarchical`, `sdk`). +- Unknown scenario ids in `CI_ROUTE_SCENARIOS` now fail fast with available ids listed. + +### What phase 1 validates + +- Starts Postgres + app containers on PR. +- Derives signer addresses from the three mnemonic secrets. +- Creates selected wallet types (`legacy`, `hierarchical`, `sdk`) through `/api/v1/createWallet`. +- Uses a nested payment script for `hierarchical` wallets (`all` wrapping `atLeast`) while keeping signer keys payment-only. +- Verifies route-chain health for bot routes (`walletIds`, `pendingTransactions`, `freeUtxos`, `signTransaction`) using shared bootstrap context. +- Executes a real transfer flow: + - build transfer tx via `/api/v1/addTransaction` + - sign and broadcast via `/api/v1/signTransaction` + - assert final state via `/api/v1/pendingTransactions` +- Uploads machine-readable route-chain report artifact from `ci-artifacts/ci-route-chain-report.json`. + +### Built-in route-chain scenarios + +- `scenario.pending-and-discovery` +- `scenario.pending-per-wallet` +- `scenario.ada-route-health` +- `scenario.real-transfer-and-sign` +- `scenario.final-assertions` + +### Add a new v1 route test step + +1. Add a new step module or helper in `scripts/ci/scenarios/`. + - You can start from `scripts/ci/scenarios/steps/template-route-step.ts`. +2. Implement the standard step contract: + - `id` + - `description` + - `execute(ctx)` with deterministic assertions + - optional `artifacts` for failure triage +3. Register the step in `scripts/ci/scenarios/manifest.ts`. +4. Run the route-chain smoke locally/CI and verify step-level report output. + +This keeps wallet bootstrap stable while route coverage grows through small, isolated step additions. 
diff --git a/src/__tests__/botApi.integration.test.ts b/src/__tests__/botApi.integration.test.ts new file mode 100644 index 00000000..56abceb0 --- /dev/null +++ b/src/__tests__/botApi.integration.test.ts @@ -0,0 +1,176 @@ +import { beforeAll, beforeEach, describe, expect, it, jest } from "@jest/globals"; +import type { NextApiRequest, NextApiResponse } from "next"; +import { randomUUID } from "crypto"; +import { createMockResponse } from "./apiTestUtils"; +import { hashBotKeySecret } from "../lib/auth/botKey"; + +const addCorsHeadersMock = jest.fn<(res: NextApiResponse) => void>(); +const corsMock = jest.fn<(req: NextApiRequest, res: NextApiResponse) => Promise>(); + +jest.mock("@/lib/cors", () => ({ + __esModule: true, + addCorsCacheBustingHeaders: addCorsHeadersMock, + cors: corsMock, +}), { virtual: true }); + +jest.mock("@/lib/security/requestGuards", () => ({ + __esModule: true, + applyRateLimit: () => true, + applyBotRateLimit: () => true, + applyStrictRateLimit: () => true, + enforceBodySize: () => true, +}), { virtual: true }); + +jest.mock("@/env", () => ({ + __esModule: true, + env: { + DATABASE_URL: process.env.DATABASE_URL, + NODE_ENV: "test", + }, +}), { virtual: true }); + +jest.mock("@meshsdk/core-cst", () => ({ + __esModule: true, + checkSignature: async () => true, +})); + +const runIntegration = process.env.RUN_BOT_API_INTEGRATION === "true"; +const describeIntegration = runIntegration ? 
describe : describe.skip; + +let botAuthHandler: (req: NextApiRequest, res: NextApiResponse) => Promise; +let botMeHandler: (req: NextApiRequest, res: NextApiResponse) => Promise; +let addTransactionHandler: (req: NextApiRequest, res: NextApiResponse) => Promise; +let submitDatumHandler: (req: NextApiRequest, res: NextApiResponse) => Promise; +let db: any; + +function firstJsonCall(res: ReturnType): T { + return (res.json as unknown as jest.Mock).mock.calls[0]?.[0] as T; +} + +describeIntegration("bot API integration smoke", () => { + beforeAll(async () => { + ({ db } = await import("../server/db")); + ({ default: botAuthHandler } = await import("../pages/api/v1/botAuth")); + ({ default: botMeHandler } = await import("../pages/api/v1/botMe")); + ({ default: addTransactionHandler } = await import("../pages/api/v1/addTransaction")); + ({ default: submitDatumHandler } = await import("../pages/api/v1/submitDatum")); + }); + + beforeEach(() => { + jest.clearAllMocks(); + corsMock.mockResolvedValue(undefined); + }); + + it("authenticates bot and fetches bot profile", async () => { + const suffix = randomUUID().replace(/-/g, "").slice(0, 12); + const ownerAddress = `owner_${suffix}`; + const paymentAddress = `addr_test1qpbotintegration${suffix}000000000000000000000000`; + const secret = `secret-${suffix}`; + + const botKey = await db.botKey.create({ + data: { + ownerAddress, + name: `bot-${suffix}`, + keyHash: hashBotKeySecret(secret), + scope: JSON.stringify(["multisig:create", "multisig:read", "multisig:sign"]), + }, + }); + + const authReq = { + method: "POST", + body: { botKeyId: botKey.id, secret, paymentAddress }, + } as unknown as NextApiRequest; + const authRes = createMockResponse(); + await botAuthHandler(authReq, authRes); + expect(authRes.status).toHaveBeenCalledWith(200); + const authBody = firstJsonCall<{ token: string; botId: string }>(authRes); + expect(authBody.token).toBeTruthy(); + + const meReq = { + method: "GET", + headers: { authorization: `Bearer 
${authBody.token}` }, + query: {}, + } as unknown as NextApiRequest; + const meRes = createMockResponse(); + await botMeHandler(meReq, meRes); + expect(meRes.status).toHaveBeenCalledWith(200); + await db.botUser.deleteMany({ where: { botKeyId: botKey.id } }); + await db.botKey.delete({ where: { id: botKey.id } }); + }); + + it("runs mutating and signature-heavy bot routes against real db", async () => { + const suffix = randomUUID().replace(/-/g, "").slice(0, 12); + const paymentAddress = `addr_test1qpbotintegrationmut${suffix}000000000000000000000`; + const secret = `secret-mut-${suffix}`; + + const botKey = await db.botKey.create({ + data: { + ownerAddress: `owner_mut_${suffix}`, + name: `bot-mut-${suffix}`, + keyHash: hashBotKeySecret(secret), + scope: JSON.stringify(["multisig:read", "multisig:sign"]), + }, + }); + + const wallet = await db.wallet.create({ + data: { + name: `wallet-mut-${suffix}`, + description: null, + signersAddresses: [paymentAddress], + signersStakeKeys: [], + signersDRepKeys: [], + signersDescriptions: [""], + numRequiredSigners: 2, + scriptCbor: "deadbeef", + stakeCredentialHash: null, + type: "atLeast", + ownerAddress: "all", + }, + }); + + const authReq = { + method: "POST", + body: { botKeyId: botKey.id, secret, paymentAddress }, + } as unknown as NextApiRequest; + const authRes = createMockResponse(); + await botAuthHandler(authReq, authRes); + const authBody = firstJsonCall<{ token: string }>(authRes); + + const addReq = { + method: "POST", + headers: { authorization: `Bearer ${authBody.token}` }, + body: { + walletId: wallet.id, + address: paymentAddress, + txCbor: "deadbeef", + txJson: "{\"body\":{}}", + }, + } as unknown as NextApiRequest; + const addRes = createMockResponse(); + await addTransactionHandler(addReq, addRes); + expect(addRes.status).toHaveBeenCalledWith(201); + + const submitReq = { + method: "POST", + headers: { authorization: `Bearer ${authBody.token}`, origin: "https://integration.test" }, + body: { + walletId: 
wallet.id, + signature: "sig", + key: "key", + address: paymentAddress, + datum: "payload", + callbackUrl: "https://integration.test/callback", + }, + } as unknown as NextApiRequest; + const submitRes = createMockResponse(); + await submitDatumHandler(submitReq, submitRes); + expect(submitRes.status).toHaveBeenCalledWith(201); + + await db.transaction.deleteMany({ where: { walletId: wallet.id } }); + await db.signable.deleteMany({ where: { walletId: wallet.id } }); + await db.walletBotAccess.deleteMany({ where: { walletId: wallet.id } }); + await db.wallet.delete({ where: { id: wallet.id } }); + await db.botUser.deleteMany({ where: { botKeyId: botKey.id } }); + await db.botKey.delete({ where: { id: botKey.id } }); + }); +}); diff --git a/src/__tests__/botAuth.test.ts b/src/__tests__/botAuth.test.ts new file mode 100644 index 00000000..a9cf3122 --- /dev/null +++ b/src/__tests__/botAuth.test.ts @@ -0,0 +1,120 @@ +import { beforeAll, beforeEach, describe, expect, it, jest } from "@jest/globals"; +import type { NextApiRequest, NextApiResponse } from "next"; +import { createMockResponse } from "./apiTestUtils"; + +const addCorsHeadersMock = jest.fn<(res: NextApiResponse) => void>(); +const corsMock = jest.fn<(req: NextApiRequest, res: NextApiResponse) => Promise>(); +const applyStrictRateLimitMock = jest.fn<(req: NextApiRequest, res: NextApiResponse) => boolean>(); +const enforceBodySizeMock = jest.fn<(req: NextApiRequest, res: NextApiResponse, maxBytes: number) => boolean>(); +const verifyBotKeySecretMock = jest.fn<(secret: string, hash: string) => boolean>(); +const parseScopeMock = jest.fn<(scope: string) => string[]>(); +const scopeIncludesMock = jest.fn<(scopes: string[], minScope: string) => boolean>(); +const signMock: jest.Mock = jest.fn(); +const findBotKeyMock: jest.Mock = jest.fn(); +const findBotUserByAddressMock: jest.Mock = jest.fn(); +const upsertBotUserMock: jest.Mock = jest.fn(); + +jest.mock("@/lib/cors", () => ({ + __esModule: true, + 
addCorsCacheBustingHeaders: addCorsHeadersMock, + cors: corsMock, +}), { virtual: true }); + +jest.mock("@/lib/security/requestGuards", () => ({ + __esModule: true, + applyStrictRateLimit: applyStrictRateLimitMock, + enforceBodySize: enforceBodySizeMock, +}), { virtual: true }); + +jest.mock("@/lib/auth/botKey", () => ({ + __esModule: true, + verifyBotKeySecret: verifyBotKeySecretMock, + parseScope: parseScopeMock, + scopeIncludes: scopeIncludesMock, +}), { virtual: true }); + +jest.mock("jsonwebtoken", () => ({ + __esModule: true, + sign: signMock, +})); + +jest.mock("@/server/db", () => ({ + __esModule: true, + db: { + botKey: { findUnique: findBotKeyMock }, + botUser: { + findUnique: findBotUserByAddressMock, + upsert: upsertBotUserMock, + }, + }, +}), { virtual: true }); + +let handler: (req: NextApiRequest, res: NextApiResponse) => Promise; + +beforeAll(async () => { + process.env.JWT_SECRET = "x".repeat(32); + ({ default: handler } = await import("../pages/api/v1/botAuth")); +}); + +beforeEach(() => { + jest.clearAllMocks(); + applyStrictRateLimitMock.mockReturnValue(true); + enforceBodySizeMock.mockReturnValue(true); + corsMock.mockResolvedValue(undefined); + verifyBotKeySecretMock.mockReturnValue(true); + parseScopeMock.mockReturnValue(["multisig:read"]); + scopeIncludesMock.mockReturnValue(true); + signMock.mockReturnValue("signed-jwt"); + (findBotKeyMock as any).mockResolvedValue({ + id: "bot-key-id", + keyHash: "hashed", + scope: JSON.stringify(["multisig:read"]), + }); + (findBotUserByAddressMock as any).mockResolvedValue(null); + (upsertBotUserMock as any).mockResolvedValue({ + id: "bot-user-id", + paymentAddress: "addr_test1qpbot00000000000000000000000000000000000", + }); +}); + +describe("botAuth API", () => { + it("returns 401 for invalid bot secret", async () => { + verifyBotKeySecretMock.mockReturnValue(false); + const req = { + method: "POST", + body: { + botKeyId: "bot-key-id", + secret: "wrong", + paymentAddress: 
"addr_test1qpbot00000000000000000000000000000000000", + }, + } as unknown as NextApiRequest; + const res = createMockResponse(); + + await handler(req, res); + + expect(res.status).toHaveBeenCalledWith(401); + expect(res.json).toHaveBeenCalledWith({ error: "Invalid bot key" }); + }); + + it("returns token and botId for valid request", async () => { + const req = { + method: "POST", + body: { + botKeyId: "bot-key-id", + secret: "secret", + paymentAddress: "addr_test1qpbot00000000000000000000000000000000000", + }, + } as unknown as NextApiRequest; + const res = createMockResponse(); + + await handler(req, res); + + expect(upsertBotUserMock).toHaveBeenCalled(); + expect(signMock).toHaveBeenCalled(); + expect(res.status).toHaveBeenCalledWith(200); + expect(res.json).toHaveBeenCalledWith({ + token: "signed-jwt", + botId: "bot-user-id", + }); + }); +}); diff --git a/src/__tests__/botBallotsUpsert.test.ts b/src/__tests__/botBallotsUpsert.test.ts index 1d487b77..4c5d202c 100644 --- a/src/__tests__/botBallotsUpsert.test.ts +++ b/src/__tests__/botBallotsUpsert.test.ts @@ -6,22 +6,24 @@ const corsMock = jest.fn<(req: NextApiRequest, res: NextApiResponse) => Promise< const applyRateLimitMock = jest.fn<(req: NextApiRequest, res: NextApiResponse) => boolean>(); const applyBotRateLimitMock = jest.fn<(req: NextApiRequest, res: NextApiResponse, botId: string) => boolean>(); const enforceBodySizeMock = jest.fn<(req: NextApiRequest, res: NextApiResponse, maxBytes: number) => boolean>(); -const verifyJwtMock = jest.fn(); -const isBotJwtMock = jest.fn(); -const assertBotWalletAccessMock = jest.fn(); -const findBotUserMock = jest.fn(); -const transactionMock = jest.fn(); -const parseScopeMock = jest.fn(); -const scopeIncludesMock = jest.fn(); +const verifyJwtMock = jest.fn<() => unknown>(); +const isBotJwtMock = jest.fn<() => boolean>(); +const assertBotWalletAccessMock = jest.fn<() => Promise>(); +const findBotUserMock = jest.fn<() => Promise>(); +const transactionMock = jest.fn<(cb: 
(tx: typeof txMock) => Promise) => Promise>(); +const parseScopeMock = jest.fn<(scope: string) => string[]>(); +const scopeIncludesMock = jest.fn<(scopes: string[], required: string) => boolean>(); const isValidChoiceMock = jest.fn(); -const parseProposalIdMock = jest.fn(); +const parseProposalIdMock = jest.fn< + (value: string) => { txHash: string; certIndex: number } +>(); const txMock = { ballot: { - findUnique: jest.fn(), - findMany: jest.fn(), - create: jest.fn(), - updateMany: jest.fn(), + findUnique: jest.fn<() => Promise>(), + findMany: jest.fn<() => Promise>(), + create: jest.fn<() => Promise>(), + updateMany: jest.fn<() => Promise>(), }, }; @@ -132,14 +134,14 @@ beforeEach(() => { corsMock.mockResolvedValue(undefined); verifyJwtMock.mockReturnValue({ address: "addr_test1", botId: "bot-1", type: "bot" }); isBotJwtMock.mockReturnValue(true); - parseScopeMock.mockImplementation((scope: string) => JSON.parse(scope)); - scopeIncludesMock.mockImplementation((scopes: string[], required: string) => + parseScopeMock.mockImplementation((scope) => JSON.parse(scope) as string[]); + scopeIncludesMock.mockImplementation((scopes, required) => scopes.includes(required), ); isValidChoiceMock.mockReturnValue(true); - parseProposalIdMock.mockImplementation((value: string) => { + parseProposalIdMock.mockImplementation((value) => { const [txHash, certIndex] = value.split("#"); - return { txHash, certIndex: Number(certIndex) }; + return { txHash: txHash ?? 
"", certIndex: Number(certIndex) }; }); findBotUserMock.mockResolvedValue({ id: "bot-1", diff --git a/src/__tests__/botMe.test.ts b/src/__tests__/botMe.test.ts new file mode 100644 index 00000000..9e146990 --- /dev/null +++ b/src/__tests__/botMe.test.ts @@ -0,0 +1,89 @@ +import { beforeAll, beforeEach, describe, expect, it, jest } from "@jest/globals"; +import type { NextApiRequest, NextApiResponse } from "next"; +import { createMockResponse, makeBearerAuth, makeBotJwtPayload } from "./apiTestUtils"; + +const addCorsHeadersMock = jest.fn<(res: NextApiResponse) => void>(); +const corsMock = jest.fn<(req: NextApiRequest, res: NextApiResponse) => Promise>(); +const applyRateLimitMock = jest.fn<(req: NextApiRequest, res: NextApiResponse) => boolean>(); +const applyBotRateLimitMock = jest.fn<(req: NextApiRequest, res: NextApiResponse, botId: string) => boolean>(); +const verifyJwtMock: jest.Mock = jest.fn(); +const isBotJwtMock: jest.Mock = jest.fn(); +const findBotUserMock: jest.Mock = jest.fn(); + +jest.mock("@/lib/cors", () => ({ + __esModule: true, + addCorsCacheBustingHeaders: addCorsHeadersMock, + cors: corsMock, +}), { virtual: true }); + +jest.mock("@/lib/security/requestGuards", () => ({ + __esModule: true, + applyRateLimit: applyRateLimitMock, + applyBotRateLimit: applyBotRateLimitMock, +}), { virtual: true }); + +jest.mock("@/lib/verifyJwt", () => ({ + __esModule: true, + verifyJwt: verifyJwtMock, + isBotJwt: isBotJwtMock, +}), { virtual: true }); + +jest.mock("@/server/db", () => ({ + __esModule: true, + db: { + botUser: { findUnique: findBotUserMock }, + }, +}), { virtual: true }); + +let handler: (req: NextApiRequest, res: NextApiResponse) => Promise; + +beforeAll(async () => { + ({ default: handler } = await import("../pages/api/v1/botMe")); +}); + +beforeEach(() => { + jest.clearAllMocks(); + applyRateLimitMock.mockReturnValue(true); + applyBotRateLimitMock.mockReturnValue(true); + corsMock.mockResolvedValue(undefined); + 
verifyJwtMock.mockReturnValue(makeBotJwtPayload()); + isBotJwtMock.mockReturnValue(true); + (findBotUserMock as any).mockResolvedValue({ + id: "bot-test-id", + paymentAddress: "addr_test1qpbot", + displayName: null, + botKey: { ownerAddress: "addr_test1qphuman", name: "My Bot" }, + }); +}); + +describe("botMe API", () => { + it("rejects non-bot tokens", async () => { + isBotJwtMock.mockReturnValue(false); + const req = { + method: "GET", + headers: makeBearerAuth(), + query: {}, + } as unknown as NextApiRequest; + const res = createMockResponse(); + await handler(req, res); + expect(res.status).toHaveBeenCalledWith(403); + }); + + it("returns bot profile for valid bot token", async () => { + const req = { + method: "GET", + headers: makeBearerAuth(), + query: {}, + } as unknown as NextApiRequest; + const res = createMockResponse(); + await handler(req, res); + expect(res.status).toHaveBeenCalledWith(200); + expect(res.json).toHaveBeenCalledWith({ + botId: "bot-test-id", + paymentAddress: "addr_test1qpbot", + displayName: null, + botName: "My Bot", + ownerAddress: "addr_test1qphuman", + }); + }); +}); diff --git a/src/__tests__/ciHttp.test.ts b/src/__tests__/ciHttp.test.ts new file mode 100644 index 00000000..5ad6e4cc --- /dev/null +++ b/src/__tests__/ciHttp.test.ts @@ -0,0 +1,119 @@ +import { afterEach, beforeEach, describe, expect, it, jest } from "@jest/globals"; +import { requestJson } from "../../scripts/ci/framework/http"; + +function jsonResponse(status: number, data: unknown, headers?: HeadersInit): Response { + return new Response(JSON.stringify(data), { status, headers }); +} + +describe("CI requestJson retry policy", () => { + let fetchMock: jest.SpiedFunction; + + beforeEach(() => { + fetchMock = jest.spyOn(globalThis, "fetch"); + }); + + afterEach(() => { + fetchMock.mockRestore(); + }); + + it("retries transient 429 responses", async () => { + fetchMock + .mockResolvedValueOnce(jsonResponse(429, { error: "Too many requests" })) + 
.mockResolvedValueOnce(jsonResponse(200, { ok: true })); + + const response = await requestJson<{ ok?: boolean }>({ + url: "http://example.test/rate-limited", + retries: 1, + retryDelayMs: 0, + maxRetryDelayMs: 0, + }); + + expect(response).toEqual({ status: 200, data: { ok: true } }); + expect(fetchMock).toHaveBeenCalledTimes(2); + }); + + it("does not retry non-transient validation responses", async () => { + fetchMock.mockResolvedValueOnce(jsonResponse(400, { error: "Bad Request" })); + + const response = await requestJson<{ error?: string }>({ + url: "http://example.test/bad-request", + retries: 3, + retryDelayMs: 0, + maxRetryDelayMs: 0, + }); + + expect(response).toEqual({ status: 400, data: { error: "Bad Request" } }); + expect(fetchMock).toHaveBeenCalledTimes(1); + }); + + it("returns the final transient response after retries are exhausted", async () => { + fetchMock + .mockResolvedValueOnce(jsonResponse(429, { error: "Too many requests" })) + .mockResolvedValueOnce(jsonResponse(429, { error: "Still rate limited" })); + + const response = await requestJson<{ error?: string }>({ + url: "http://example.test/rate-limited", + retries: 1, + retryDelayMs: 0, + maxRetryDelayMs: 0, + }); + + expect(response).toEqual({ status: 429, data: { error: "Still rate limited" } }); + expect(fetchMock).toHaveBeenCalledTimes(2); + }); + + it("does not retry transient responses when retries are disabled", async () => { + fetchMock.mockResolvedValueOnce( + jsonResponse(502, { + error: "Transaction witness recorded, but submission to network failed", + }), + ); + + const response = await requestJson<{ error?: string }>({ + url: "http://example.test/signTransaction", + method: "POST", + body: { transactionId: "tx-1" }, + retries: 0, + }); + + expect(response).toEqual({ + status: 502, + data: { + error: "Transaction witness recorded, but submission to network failed", + }, + }); + expect(fetchMock).toHaveBeenCalledTimes(1); + }); + + it("retries failed fetch attempts", async () => 
{ + fetchMock + .mockRejectedValueOnce(new Error("connection reset")) + .mockResolvedValueOnce(jsonResponse(200, { ok: true })); + + const response = await requestJson<{ ok?: boolean }>({ + url: "http://example.test/flaky", + retries: 1, + retryDelayMs: 0, + maxRetryDelayMs: 0, + }); + + expect(response).toEqual({ status: 200, data: { ok: true } }); + expect(fetchMock).toHaveBeenCalledTimes(2); + }); + + it("rejects BigInt request bodies before fetch retries", async () => { + await expect( + requestJson({ + url: "http://example.test/bigint", + method: "POST", + body: { + nested: { + selectedLovelace: 1n, + }, + }, + }), + ).rejects.toThrow(/non-JSON BigInt at body\.nested\.selectedLovelace/); + + expect(fetchMock).not.toHaveBeenCalled(); + }); +}); diff --git a/src/__tests__/ciScenarioManifest.test.ts b/src/__tests__/ciScenarioManifest.test.ts new file mode 100644 index 00000000..0fae816f --- /dev/null +++ b/src/__tests__/ciScenarioManifest.test.ts @@ -0,0 +1,68 @@ +import { describe, expect, it } from "@jest/globals"; +import { getScenarioManifest, ROUTE_SCENARIO_IDS } from "../../scripts/ci/scenarios/manifest"; +import type { CIBootstrapContext, CIWalletType } from "../../scripts/ci/framework/types"; + +const mkContext = (walletTypes: CIWalletType[]): CIBootstrapContext => ({ + schemaVersion: 3, + createdAt: "2026-04-29T00:00:00.000Z", + apiBaseUrl: "http://localhost:3000", + networkId: 0, + walletTypes, + wallets: walletTypes.map((type) => ({ + type, + walletId: `${type}-wallet-id`, + walletAddress: `addr_test_${type}`, + signerAddresses: ["addr_test_signer_1", "addr_test_signer_2", "addr_test_signer_3"], + })), + bots: [ + { + id: "bot-1", + paymentAddress: "addr_test_signer_1", + botKeyId: "bot-key-1", + botId: "bot-user-1", + }, + ], + defaultBotId: "bot-1", + signerAddresses: ["addr_test_signer_1", "addr_test_signer_2", "addr_test_signer_3"], + signerStakeAddresses: ["stake_test_1", "stake_test_2", "stake_test_3"], +}); + +describe("route-chain scenario 
manifest", () => { + it("exposes all known scenario ids for filter validation", () => { + expect(ROUTE_SCENARIO_IDS).toContain("scenario.proxy-smoke"); + expect(ROUTE_SCENARIO_IDS).toContain("scenario.real-transfer-and-sign"); + }); + + it("runs create-wallet before request-heavy default-bot scenarios", () => { + const scenarios = getScenarioManifest(mkContext(["legacy", "hierarchical", "sdk"])); + const ids = scenarios.map((scenario) => scenario.id); + + expect(ids.indexOf("scenario.create-wallet")).toBeLessThan( + ids.indexOf("scenario.bot-identity"), + ); + expect(ids.indexOf("scenario.create-wallet")).toBeLessThan( + ids.indexOf("scenario.auth-plane"), + ); + expect(ids.indexOf("scenario.create-wallet")).toBeLessThan( + ids.indexOf("scenario.proxy-smoke"), + ); + }); + + it("builds a proxy-smoke subset without requiring ring-transfer wallets", () => { + const scenarios = getScenarioManifest(mkContext(["legacy"]), ["scenario.proxy-smoke"]); + + expect(scenarios.map((scenario) => scenario.id)).toEqual(["scenario.proxy-smoke"]); + }); + + it("builds a create-wallet subset without running prior auth/proxy scenarios", () => { + const scenarios = getScenarioManifest(mkContext(["legacy"]), ["scenario.create-wallet"]); + + expect(scenarios.map((scenario) => scenario.id)).toEqual(["scenario.create-wallet"]); + }); + + it("still fails clearly when ring transfer is requested without all wallet types", () => { + expect(() => + getScenarioManifest(mkContext(["legacy"]), ["scenario.real-transfer-and-sign"]), + ).toThrow(/Ring transfer scenario requires wallet types/); + }); +}); diff --git a/src/__tests__/ciSigningSelection.test.ts b/src/__tests__/ciSigningSelection.test.ts new file mode 100644 index 00000000..5cf729a1 --- /dev/null +++ b/src/__tests__/ciSigningSelection.test.ts @@ -0,0 +1,56 @@ +import { describe, expect, it } from "@jest/globals"; +import { + SIGN_TRANSACTION_REQUEST_OPTIONS, + selectPendingTransactionForSigning, +} from 
"../../scripts/ci/scenarios/flows/signingFlow"; + +describe("route-chain pending transaction selection", () => { + it("does not retry signTransaction after a witness may have been recorded", () => { + expect(SIGN_TRANSACTION_REQUEST_OPTIONS).toEqual({ retries: 0 }); + }); + + it("selects the preferred transaction when present", () => { + expect( + selectPendingTransactionForSigning( + [ + { id: "stale", txCbor: "deadbeef" }, + { id: "target", txCbor: "cafebabe" }, + ], + "target", + ), + ).toEqual({ id: "target", txCbor: "cafebabe" }); + }); + + it("fails instead of falling back when the preferred transaction is missing", () => { + expect(() => + selectPendingTransactionForSigning( + [ + { id: "stale", txCbor: "deadbeef" }, + { id: "other", txCbor: "cafebabe" }, + ], + "target", + ), + ).toThrow(/Preferred pending transaction target was not found/); + }); + + it("fails instead of falling back when the preferred transaction has no txCbor", () => { + expect(() => + selectPendingTransactionForSigning( + [ + { id: "target" }, + { id: "other", txCbor: "cafebabe" }, + ], + "target", + ), + ).toThrow(/Preferred pending transaction target does not include txCbor/); + }); + + it("keeps the old first-signable fallback when no preferred id is provided", () => { + expect( + selectPendingTransactionForSigning([ + { id: "empty" }, + { id: "first-signable", txCbor: "deadbeef" }, + ]), + ).toEqual({ id: "first-signable", txCbor: "deadbeef" }); + }); +}); diff --git a/src/__tests__/common.walletType.test.ts b/src/__tests__/common.walletType.test.ts new file mode 100644 index 00000000..8d8333ac --- /dev/null +++ b/src/__tests__/common.walletType.test.ts @@ -0,0 +1,55 @@ +import { describe, expect, it } from "@jest/globals"; +import { getWalletType } from "@/utils/common"; +import { DbWalletWithLegacy } from "@/types/wallet"; + +function makeWallet(overrides: Partial = {}): DbWalletWithLegacy { + return { + signersStakeKeys: [], + signersDRepKeys: [], + rawImportBodies: null, + 
...overrides, + } as unknown as DbWalletWithLegacy; +} + +describe("getWalletType", () => { + it("returns summon when raw import multisig body is present", () => { + const wallet = makeWallet({ + rawImportBodies: { + multisig: { + address: "addr_test1...", + }, + }, + signersStakeKeys: ["stake_test1..."], + signersDRepKeys: ["drep_key"], + }); + + expect(getWalletType(wallet)).toBe("summon"); + }); + + it("returns legacy when stake/drep arrays only contain empty values", () => { + const wallet = makeWallet({ + signersStakeKeys: ["", " "], + signersDRepKeys: ["", " "], + }); + + expect(getWalletType(wallet)).toBe("legacy"); + }); + + it("returns sdk when there is at least one non-empty trimmed stake key", () => { + const wallet = makeWallet({ + signersStakeKeys: [" ", "stake_test1uq..."], + signersDRepKeys: ["", " "], + }); + + expect(getWalletType(wallet)).toBe("sdk"); + }); + + it("returns sdk when there is at least one non-empty trimmed drep key", () => { + const wallet = makeWallet({ + signersStakeKeys: ["", " "], + signersDRepKeys: [" ", "drep_key_hash"], + }); + + expect(getWalletType(wallet)).toBe("sdk"); + }); +}); diff --git a/src/__tests__/createPendingMultisigTransaction.test.ts b/src/__tests__/createPendingMultisigTransaction.test.ts new file mode 100644 index 00000000..9c53ce87 --- /dev/null +++ b/src/__tests__/createPendingMultisigTransaction.test.ts @@ -0,0 +1,99 @@ +import { beforeAll, beforeEach, describe, expect, it, jest } from "@jest/globals"; +import type { PrismaClient } from "@prisma/client"; + +const submitTxMock = jest.fn(); + +jest.mock("@/utils/get-provider", () => ({ + __esModule: true, + getProvider: () => ({ submitTx: submitTxMock }), +}), { virtual: true }); + +let createPendingMultisigTransaction: typeof import("@/lib/server/createPendingMultisigTransaction").createPendingMultisigTransaction; + +function makeDb() { + return { + transaction: { + create: jest.fn().mockResolvedValue({ id: "tx-1" }), + }, + } as unknown as PrismaClient; 
+} + +const baseArgs = { + walletId: "wallet-1", + wallet: { numRequiredSigners: 2, type: "atLeast" }, + proposerAddress: "addr_test_proposer", + txCbor: "tx-cbor", + txJson: { body: "json" }, + description: "test transaction", + network: 0, +}; + +describe("createPendingMultisigTransaction", () => { + beforeAll(async () => { + ({ createPendingMultisigTransaction } = await import("@/lib/server/createPendingMultisigTransaction")); + }); + + beforeEach(() => { + jest.clearAllMocks(); + submitTxMock.mockResolvedValue("submitted-hash"); + }); + + it("defaults pending transactions to signed by the proposer", async () => { + const db = makeDb(); + + await createPendingMultisigTransaction(db, baseArgs); + + expect(db.transaction.create).toHaveBeenCalledWith({ + data: expect.objectContaining({ + walletId: "wallet-1", + signedAddresses: ["addr_test_proposer"], + }), + }); + }); + + it("allows server-built transactions to start with no signed addresses", async () => { + const db = makeDb(); + + await createPendingMultisigTransaction(db, { + ...baseArgs, + initialSignedAddresses: [], + }); + + expect(db.transaction.create).toHaveBeenCalledWith({ + data: expect.objectContaining({ + signedAddresses: [], + }), + }); + }); + + it("keeps one-signer server-built transactions pending until a witness exists", async () => { + const db = makeDb(); + + await createPendingMultisigTransaction(db, { + ...baseArgs, + wallet: { numRequiredSigners: 1, type: "all" }, + initialSignedAddresses: [], + }); + + expect(db.transaction.create).toHaveBeenCalledWith({ + data: expect.objectContaining({ + signedAddresses: [], + }), + }); + expect(submitTxMock).not.toHaveBeenCalled(); + }); + + it("submits single-signer transactions without creating a pending row", async () => { + const db = makeDb(); + + await expect( + createPendingMultisigTransaction(db, { + ...baseArgs, + wallet: { numRequiredSigners: 1, type: "atLeast" }, + }), + ).resolves.toBe("submitted-hash"); + + 
expect(submitTxMock).toHaveBeenCalledWith("tx-cbor"); + expect(db.transaction.create).not.toHaveBeenCalled(); + }); +}); diff --git a/src/__tests__/createWallet.bot.test.ts b/src/__tests__/createWallet.bot.test.ts new file mode 100644 index 00000000..5611e0ce --- /dev/null +++ b/src/__tests__/createWallet.bot.test.ts @@ -0,0 +1,454 @@ +import { beforeAll, beforeEach, describe, expect, it, jest } from "@jest/globals"; +import type { NextApiRequest, NextApiResponse } from "next"; +import { BotWalletRole } from "@prisma/client"; +import { createMockResponse, makeBearerAuth, makeBotJwtPayload } from "./apiTestUtils"; + +const addCorsHeadersMock = jest.fn<(res: NextApiResponse) => void>(); +const corsMock = jest.fn<(req: NextApiRequest, res: NextApiResponse) => Promise>(); +const applyRateLimitMock = jest.fn<(req: NextApiRequest, res: NextApiResponse) => boolean>(); +const applyBotRateLimitMock = jest.fn<(req: NextApiRequest, res: NextApiResponse, botId: string, limit?: number) => boolean>(); +const enforceBodySizeMock = jest.fn<(req: NextApiRequest, res: NextApiResponse, maxBytes: number) => boolean>(); +const verifyJwtMock: jest.Mock = jest.fn(); +const isBotJwtMock: jest.Mock = jest.fn(); +const parseScopeMock: jest.Mock = jest.fn(); +const scopeIncludesMock: jest.Mock = jest.fn(); +const resolvePaymentKeyHashMock: jest.Mock = jest.fn(); +const resolveStakeKeyHashMock: jest.Mock = jest.fn(); +const serializeNativeScriptMock: jest.Mock = jest.fn(); +const findBotUserMock: jest.Mock = jest.fn(); +const createWalletMock: jest.Mock = jest.fn(); +const upsertWalletAccessMock: jest.Mock = jest.fn(); +const getScriptMock: jest.Mock = jest.fn(); + +jest.mock("@/lib/cors", () => ({ + __esModule: true, + addCorsCacheBustingHeaders: addCorsHeadersMock, + cors: corsMock, +}), { virtual: true }); + +jest.mock("@/lib/security/requestGuards", () => ({ + __esModule: true, + applyRateLimit: applyRateLimitMock, + applyBotRateLimit: applyBotRateLimitMock, + enforceBodySize: 
enforceBodySizeMock, +}), { virtual: true }); + +jest.mock("@/lib/verifyJwt", () => ({ + __esModule: true, + verifyJwt: verifyJwtMock, + isBotJwt: isBotJwtMock, +}), { virtual: true }); + +jest.mock("@/lib/auth/botKey", () => ({ + __esModule: true, + parseScope: parseScopeMock, + scopeIncludes: scopeIncludesMock, +}), { virtual: true }); + +jest.mock("@meshsdk/core", () => ({ + __esModule: true, + resolvePaymentKeyHash: resolvePaymentKeyHashMock, + resolveStakeKeyHash: resolveStakeKeyHashMock, + serializeNativeScript: serializeNativeScriptMock, +}), { virtual: true }); + +jest.mock("@/utils/multisigSDK", () => ({ + __esModule: true, + MultisigWallet: class { + getScript() { + return getScriptMock(); + } + }, +}), { virtual: true }); + +jest.mock("@/server/db", () => ({ + __esModule: true, + db: { + botUser: { findUnique: findBotUserMock }, + wallet: { create: createWalletMock }, + walletBotAccess: { upsert: upsertWalletAccessMock }, + }, +}), { virtual: true }); + +let handler: (req: NextApiRequest, res: NextApiResponse) => Promise; + +beforeAll(async () => { + ({ default: handler } = await import("../pages/api/v1/createWallet")); +}); + +beforeEach(() => { + jest.clearAllMocks(); + applyRateLimitMock.mockReturnValue(true); + applyBotRateLimitMock.mockReturnValue(true); + enforceBodySizeMock.mockReturnValue(true); + corsMock.mockResolvedValue(undefined); + verifyJwtMock.mockReturnValue(makeBotJwtPayload()); + isBotJwtMock.mockReturnValue(true); + parseScopeMock.mockReturnValue(["multisig:create", "multisig:read"]); + scopeIncludesMock.mockReturnValue(true); + resolvePaymentKeyHashMock.mockReturnValue("payment-hash"); + resolveStakeKeyHashMock.mockReturnValue("stake-hash"); + serializeNativeScriptMock.mockReturnValue({ + scriptCbor: "explicit-script-cbor", + address: "addr_explicit_script", + }); + getScriptMock.mockReturnValue({ scriptCbor: "script-cbor", address: "addr_wallet_script" }); + (findBotUserMock as any).mockResolvedValue({ id: "bot-test-id", botKey: { 
scope: JSON.stringify(["multisig:create"]) } }); + (createWalletMock as any).mockResolvedValue({ id: "wallet-1", name: "Bot Wallet" }); + (upsertWalletAccessMock as any).mockResolvedValue({ role: BotWalletRole.cosigner }); +}); + +describe("createWallet bot API", () => { + it("returns 400 for invalid signer address", async () => { + resolvePaymentKeyHashMock.mockImplementation(() => { + throw new Error("bad address"); + }); + const req = { + method: "POST", + headers: makeBearerAuth(), + body: { + name: "Wallet", + signersAddresses: ["invalid"], + }, + } as unknown as NextApiRequest; + const res = createMockResponse(); + await handler(req, res); + expect(res.status).toHaveBeenCalledWith(400); + }); + + it("creates wallet and bot access for valid bot payload", async () => { + const req = { + method: "POST", + headers: makeBearerAuth(), + body: { + name: "Wallet", + signersAddresses: ["addr_test1qpsigner0000000000000000000000000000000000"], + signersDescriptions: ["Signer 1"], + }, + } as unknown as NextApiRequest; + const res = createMockResponse(); + await handler(req, res); + expect(createWalletMock).toHaveBeenCalled(); + expect(upsertWalletAccessMock).toHaveBeenCalledWith(expect.objectContaining({ + create: expect.objectContaining({ role: BotWalletRole.cosigner }), + })); + expect(serializeNativeScriptMock).toHaveBeenCalled(); + expect(getScriptMock).not.toHaveBeenCalled(); + expect(res.status).toHaveBeenCalledWith(201); + expect(res.json).toHaveBeenCalledWith({ + walletId: "wallet-1", + address: "addr_explicit_script", + name: "Bot Wallet", + }); + }); + + it("preserves signer input order for legacy payment script", async () => { + resolvePaymentKeyHashMock + .mockReturnValueOnce("hash-2") + .mockReturnValueOnce("hash-1"); + const req = { + method: "POST", + headers: makeBearerAuth(), + body: { + name: "Wallet", + signersAddresses: [ + "addr_test1qpsigner0000000000000000000000000000000000", + "addr_test1qpsigner1111111111111111111111111111111111", + ], + 
numRequiredSigners: 2, + }, + } as unknown as NextApiRequest; + const res = createMockResponse(); + + await handler(req, res); + + expect(serializeNativeScriptMock).toHaveBeenCalledWith( + { + type: "atLeast", + required: 2, + scripts: [ + { type: "sig", keyHash: "hash-2" }, + { type: "sig", keyHash: "hash-1" }, + ], + }, + undefined, + 1, + true, + ); + expect(res.status).toHaveBeenCalledWith(201); + }); + + it("creates wallet from explicit payment native script", async () => { + const req = { + method: "POST", + headers: makeBearerAuth(), + body: { + name: "Wallet", + signersAddresses: ["addr_test1qpsigner0000000000000000000000000000000000"], + scriptType: "all", + paymentNativeScript: { + type: "all", + scripts: [ + { + type: "atLeast", + required: 1, + scripts: [{ type: "sig", keyHash: "payment-hash" }], + }, + ], + }, + }, + } as unknown as NextApiRequest; + const res = createMockResponse(); + + await handler(req, res); + + expect(serializeNativeScriptMock).toHaveBeenCalled(); + expect(getScriptMock).not.toHaveBeenCalled(); + expect(createWalletMock).toHaveBeenCalledWith( + expect.objectContaining({ + data: expect.objectContaining({ + scriptCbor: "explicit-script-cbor", + type: "all", + numRequiredSigners: 1, + }), + }), + ); + expect(res.status).toHaveBeenCalledWith(201); + expect(res.json).toHaveBeenCalledWith({ + walletId: "wallet-1", + address: "addr_explicit_script", + name: "Bot Wallet", + }); + }); + + it("derives type=all from explicit payment script root", async () => { + const req = { + method: "POST", + headers: makeBearerAuth(), + body: { + name: "Wallet", + signersAddresses: ["addr_test1qpsigner0000000000000000000000000000000000"], + paymentNativeScript: { + type: "all", + scripts: [ + { + type: "atLeast", + required: 1, + scripts: [{ type: "sig", keyHash: "payment-hash" }], + }, + ], + }, + }, + } as unknown as NextApiRequest; + const res = createMockResponse(); + + await handler(req, res); + + expect(createWalletMock).toHaveBeenCalledWith( + 
expect.objectContaining({ + data: expect.objectContaining({ + type: "all", + numRequiredSigners: 1, + }), + }), + ); + expect(res.status).toHaveBeenCalledWith(201); + }); + + it("persists the computed threshold for explicit hierarchical scripts", async () => { + resolvePaymentKeyHashMock + .mockReturnValueOnce("hash-1") + .mockReturnValueOnce("hash-2") + .mockReturnValueOnce("hash-3"); + const req = { + method: "POST", + headers: makeBearerAuth(), + body: { + name: "Wallet", + signersAddresses: [ + "addr_test1qpsigner0000000000000000000000000000000000", + "addr_test1qpsigner1111111111111111111111111111111111", + "addr_test1qpsigner2222222222222222222222222222222222", + ], + scriptType: "all", + paymentNativeScript: { + type: "all", + scripts: [ + { + type: "atLeast", + required: 2, + scripts: [ + { type: "sig", keyHash: "hash-1" }, + { type: "sig", keyHash: "hash-2" }, + { type: "sig", keyHash: "hash-3" }, + ], + }, + ], + }, + }, + } as unknown as NextApiRequest; + const res = createMockResponse(); + + await handler(req, res); + + expect(createWalletMock).toHaveBeenCalledWith( + expect.objectContaining({ + data: expect.objectContaining({ + type: "all", + numRequiredSigners: 2, + }), + }), + ); + expect(res.status).toHaveBeenCalledWith(201); + }); + + it("keeps flat all wallets without explicit scripts as all-of-N metadata", async () => { + const req = { + method: "POST", + headers: makeBearerAuth(), + body: { + name: "Wallet", + signersAddresses: [ + "addr_test1qpsigner0000000000000000000000000000000000", + "addr_test1qpsigner1111111111111111111111111111111111", + ], + scriptType: "all", + }, + } as unknown as NextApiRequest; + const res = createMockResponse(); + + await handler(req, res); + + expect(createWalletMock).toHaveBeenCalledWith( + expect.objectContaining({ + data: expect.objectContaining({ + type: "all", + numRequiredSigners: null, + }), + }), + ); + expect(res.status).toHaveBeenCalledWith(201); + }); + + it("accepts explicit hierarchical script with 
inner any", async () => { + const req = { + method: "POST", + headers: makeBearerAuth(), + body: { + name: "Wallet", + signersAddresses: [ + "addr_test1qpsigner0000000000000000000000000000000000", + "addr_test1qpsigner1111111111111111111111111111111111", + ], + paymentNativeScript: { + type: "all", + scripts: [ + { + type: "any", + scripts: [ + { type: "sig", keyHash: "payment-hash" }, + { type: "sig", keyHash: "payment-hash" }, + ], + }, + ], + }, + }, + } as unknown as NextApiRequest; + const res = createMockResponse(); + + await handler(req, res); + + expect(res.status).toHaveBeenCalledWith(201); + }); + + it("accepts explicit hierarchical script with inner all", async () => { + const req = { + method: "POST", + headers: makeBearerAuth(), + body: { + name: "Wallet", + signersAddresses: ["addr_test1qpsigner0000000000000000000000000000000000"], + paymentNativeScript: { + type: "all", + scripts: [ + { + type: "all", + scripts: [{ type: "sig", keyHash: "payment-hash" }], + }, + ], + }, + }, + } as unknown as NextApiRequest; + const res = createMockResponse(); + + await handler(req, res); + + expect(res.status).toHaveBeenCalledWith(201); + }); + + it("returns 400 for malformed payment native script", async () => { + const req = { + method: "POST", + headers: makeBearerAuth(), + body: { + name: "Wallet", + signersAddresses: ["addr_test1qpsigner0000000000000000000000000000000000"], + paymentNativeScript: { + type: "all", + scripts: [], + }, + }, + } as unknown as NextApiRequest; + const res = createMockResponse(); + + await handler(req, res); + + expect(res.status).toHaveBeenCalledWith(400); + }); + + it("returns 400 when explicit payment native script root is not all", async () => { + const req = { + method: "POST", + headers: makeBearerAuth(), + body: { + name: "Wallet", + signersAddresses: ["addr_test1qpsigner0000000000000000000000000000000000"], + paymentNativeScript: { + type: "atLeast", + required: 1, + scripts: [{ type: "sig", keyHash: "payment-hash" }], + }, + 
}, + } as unknown as NextApiRequest; + const res = createMockResponse(); + + await handler(req, res); + + expect(res.status).toHaveBeenCalledWith(400); + }); + + it("returns 400 when payment native script hashes do not match signers", async () => { + const req = { + method: "POST", + headers: makeBearerAuth(), + body: { + name: "Wallet", + signersAddresses: ["addr_test1qpsigner0000000000000000000000000000000000"], + paymentNativeScript: { + type: "all", + scripts: [ + { + type: "sig", + keyHash: "other-hash", + }, + ], + }, + }, + } as unknown as NextApiRequest; + const res = createMockResponse(); + + await handler(req, res); + + expect(res.status).toHaveBeenCalledWith(400); + }); +}); diff --git a/src/__tests__/freeUtxos.bot.test.ts b/src/__tests__/freeUtxos.bot.test.ts new file mode 100644 index 00000000..62a7407b --- /dev/null +++ b/src/__tests__/freeUtxos.bot.test.ts @@ -0,0 +1,208 @@ +import { beforeAll, beforeEach, describe, expect, it, jest } from "@jest/globals"; +import type { NextApiRequest, NextApiResponse } from "next"; +import { BOT_TEST_ADDRESS, createMockResponse, makeBearerAuth, makeBotJwtPayload } from "./apiTestUtils"; + +const addCorsHeadersMock = jest.fn<(res: NextApiResponse) => void>(); +const corsMock = jest.fn<(req: NextApiRequest, res: NextApiResponse) => Promise>(); +const applyRateLimitMock = jest.fn<(req: NextApiRequest, res: NextApiResponse) => boolean>(); +const applyBotRateLimitMock = jest.fn<(req: NextApiRequest, res: NextApiResponse, botId: string) => boolean>(); +const verifyJwtMock: jest.Mock = jest.fn(); +const isBotJwtMock: jest.Mock = jest.fn(); +const getBotWalletAccessMock: jest.Mock = jest.fn(); +const assertBotWalletAccessMock: jest.Mock = jest.fn(); +const findPendingTransactionsMock: jest.Mock = jest.fn(); +const buildMultisigWalletMock: jest.Mock = jest.fn(); +const addressToNetworkMock: jest.Mock = jest.fn(); +const getProviderMock: jest.Mock = jest.fn(); +const cachedFetchAddressUTxOsMock: jest.Mock = jest.fn(); +const 
serializeNativeScriptMock: jest.Mock = jest.fn(); +const decodeNativeScriptFromCborMock: jest.Mock = jest.fn(); +const decodedToNativeScriptMock: jest.Mock = jest.fn(); + +jest.mock("@/lib/cors", () => ({ + __esModule: true, + addCorsCacheBustingHeaders: addCorsHeadersMock, + cors: corsMock, +}), { virtual: true }); + +jest.mock("@/lib/security/requestGuards", () => ({ + __esModule: true, + applyRateLimit: applyRateLimitMock, + applyBotRateLimit: applyBotRateLimitMock, +}), { virtual: true }); + +jest.mock("@/lib/verifyJwt", () => ({ + __esModule: true, + verifyJwt: verifyJwtMock, + isBotJwt: isBotJwtMock, +}), { virtual: true }); + +jest.mock("@/lib/auth/botAccess", () => ({ + __esModule: true, + getBotWalletAccess: getBotWalletAccessMock, + assertBotWalletAccess: assertBotWalletAccessMock, +}), { virtual: true }); + +jest.mock("@/server/db", () => ({ + __esModule: true, + db: { + transaction: { findMany: findPendingTransactionsMock }, + }, +}), { virtual: true }); + +jest.mock("@/utils/common", () => ({ + __esModule: true, + buildMultisigWallet: buildMultisigWalletMock, +}), { virtual: true }); + +jest.mock("@/utils/multisigSDK", () => ({ + __esModule: true, + addressToNetwork: addressToNetworkMock, +}), { virtual: true }); + +jest.mock("@/utils/get-provider", () => ({ + __esModule: true, + getProvider: getProviderMock, +}), { virtual: true }); + +jest.mock("@/utils/blockchain-cache", () => ({ + __esModule: true, + cachedFetchAddressUTxOs: cachedFetchAddressUTxOsMock, +}), { virtual: true }); + +jest.mock("@/utils/nativeScriptUtils", () => ({ + __esModule: true, + decodeNativeScriptFromCbor: decodeNativeScriptFromCborMock, + decodedToNativeScript: decodedToNativeScriptMock, +}), { virtual: true }); + +jest.mock("@meshsdk/core", () => ({ + __esModule: true, + serializeNativeScript: serializeNativeScriptMock, +}), { virtual: true }); + +jest.mock("@/server/api/root", () => ({ + __esModule: true, + createCaller: () => ({ + transaction: { getPendingTransactions: 
jest.fn() }, + wallet: { getWallet: jest.fn() }, + }), +}), { virtual: true }); + +jest.mock("@/lib/security/rateLimit", () => ({ + __esModule: true, + getClientIP: () => "127.0.0.1", +}), { virtual: true }); + +let handler: (req: NextApiRequest, res: NextApiResponse) => Promise; + +beforeAll(async () => { + ({ default: handler } = await import("../pages/api/v1/freeUtxos")); +}); + +beforeEach(() => { + jest.clearAllMocks(); + applyRateLimitMock.mockReturnValue(true); + applyBotRateLimitMock.mockReturnValue(true); + corsMock.mockResolvedValue(undefined); + verifyJwtMock.mockReturnValue(makeBotJwtPayload()); + isBotJwtMock.mockReturnValue(true); + (getBotWalletAccessMock as any).mockResolvedValue({ allowed: true, role: "cosigner" }); + (findPendingTransactionsMock as any).mockResolvedValue([]); + (assertBotWalletAccessMock as any).mockResolvedValue({ wallet: { id: "wallet-1" }, role: "cosigner" }); + buildMultisigWalletMock.mockReturnValue({ + getScript: () => ({ address: "addr_test1walletscript" }), + }); + decodeNativeScriptFromCborMock.mockReturnValue({ any: "decoded" }); + decodedToNativeScriptMock.mockReturnValue({ type: "all", scripts: [] }); + serializeNativeScriptMock.mockReturnValue({ address: "addr_test1canonicalwalletscript" }); + addressToNetworkMock.mockReturnValue(0); + getProviderMock.mockReturnValue({ get: jest.fn() }); + (cachedFetchAddressUTxOsMock as any).mockResolvedValue([ + { input: { txHash: "a", outputIndex: 0 } }, + ]); +}); + +describe("freeUtxos bot API", () => { + it("returns 403 when bot lacks wallet access", async () => { + (getBotWalletAccessMock as any).mockResolvedValue({ allowed: false }); + const req = { + method: "GET", + headers: makeBearerAuth(), + query: { walletId: "wallet-1", address: BOT_TEST_ADDRESS }, + } as unknown as NextApiRequest; + const res = createMockResponse(); + await handler(req, res); + expect(res.status).toHaveBeenCalledWith(403); + }); + + it("returns free utxos for authorized bot", async () => { + const req 
= { + method: "GET", + headers: makeBearerAuth(), + query: { walletId: "wallet-1", address: BOT_TEST_ADDRESS }, + } as unknown as NextApiRequest; + const res = createMockResponse(); + await handler(req, res); + expect(cachedFetchAddressUTxOsMock).toHaveBeenCalled(); + expect(res.status).toHaveBeenCalledWith(200); + expect(res.json).toHaveBeenCalledWith([{ input: { txHash: "a", outputIndex: 0 } }]); + }); + + it("falls back to canonical scriptCbor when multisig wallet is unavailable", async () => { + buildMultisigWalletMock.mockReturnValue(undefined); + (assertBotWalletAccessMock as any).mockResolvedValue({ + wallet: { + id: "wallet-1", + scriptCbor: "canonical-cbor", + signersAddresses: [BOT_TEST_ADDRESS], + stakeCredentialHash: null, + }, + role: "cosigner", + }); + + const req = { + method: "GET", + headers: makeBearerAuth(), + query: { walletId: "wallet-1", address: BOT_TEST_ADDRESS }, + } as unknown as NextApiRequest; + const res = createMockResponse(); + + await handler(req, res); + + expect(decodeNativeScriptFromCborMock).toHaveBeenCalledWith("canonical-cbor"); + expect(serializeNativeScriptMock).toHaveBeenCalled(); + expect(cachedFetchAddressUTxOsMock).toHaveBeenCalled(); + expect(res.status).toHaveBeenCalledWith(200); + }); + + it("returns clear 500 when canonical script fallback cannot decode", async () => { + buildMultisigWalletMock.mockReturnValue(undefined); + decodeNativeScriptFromCborMock.mockImplementation(() => { + throw new Error("invalid canonical cbor"); + }); + (assertBotWalletAccessMock as any).mockResolvedValue({ + wallet: { + id: "wallet-1", + scriptCbor: "broken-cbor", + signersAddresses: [BOT_TEST_ADDRESS], + stakeCredentialHash: null, + }, + role: "cosigner", + }); + + const req = { + method: "GET", + headers: makeBearerAuth(), + query: { walletId: "wallet-1", address: BOT_TEST_ADDRESS }, + } as unknown as NextApiRequest; + const res = createMockResponse(); + + await handler(req, res); + + expect(res.status).toHaveBeenCalledWith(500); + 
expect(res.json).toHaveBeenCalledWith({ + error: "Wallet script address resolution failed: invalid canonical cbor", + }); + }); +}); diff --git a/src/__tests__/governanceActiveProposals.test.ts b/src/__tests__/governanceActiveProposals.test.ts index 3fdfb895..450b5b0b 100644 --- a/src/__tests__/governanceActiveProposals.test.ts +++ b/src/__tests__/governanceActiveProposals.test.ts @@ -5,12 +5,12 @@ const addCorsCacheBustingHeadersMock = jest.fn<(res: NextApiResponse) => void>() const corsMock = jest.fn<(req: NextApiRequest, res: NextApiResponse) => Promise>(); const applyRateLimitMock = jest.fn<(req: NextApiRequest, res: NextApiResponse) => boolean>(); const applyBotRateLimitMock = jest.fn<(req: NextApiRequest, res: NextApiResponse, botId: string) => boolean>(); -const verifyJwtMock = jest.fn(); -const isBotJwtMock = jest.fn(); -const findBotUserMock = jest.fn(); -const providerGetMock = jest.fn(); -const parseScopeMock = jest.fn(); -const scopeIncludesMock = jest.fn(); +const verifyJwtMock = jest.fn<() => unknown>(); +const isBotJwtMock = jest.fn<() => boolean>(); +const findBotUserMock = jest.fn<() => Promise>(); +const providerGetMock = jest.fn<(path: string) => Promise>(); +const parseScopeMock = jest.fn<(scope: string) => string[]>(); +const scopeIncludesMock = jest.fn<(scopes: string[], required: string) => boolean>(); const getProposalStatusMock = jest.fn(); jest.mock( @@ -118,8 +118,8 @@ beforeEach(() => { corsMock.mockResolvedValue(undefined); verifyJwtMock.mockReturnValue({ address: "addr_test1", botId: "bot-1", type: "bot" }); isBotJwtMock.mockReturnValue(true); - parseScopeMock.mockImplementation((scope: string) => JSON.parse(scope)); - scopeIncludesMock.mockImplementation((scopes: string[], required: string) => + parseScopeMock.mockImplementation((scope) => JSON.parse(scope) as string[]); + scopeIncludesMock.mockImplementation((scopes, required) => scopes.includes(required), ); getProposalStatusMock.mockImplementation((details: any) => { @@ -146,7 
+146,7 @@ describe("governanceActiveProposals API", () => { }); it("returns only active proposals and tolerates metadata 404", async () => { - providerGetMock.mockImplementation(async (path: string) => { + providerGetMock.mockImplementation(async (path) => { if (path.startsWith("governance/proposals?")) { return [ { @@ -214,7 +214,7 @@ describe("governanceActiveProposals API", () => { await handler(req, res); expect(res.status).toHaveBeenCalledWith(200); - const payload = res.json.mock.calls[0]?.[0] as any; + const payload = (res.json as jest.Mock).mock.calls[0]?.[0] as any; expect(Array.isArray(payload.proposals)).toBe(true); expect(payload.proposals).toHaveLength(1); expect(payload.proposals[0]).toMatchObject({ diff --git a/src/__tests__/multisigSDK.test.ts b/src/__tests__/multisigSDK.test.ts index dbef91da..487e8cc9 100644 --- a/src/__tests__/multisigSDK.test.ts +++ b/src/__tests__/multisigSDK.test.ts @@ -39,9 +39,9 @@ describe('MultisigWallet', () => { ]; const testWallet = new MultisigWallet('Test', unsortedKeys); - expect(testWallet.keys[0].keyHash).toBe('aaaa'); - expect(testWallet.keys[1].keyHash).toBe('mmmm'); - expect(testWallet.keys[2].keyHash).toBe('zzzz'); + expect(testWallet.keys[0]!.keyHash).toBe('aaaa'); + expect(testWallet.keys[1]!.keyHash).toBe('mmmm'); + expect(testWallet.keys[2]!.keyHash).toBe('zzzz'); }); it('should filter out invalid keys', () => { @@ -54,7 +54,7 @@ describe('MultisigWallet', () => { const testWallet = new MultisigWallet('Test', keysWithInvalid); expect(testWallet.keys).toHaveLength(1); - expect(testWallet.keys[0].keyHash).toBe(mockKeyHashes.payment1); + expect(testWallet.keys[0]!.keyHash).toBe(mockKeyHashes.payment1); }); it('should use default values when optional parameters are not provided', () => { @@ -86,7 +86,7 @@ describe('MultisigWallet', () => { it('should return drep keys (role 3)', () => { const drepKeys = wallet.getKeysByRole(3); expect(drepKeys).toHaveLength(1); - expect(drepKeys?.[0].role).toBe(3); + 
expect(drepKeys?.[0]!.role).toBe(3); }); it('should return undefined for non-existent role', () => { diff --git a/src/__tests__/nativeScript.bot.test.ts b/src/__tests__/nativeScript.bot.test.ts new file mode 100644 index 00000000..f0d55bc5 --- /dev/null +++ b/src/__tests__/nativeScript.bot.test.ts @@ -0,0 +1,203 @@ +import { beforeAll, beforeEach, describe, expect, it, jest } from "@jest/globals"; +import type { NextApiRequest, NextApiResponse } from "next"; +import { BOT_TEST_ADDRESS, createMockResponse, makeBearerAuth, makeBotJwtPayload } from "./apiTestUtils"; + +const addCorsHeadersMock = jest.fn<(res: NextApiResponse) => void>(); +const corsMock = jest.fn<(req: NextApiRequest, res: NextApiResponse) => Promise>(); +const applyRateLimitMock = jest.fn<(req: NextApiRequest, res: NextApiResponse) => boolean>(); +const verifyJwtMock: jest.Mock = jest.fn(); +const createCallerMock: jest.Mock = jest.fn(); +const buildMultisigWalletMock: jest.Mock = jest.fn(); +const decodeNativeScriptFromCborMock: jest.Mock = jest.fn(); +const decodedToNativeScriptMock: jest.Mock = jest.fn(); + +jest.mock("@/lib/cors", () => ({ + __esModule: true, + addCorsCacheBustingHeaders: addCorsHeadersMock, + cors: corsMock, +}), { virtual: true }); + +jest.mock("@/lib/security/requestGuards", () => ({ + __esModule: true, + applyRateLimit: applyRateLimitMock, +}), { virtual: true }); + +jest.mock("@/lib/verifyJwt", () => ({ + __esModule: true, + verifyJwt: verifyJwtMock, +}), { virtual: true }); + +jest.mock("@/utils/common", () => ({ + __esModule: true, + buildMultisigWallet: buildMultisigWalletMock, +}), { virtual: true }); + +jest.mock("@/server/api/root", () => ({ + __esModule: true, + createCaller: createCallerMock, +}), { virtual: true }); + +jest.mock("@/server/db", () => ({ + __esModule: true, + db: {}, +}), { virtual: true }); + +jest.mock("@/lib/security/rateLimit", () => ({ + __esModule: true, + getClientIP: () => "127.0.0.1", +}), { virtual: true }); + 
+jest.mock("@/utils/nativeScriptUtils", () => ({ + __esModule: true, + decodeNativeScriptFromCbor: decodeNativeScriptFromCborMock, + decodedToNativeScript: decodedToNativeScriptMock, +}), { virtual: true }); + +let handler: (req: NextApiRequest, res: NextApiResponse) => Promise; + +beforeAll(async () => { + ({ default: handler } = await import("../pages/api/v1/nativeScript")); +}); + +beforeEach(() => { + jest.clearAllMocks(); + applyRateLimitMock.mockReturnValue(true); + corsMock.mockResolvedValue(undefined); + verifyJwtMock.mockReturnValue(makeBotJwtPayload()); + decodeNativeScriptFromCborMock.mockImplementation((cbor) => ({ cbor })); + decodedToNativeScriptMock.mockImplementation((decoded) => ({ + type: "decoded", + cbor: (decoded as { cbor: string }).cbor, + })); + createCallerMock.mockReturnValue({ + wallet: { + getWallet: (jest.fn() as any).mockResolvedValue({ + id: "wallet-1", + scriptCbor: "canonical-payment-cbor", + rawImportBodies: null, + }), + }, + }); + buildMultisigWalletMock.mockReturnValue({ + getAvailableTypes: () => ["payment"], + buildScript: () => ({ type: "all", scripts: [] }), + }); +}); + +describe("nativeScript bot-runnable API", () => { + it("returns 403 when address mismatches jwt address", async () => { + const req = { + method: "GET", + headers: makeBearerAuth(), + query: { walletId: "wallet-1", address: "addr_test1wrong" }, + } as unknown as NextApiRequest; + const res = createMockResponse(); + await handler(req, res); + expect(res.status).toHaveBeenCalledWith(403); + }); + + it("returns native scripts for matching bot address", async () => { + const req = { + method: "GET", + headers: makeBearerAuth(), + query: { walletId: "wallet-1", address: BOT_TEST_ADDRESS }, + } as unknown as NextApiRequest; + const res = createMockResponse(); + await handler(req, res); + expect(res.status).toHaveBeenCalledWith(200); + expect(res.json).toHaveBeenCalledWith([ + { + type: "payment", + script: { type: "decoded", cbor: "canonical-payment-cbor" }, + }, 
+ ]); + expect(buildMultisigWalletMock).not.toHaveBeenCalled(); + }); + + it("returns payment and stake scripts from canonical sources", async () => { + createCallerMock.mockReturnValue({ + wallet: { + getWallet: (jest.fn() as any).mockResolvedValue({ + id: "wallet-1", + scriptCbor: "canonical-payment-cbor", + rawImportBodies: { + multisig: { + stake_script: "canonical-stake-cbor", + }, + }, + }), + }, + }); + + const req = { + method: "GET", + headers: makeBearerAuth(), + query: { walletId: "wallet-1", address: BOT_TEST_ADDRESS }, + } as unknown as NextApiRequest; + const res = createMockResponse(); + + await handler(req, res); + + expect(res.status).toHaveBeenCalledWith(200); + expect(res.json).toHaveBeenCalledWith([ + { + type: "payment", + script: { type: "decoded", cbor: "canonical-payment-cbor" }, + }, + { + type: "stake", + script: { type: "decoded", cbor: "canonical-stake-cbor" }, + }, + ]); + expect(buildMultisigWalletMock).not.toHaveBeenCalled(); + }); + + it("falls back to sdk wallet reconstruction when canonical decode fails", async () => { + decodeNativeScriptFromCborMock.mockImplementation(() => { + throw new Error("decode failed"); + }); + + const req = { + method: "GET", + headers: makeBearerAuth(), + query: { walletId: "wallet-1", address: BOT_TEST_ADDRESS }, + } as unknown as NextApiRequest; + const res = createMockResponse(); + + await handler(req, res); + + expect(buildMultisigWalletMock).toHaveBeenCalledTimes(1); + expect(res.status).toHaveBeenCalledWith(200); + expect(res.json).toHaveBeenCalledWith([ + { type: "payment", script: { type: "all", scripts: [] } }, + ]); + }); + + it("returns 500 when canonical scripts are unavailable and wallet cannot be constructed", async () => { + decodeNativeScriptFromCborMock.mockImplementation(() => { + throw new Error("decode failed"); + }); + createCallerMock.mockReturnValue({ + wallet: { + getWallet: (jest.fn() as any).mockResolvedValue({ + id: "wallet-1", + scriptCbor: "", + rawImportBodies: null, + }), 
+ }, + }); + buildMultisigWalletMock.mockReturnValue(undefined); + + const req = { + method: "GET", + headers: makeBearerAuth(), + query: { walletId: "wallet-1", address: BOT_TEST_ADDRESS }, + } as unknown as NextApiRequest; + const res = createMockResponse(); + + await handler(req, res); + + expect(res.status).toHaveBeenCalledWith(500); + expect(res.json).toHaveBeenCalledWith({ error: "Wallet could not be constructed" }); + }); +}); diff --git a/src/__tests__/normalizePoolId.test.ts b/src/__tests__/normalizePoolId.test.ts new file mode 100644 index 00000000..ae050603 --- /dev/null +++ b/src/__tests__/normalizePoolId.test.ts @@ -0,0 +1,10 @@ +import { describe, expect, it } from "@jest/globals"; +import { resolvePoolId } from "@meshsdk/core"; +import { normalizePoolIdForDelegation } from "@/lib/server/normalizePoolId"; + +describe("normalizePoolIdForDelegation", () => { + it("normalizes 56-char hex", () => { + const hex = "0".repeat(56); + expect(normalizePoolIdForDelegation(hex)).toBe(resolvePoolId(hex)); + }); +}); diff --git a/src/__tests__/pendingTransactions.bot.test.ts b/src/__tests__/pendingTransactions.bot.test.ts new file mode 100644 index 00000000..f2a45483 --- /dev/null +++ b/src/__tests__/pendingTransactions.bot.test.ts @@ -0,0 +1,98 @@ +import { beforeAll, beforeEach, describe, expect, it, jest } from "@jest/globals"; +import type { NextApiRequest, NextApiResponse } from "next"; +import { BOT_TEST_ADDRESS, createMockResponse, makeBearerAuth, makeBotJwtPayload } from "./apiTestUtils"; + +const addCorsHeadersMock = jest.fn<(res: NextApiResponse) => void>(); +const corsMock = jest.fn<(req: NextApiRequest, res: NextApiResponse) => Promise>(); +const applyRateLimitMock = jest.fn<(req: NextApiRequest, res: NextApiResponse) => boolean>(); +const applyBotRateLimitMock = jest.fn<(req: NextApiRequest, res: NextApiResponse, botId: string) => boolean>(); +const verifyJwtMock: jest.Mock = jest.fn(); +const isBotJwtMock: jest.Mock = jest.fn(); +const 
getBotWalletAccessMock: jest.Mock = jest.fn(); +const findPendingTransactionsMock: jest.Mock = jest.fn(); + +jest.mock("@/lib/cors", () => ({ + __esModule: true, + addCorsCacheBustingHeaders: addCorsHeadersMock, + cors: corsMock, +}), { virtual: true }); + +jest.mock("@/lib/security/requestGuards", () => ({ + __esModule: true, + applyRateLimit: applyRateLimitMock, + applyBotRateLimit: applyBotRateLimitMock, +}), { virtual: true }); + +jest.mock("@/lib/verifyJwt", () => ({ + __esModule: true, + verifyJwt: verifyJwtMock, + isBotJwt: isBotJwtMock, +}), { virtual: true }); + +jest.mock("@/lib/auth/botAccess", () => ({ + __esModule: true, + getBotWalletAccess: getBotWalletAccessMock, +}), { virtual: true }); + +jest.mock("@/server/db", () => ({ + __esModule: true, + db: { + transaction: { findMany: findPendingTransactionsMock }, + }, +}), { virtual: true }); + +jest.mock("@/server/api/root", () => ({ + __esModule: true, + createCaller: () => ({}), +}), { virtual: true }); + +jest.mock("@/lib/security/rateLimit", () => ({ + __esModule: true, + getClientIP: () => "127.0.0.1", +}), { virtual: true }); + +let handler: (req: NextApiRequest, res: NextApiResponse) => Promise; + +beforeAll(async () => { + ({ default: handler } = await import("../pages/api/v1/pendingTransactions")); +}); + +beforeEach(() => { + jest.clearAllMocks(); + applyRateLimitMock.mockReturnValue(true); + applyBotRateLimitMock.mockReturnValue(true); + corsMock.mockResolvedValue(undefined); + verifyJwtMock.mockReturnValue(makeBotJwtPayload()); + isBotJwtMock.mockReturnValue(true); + (getBotWalletAccessMock as any).mockResolvedValue({ allowed: true, role: "cosigner" }); + (findPendingTransactionsMock as any).mockResolvedValue([{ id: "tx-1" }]); +}); + +describe("pendingTransactions bot API", () => { + it("returns 403 when bot has no wallet access", async () => { + (getBotWalletAccessMock as any).mockResolvedValue({ allowed: false }); + const req = { + method: "GET", + headers: makeBearerAuth(), + query: { 
walletId: "w1", address: BOT_TEST_ADDRESS }, + } as unknown as NextApiRequest; + const res = createMockResponse(); + await handler(req, res); + expect(res.status).toHaveBeenCalledWith(403); + }); + + it("returns pending transactions when access is allowed", async () => { + const req = { + method: "GET", + headers: makeBearerAuth(), + query: { walletId: "w1", address: BOT_TEST_ADDRESS }, + } as unknown as NextApiRequest; + const res = createMockResponse(); + await handler(req, res); + expect(findPendingTransactionsMock).toHaveBeenCalledWith({ + where: { walletId: "w1", state: 0 }, + }); + expect(res.status).toHaveBeenCalledWith(200); + expect(res.json).toHaveBeenCalledWith([{ id: "tx-1" }]); + }); +}); diff --git a/src/__tests__/proxyAccess.test.ts b/src/__tests__/proxyAccess.test.ts new file mode 100644 index 00000000..611ba223 --- /dev/null +++ b/src/__tests__/proxyAccess.test.ts @@ -0,0 +1,110 @@ +import { beforeEach, describe, expect, it, jest } from "@jest/globals"; +import { BOT_TEST_ADDRESS, BOT_TEST_ID, makeBotJwtPayload } from "./apiTestUtils"; + +const isBotJwtMock: jest.Mock = jest.fn(); +const getBotWalletAccessMock: jest.Mock = jest.fn(); + +jest.mock("@/lib/verifyJwt", () => ({ + __esModule: true, + isBotJwt: isBotJwtMock, +}), { virtual: true }); + +jest.mock("@/lib/auth/botAccess", () => ({ + __esModule: true, + getBotWalletAccess: getBotWalletAccessMock, +}), { virtual: true }); + +const wallet = { + id: "wallet-1", + signersAddresses: [BOT_TEST_ADDRESS], +}; + +function createDb(walletRow: unknown = wallet) { + return { + wallet: { + findUnique: jest.fn(async () => walletRow), + }, + proxy: { + findFirst: jest.fn(), + }, + }; +} + +describe("proxyAccess", () => { + beforeEach(() => { + jest.clearAllMocks(); + }); + + it("allows observer bots to read proxies", async () => { + const { authorizeProxyReadForV1 } = await import("@/lib/server/proxyAccess"); + const db = createDb(); + isBotJwtMock.mockReturnValue(true); + (getBotWalletAccessMock as 
any).mockResolvedValue({ + allowed: true, + role: "observer", + }); + + await expect( + authorizeProxyReadForV1({ + db: db as never, + payload: makeBotJwtPayload(), + walletId: "wallet-1", + address: BOT_TEST_ADDRESS, + }), + ).resolves.toEqual({ wallet }); + + expect(getBotWalletAccessMock).toHaveBeenCalledWith( + db, + "wallet-1", + BOT_TEST_ID, + ); + }); + + it("rejects address mismatches before wallet access checks", async () => { + const { authorizeProxyReadForV1 } = await import("@/lib/server/proxyAccess"); + const db = createDb(); + + await expect( + authorizeProxyReadForV1({ + db: db as never, + payload: makeBotJwtPayload({ address: "addr_test_other" }), + walletId: "wallet-1", + address: BOT_TEST_ADDRESS, + }), + ).rejects.toMatchObject({ code: "ADDRESS_MISMATCH" }); + expect(db.wallet.findUnique).not.toHaveBeenCalled(); + }); + + it("allows human signers to read proxies", async () => { + const { authorizeProxyReadForV1 } = await import("@/lib/server/proxyAccess"); + const db = createDb(); + isBotJwtMock.mockReturnValue(false); + + await expect( + authorizeProxyReadForV1({ + db: db as never, + payload: { address: BOT_TEST_ADDRESS } as never, + walletId: "wallet-1", + address: BOT_TEST_ADDRESS, + }), + ).resolves.toEqual({ wallet }); + }); + + it("loads only active proxies for the requested wallet", async () => { + const { loadActiveProxyForWallet } = await import("@/lib/server/proxyAccess"); + const proxy = { id: "proxy-1", isActive: true }; + const db = createDb(); + (db.proxy.findFirst as any).mockResolvedValue(proxy); + + await expect( + loadActiveProxyForWallet({ + db: db as never, + walletId: "wallet-1", + proxyId: "proxy-1", + }), + ).resolves.toBe(proxy); + expect(db.proxy.findFirst).toHaveBeenCalledWith({ + where: { id: "proxy-1", walletId: "wallet-1", isActive: true }, + }); + }); +}); diff --git a/src/__tests__/proxyBotSelection.test.ts b/src/__tests__/proxyBotSelection.test.ts new file mode 100644 index 00000000..f746c6de --- /dev/null +++ 
b/src/__tests__/proxyBotSelection.test.ts @@ -0,0 +1,204 @@ +import { describe, expect, it } from "@jest/globals"; +import { + DREP_REGISTER_REQUIRED_LOVELACE, + normalizeJsonArtifact, + PROXY_ACTION_FEE_BUFFER_LOVELACE, + PROXY_ACTION_REQUIRED_LOVELACE, + selectAuthTokenRefs, + selectAuthTokenRefsWithMinLovelace, + selectDRepRegisterRefs, + selectSetupRefs, + splitProxyActionSelection, + type ScriptUtxo, +} from "../../scripts/ci/scenarios/steps/proxyBot"; + +const AUTH_TOKEN_ID = "policy.asset"; + +const mkUtxo = ( + lovelace: string, + txHash: string, + outputIndex = 0, + tokenQuantity?: string, + address = "addr_test_wallet", +): ScriptUtxo => ({ + input: { txHash, outputIndex }, + output: { + address, + amount: [ + { unit: "lovelace", quantity: lovelace }, + ...(tokenQuantity ? [{ unit: AUTH_TOKEN_ID, quantity: tokenQuantity }] : []), + ], + }, +}); + +describe("proxy bot UTxO selection", () => { + it("selects setup from wallet UTxOs and collateral from key-address UTxOs", () => { + const refs = selectSetupRefs({ + walletUtxos: [mkUtxo("20000000", "setup")], + collateralUtxos: [mkUtxo("6000000", "collateral", 0, undefined, "addr_test_signer_1")], + }); + + expect(refs.utxoRefs).toEqual([{ txHash: "setup", outputIndex: 0 }]); + expect(refs.collateralRef).toEqual({ txHash: "collateral", outputIndex: 0 }); + }); + + it("rejects setup when only wallet script UTxOs could act as collateral", () => { + expect(() => + selectSetupRefs({ + walletUtxos: [mkUtxo("20000000", "setup"), mkUtxo("6000000", "script-collateral")], + collateralUtxos: [], + }), + ).toThrow( + /bot payment-address collateral UTxO/, + ); + }); + + it("rejects auth-token selection without key-address collateral", () => { + expect(() => + selectAuthTokenRefs({ + walletUtxos: [mkUtxo("6000000", "token", 0, "1")], + collateralUtxos: [], + authTokenId: AUTH_TOKEN_ID, + }), + ).toThrow(/bot payment-address collateral UTxO/); + }); + + it("adds funding inputs for DRep register while keeping collateral 
separate", () => { + const refs = selectDRepRegisterRefs({ + walletUtxos: [ + mkUtxo("2000000", "token", 0, "1"), + mkUtxo("300000000", "funding-a"), + mkUtxo("230000000", "funding-b"), + ], + collateralUtxos: [mkUtxo("6000000", "collateral", 0, undefined, "addr_test_signer_1")], + authTokenId: AUTH_TOKEN_ID, + requiredLovelace: DREP_REGISTER_REQUIRED_LOVELACE + 20_000_000n, + }); + + expect(refs.utxoRefs).toEqual([ + { txHash: "token", outputIndex: 0 }, + { txHash: "funding-a", outputIndex: 0 }, + { txHash: "funding-b", outputIndex: 0 }, + ]); + expect(refs.collateralRef).toEqual({ txHash: "collateral", outputIndex: 0 }); + expect(refs.selectedLovelace).toBe(532_000_000n); + }); + + it("splits DRep register diagnostics away from JSON request refs", () => { + const selection = selectDRepRegisterRefs({ + walletUtxos: [ + mkUtxo("2000000", "token", 0, "1"), + mkUtxo("300000000", "funding-a"), + mkUtxo("230000000", "funding-b"), + ], + collateralUtxos: [mkUtxo("6000000", "collateral", 0, undefined, "addr_test_signer_1")], + authTokenId: AUTH_TOKEN_ID, + requiredLovelace: DREP_REGISTER_REQUIRED_LOVELACE + 20_000_000n, + }); + + const { requestRefs, selectionArtifacts } = splitProxyActionSelection(selection); + + expect(requestRefs).toEqual({ + utxoRefs: [ + { txHash: "token", outputIndex: 0 }, + { txHash: "funding-a", outputIndex: 0 }, + { txHash: "funding-b", outputIndex: 0 }, + ], + collateralRef: { txHash: "collateral", outputIndex: 0 }, + }); + expect(selectionArtifacts).toEqual({ + selectedLovelace: "532000000", + requiredLovelace: "525000000", + }); + expect(requestRefs).not.toHaveProperty("selectedLovelace"); + expect(JSON.stringify(requestRefs)).not.toContain("532000000"); + }); + + it("adds funding inputs for auth-token actions while keeping collateral separate", () => { + const selection = selectAuthTokenRefsWithMinLovelace({ + walletUtxos: [ + mkUtxo("1200000", "token", 0, "1"), + mkUtxo("2500000", "funding-a"), + mkUtxo("900000", "funding-b"), + ], + 
collateralUtxos: [mkUtxo("6000000", "collateral", 0, undefined, "addr_test_signer_1")], + authTokenId: AUTH_TOKEN_ID, + requiredLovelace: PROXY_ACTION_REQUIRED_LOVELACE + PROXY_ACTION_FEE_BUFFER_LOVELACE, + context: "proxy vote", + }); + + expect(selection.utxoRefs).toEqual([ + { txHash: "token", outputIndex: 0 }, + { txHash: "funding-a", outputIndex: 0 }, + { txHash: "funding-b", outputIndex: 0 }, + ]); + expect(selection.collateralRef).toEqual({ txHash: "collateral", outputIndex: 0 }); + expect(selection.selectedLovelace).toBe(4_600_000n); + expect(selection.requiredLovelace).toBe(4_000_000n); + }); + + it("splits auth-token action diagnostics away from request refs", () => { + const selection = selectAuthTokenRefsWithMinLovelace({ + walletUtxos: [ + mkUtxo("1200000", "token", 0, "1"), + mkUtxo("3000000", "funding-a"), + ], + collateralUtxos: [mkUtxo("6000000", "collateral", 0, undefined, "addr_test_signer_1")], + authTokenId: AUTH_TOKEN_ID, + requiredLovelace: PROXY_ACTION_REQUIRED_LOVELACE + PROXY_ACTION_FEE_BUFFER_LOVELACE, + context: "proxy vote", + }); + + const { requestRefs, selectionArtifacts } = splitProxyActionSelection(selection); + + expect(requestRefs).toEqual({ + utxoRefs: [ + { txHash: "token", outputIndex: 0 }, + { txHash: "funding-a", outputIndex: 0 }, + ], + collateralRef: { txHash: "collateral", outputIndex: 0 }, + }); + expect(selectionArtifacts).toEqual({ + selectedLovelace: "4200000", + requiredLovelace: "4000000", + }); + expect(requestRefs).not.toHaveProperty("selectedLovelace"); + }); + + it("normalizes nested BigInt artifacts without changing request contracts", () => { + expect( + normalizeJsonArtifact({ + selectedLovelace: 1n, + nested: [{ requiredLovelace: 2n }], + }), + ).toEqual({ + selectedLovelace: "1", + nested: [{ requiredLovelace: "2" }], + }); + }); + + it("rejects DRep register when only token and collateral are available", () => { + expect(() => + selectDRepRegisterRefs({ + walletUtxos: [ + mkUtxo("2000000", "token", 0, 
"1"), + ], + collateralUtxos: [mkUtxo("6000000", "collateral", 0, undefined, "addr_test_signer_1")], + authTokenId: AUTH_TOKEN_ID, + requiredLovelace: DREP_REGISTER_REQUIRED_LOVELACE, + }), + ).toThrow(/requires 505 ADA in selected wallet inputs/); + }); + + it("rejects auth-token min-lovelace actions when selected wallet inputs are too small", () => { + expect(() => + selectAuthTokenRefsWithMinLovelace({ + walletUtxos: [mkUtxo("1200000", "token", 0, "1")], + collateralUtxos: [mkUtxo("6000000", "collateral", 0, undefined, "addr_test_signer_1")], + authTokenId: AUTH_TOKEN_ID, + requiredLovelace: PROXY_ACTION_REQUIRED_LOVELACE + PROXY_ACTION_FEE_BUFFER_LOVELACE, + context: "proxy vote", + }), + ).toThrow(/proxy vote requires 4 ADA in selected wallet inputs/); + }); +}); diff --git a/src/__tests__/proxyCiChainRecovery.test.ts b/src/__tests__/proxyCiChainRecovery.test.ts new file mode 100644 index 00000000..45cd0b07 --- /dev/null +++ b/src/__tests__/proxyCiChainRecovery.test.ts @@ -0,0 +1,294 @@ +import { describe, expect, it, jest } from "@jest/globals"; +import { recoverProxyRowsFromChainForWalletType } from "../../scripts/ci/scenarios/proxyChainRecovery"; +import type { CIBootstrapContext } from "../../scripts/ci/framework/types"; +import { deriveProxyScripts } from "../lib/server/proxyTxBuilders"; +import type { UtxoRef } from "../lib/server/proxyUtxos"; + +type TestProxyRow = { + id: string; + walletId: string | null; + proxyAddress: string; + authTokenId: string; + paramUtxo: string; + isActive: boolean; +}; + +const walletAddress = "addr_test_wallet"; +const paramUtxo: UtxoRef = { + txHash: "a".repeat(64), + outputIndex: 0, +}; +const derivedProxy = deriveProxyScripts({ paramUtxo, network: 0 }); + +function createContext(): CIBootstrapContext { + return { + schemaVersion: 3, + createdAt: "2026-04-30T00:00:00.000Z", + apiBaseUrl: "http://localhost:3000", + networkId: 0, + walletTypes: ["legacy"], + wallets: [ + { + type: "legacy", + walletId: "current-wallet", + 
walletAddress, + signerAddresses: ["addr_test_signer_1", "addr_test_signer_2"], + }, + ], + bots: [], + signerAddresses: ["addr_test_signer_1", "addr_test_signer_2"], + signerStakeAddresses: [], + }; +} + +function createProxyRow(overrides: Partial = {}): TestProxyRow { + return { + id: "proxy-1", + walletId: "old-wallet", + proxyAddress: derivedProxy.proxyAddress, + authTokenId: derivedProxy.authTokenId, + paramUtxo: JSON.stringify(paramUtxo), + isActive: false, + ...overrides, + }; +} + +function createDb(args: { proxies?: TestProxyRow[] } = {}) { + const proxies = [...(args.proxies ?? [])]; + const creates: unknown[] = []; + const updates: unknown[] = []; + type TestDb = { + wallet: { + findUnique: ReturnType; + }; + proxy: { + findFirst: ReturnType; + create: ReturnType; + update: ReturnType; + }; + $transaction: ReturnType; + }; + const db: TestDb = { + wallet: { + findUnique: jest.fn(async ({ where }: { where: { id: string } }) => + where.id === "current-wallet" ? { id: "current-wallet" } : null, + ), + }, + proxy: { + findFirst: jest.fn(async ({ where }: { where: { authTokenId: string } }) => + proxies.find((proxy) => proxy.authTokenId === where.authTokenId) ?? 
null, + ), + create: jest.fn(async ({ data }: { data: Omit & { description: string } }) => { + creates.push(data); + const row: TestProxyRow = { id: `proxy-${creates.length}`, ...data }; + proxies.push(row); + return row; + }), + update: jest.fn(async ({ where, data }: { where: { id: string }; data: { walletId: string; isActive: true } }) => { + updates.push({ where, data }); + const row = proxies.find((proxy) => proxy.id === where.id); + if (!row) throw new Error(`missing proxy ${where.id}`); + row.walletId = data.walletId; + row.isActive = data.isActive; + return row; + }), + }, + $transaction: jest.fn(async (fn: (tx: TestDb) => Promise) => fn(db)), + }; + return { db, creates, updates, proxies }; +} + +function createProvider(args: { + walletAssets: string[]; + histories?: Record>; + txInputs?: Record>; + txErrors?: string[]; +}) { + return { + fetchAddressUTxOs: jest.fn(async (address: string) => [ + { + input: { txHash: "b".repeat(64), outputIndex: 0 }, + output: { + address, + amount: + address === walletAddress + ? [ + { unit: "lovelace", quantity: "2000000" }, + ...args.walletAssets.map((unit) => ({ unit, quantity: "1" })), + ] + : [{ unit: "lovelace", quantity: "2000000" }], + }, + }, + ]), + get: jest.fn(async (path: string) => { + const assetMatch = path.match(/^\/assets\/([^/]+)\/history/); + if (assetMatch) { + const assetUnit = decodeURIComponent(assetMatch[1]!); + return args.histories?.[assetUnit] ?? []; + } + + const txMatch = path.match(/^\/txs\/([^/]+)\/utxos$/); + if (txMatch) { + const txHash = decodeURIComponent(txMatch[1]!); + if (args.txErrors?.includes(txHash)) { + throw new Error(`tx lookup failed for ${txHash}`); + } + return { inputs: args.txInputs?.[txHash] ?? 
[] }; + } + + throw new Error(`unexpected path ${path}`); + }), + }; +} + +describe("CI proxy chain recovery", () => { + it("recovers a missing row when a wallet asset matches a mint transaction input", async () => { + const { db, creates, updates } = createDb(); + const provider = createProvider({ + walletAssets: [derivedProxy.authTokenId], + histories: { [derivedProxy.authTokenId]: [{ tx_hash: "mint-tx", action: "minted" }] }, + txInputs: { "mint-tx": [{ tx_hash: paramUtxo.txHash, output_index: paramUtxo.outputIndex }] }, + }); + + const result = await recoverProxyRowsFromChainForWalletType({ + ctx: createContext(), + walletType: "legacy", + db, + provider, + }); + + expect(result.recovered).toEqual([ + expect.objectContaining({ + proxyId: "proxy-1", + action: "created", + authTokenId: derivedProxy.authTokenId, + proxyAddress: derivedProxy.proxyAddress, + mintTxHash: "mint-tx", + dRepId: derivedProxy.dRepId, + }), + ]); + expect(creates).toEqual([ + expect.objectContaining({ + walletId: "current-wallet", + authTokenId: derivedProxy.authTokenId, + proxyAddress: derivedProxy.proxyAddress, + paramUtxo: JSON.stringify(paramUtxo), + description: "Recovered CI proxy from chain", + isActive: true, + }), + ]); + expect(updates).toEqual([]); + }); + + it("reattaches an existing historical row instead of creating a duplicate", async () => { + const existing = createProxyRow({ walletId: "old-wallet", isActive: false }); + const { db, creates, updates } = createDb({ proxies: [existing] }); + const provider = createProvider({ + walletAssets: [derivedProxy.authTokenId], + histories: { [derivedProxy.authTokenId]: [{ tx_hash: "mint-tx", action: "minted" }] }, + txInputs: { "mint-tx": [{ tx_hash: paramUtxo.txHash, output_index: paramUtxo.outputIndex }] }, + }); + + const result = await recoverProxyRowsFromChainForWalletType({ + ctx: createContext(), + walletType: "legacy", + db, + provider, + }); + + expect(result.recovered[0]).toEqual( + expect.objectContaining({ + proxyId: 
"proxy-1", + action: "reattached", + fromWalletId: "old-wallet", + }), + ); + expect(creates).toEqual([]); + expect(updates).toEqual([ + expect.objectContaining({ + where: { id: "proxy-1" }, + data: { walletId: "current-wallet", isActive: true }, + }), + ]); + }); + + it("skips unrelated wallet assets whose mint inputs do not derive the observed unit", async () => { + const unrelatedAsset = "f".repeat(56); + const { db, creates, updates } = createDb(); + const provider = createProvider({ + walletAssets: [unrelatedAsset], + histories: { [unrelatedAsset]: [{ tx_hash: "mint-tx", action: "minted" }] }, + txInputs: { "mint-tx": [{ tx_hash: "c".repeat(64), output_index: 1 }] }, + }); + + const result = await recoverProxyRowsFromChainForWalletType({ + ctx: createContext(), + walletType: "legacy", + db, + provider, + }); + + expect(result.recovered).toEqual([]); + expect(result.skipped).toEqual([ + expect.objectContaining({ assetUnit: unrelatedAsset, reason: "no-derived-match" }), + ]); + expect(creates).toEqual([]); + expect(updates).toEqual([]); + }); + + it("records diagnostics when asset history has no mint or tx UTxO lookup fails", async () => { + const noMintAsset = "1".repeat(56); + const txErrorAsset = "2".repeat(56); + const { db } = createDb(); + const provider = createProvider({ + walletAssets: [noMintAsset, txErrorAsset], + histories: { + [noMintAsset]: [{ tx_hash: "non-mint-tx", action: "burned" }], + [txErrorAsset]: [{ tx_hash: "error-tx", action: "minted" }], + }, + txInputs: {}, + txErrors: ["error-tx"], + }); + + const result = await recoverProxyRowsFromChainForWalletType({ + ctx: createContext(), + walletType: "legacy", + db, + provider, + }); + + expect(result.recovered).toEqual([]); + expect(result.skipped).toEqual( + expect.arrayContaining([ + expect.objectContaining({ assetUnit: noMintAsset, reason: "no-mint-transaction" }), + expect.objectContaining({ assetUnit: txErrorAsset, reason: "tx-utxos-fetch-error" }), + ]), + ); + }); + + it("enforces the 
candidate cap and records skipped excess assets", async () => { + const assetA = "a".repeat(56); + const assetB = "b".repeat(56); + const { db } = createDb(); + const provider = createProvider({ + walletAssets: [assetA, assetB], + histories: { [assetA]: [] }, + }); + + const result = await recoverProxyRowsFromChainForWalletType({ + ctx: createContext(), + walletType: "legacy", + db, + provider, + maxCandidates: 1, + }); + + expect(provider.get).toHaveBeenCalledTimes(1); + expect(result.skipped).toEqual( + expect.arrayContaining([ + expect.objectContaining({ assetUnit: assetA, reason: "no-mint-transaction" }), + expect.objectContaining({ assetUnit: assetB, reason: "candidate-cap-exceeded" }), + ]), + ); + }); +}); diff --git a/src/__tests__/proxyCiOrphanAdoption.test.ts b/src/__tests__/proxyCiOrphanAdoption.test.ts new file mode 100644 index 00000000..ea90b0c7 --- /dev/null +++ b/src/__tests__/proxyCiOrphanAdoption.test.ts @@ -0,0 +1,261 @@ +import { describe, expect, it, jest } from "@jest/globals"; +import { resolvePaymentKeyHash, serializeNativeScript } from "@meshsdk/core"; +import { adoptProxyOrphansForWalletType } from "../../scripts/ci/scenarios/proxyOrphanAdoption"; +import type { CIBootstrapContext } from "../../scripts/ci/framework/types"; +import { deriveProxyScripts } from "../lib/server/proxyTxBuilders"; +import { realTestAddresses } from "./testUtils"; + +type TestWalletRow = { + id: string; + name: string; + signersAddresses: string[]; + signersStakeKeys: string[]; + signersDRepKeys: string[]; + signersDescriptions: string[]; + numRequiredSigners: number; + scriptCbor: string; + stakeCredentialHash: string | null; + type: string; + rawImportBodies: null; +}; + +type TestProxyRow = { + id: string; + walletId: string | null; + proxyAddress: string; + authTokenId: string; + paramUtxo: string; + isActive: boolean; +}; + +const paramUtxo = { + txHash: "a".repeat(64), + outputIndex: 0, +}; +const derivedProxy = deriveProxyScripts({ paramUtxo, network: 0 }); 
+ +function createWalletRows(): { address: string; current: TestWalletRow; old: TestWalletRow } { + const paymentScript = { + type: "atLeast" as const, + required: 1, + scripts: [ + { type: "sig" as const, keyHash: resolvePaymentKeyHash(realTestAddresses.address1) }, + { type: "sig" as const, keyHash: resolvePaymentKeyHash(realTestAddresses.address2) }, + ], + }; + const serialized = serializeNativeScript(paymentScript, undefined, 0, true); + if (!serialized.scriptCbor) { + throw new Error("Expected test native script CBOR"); + } + + const base = { + name: "CI legacy wallet", + signersAddresses: [realTestAddresses.address1, realTestAddresses.address2], + signersStakeKeys: [], + signersDRepKeys: [], + signersDescriptions: ["one", "two"], + numRequiredSigners: 1, + scriptCbor: serialized.scriptCbor, + stakeCredentialHash: null, + type: "atLeast", + rawImportBodies: null, + }; + + return { + address: serialized.address, + current: { ...base, id: "current-wallet" }, + old: { ...base, id: "old-wallet" }, + }; +} + +function createContext(walletAddress: string): CIBootstrapContext { + return { + schemaVersion: 3, + createdAt: "2026-04-30T00:00:00.000Z", + apiBaseUrl: "http://localhost:3000", + networkId: 0, + walletTypes: ["legacy"], + wallets: [ + { + type: "legacy", + walletId: "current-wallet", + walletAddress, + signerAddresses: [realTestAddresses.address1, realTestAddresses.address2], + }, + ], + bots: [], + signerAddresses: [realTestAddresses.address1, realTestAddresses.address2], + signerStakeAddresses: [], + }; +} + +function createProxyRow(overrides: Partial = {}): TestProxyRow { + return { + id: "proxy-1", + walletId: "old-wallet", + proxyAddress: derivedProxy.proxyAddress, + authTokenId: derivedProxy.authTokenId, + paramUtxo: JSON.stringify(paramUtxo), + isActive: true, + ...overrides, + }; +} + +function createDb(args: { wallets: TestWalletRow[]; proxies: TestProxyRow[] }) { + const updates: Array<{ where: { id: string }; data: { walletId: string; isActive: 
true } }> = []; + type TestDb = { + wallet: { + findUnique: ReturnType; + findMany: ReturnType; + }; + proxy: { + findMany: ReturnType; + update: ReturnType; + }; + $transaction: ReturnType; + }; + const db: TestDb = { + wallet: { + findUnique: jest.fn(async ({ where }: { where: { id: string } }) => + args.wallets.find((wallet) => wallet.id === where.id) ?? null, + ), + findMany: jest.fn(async () => args.wallets), + }, + proxy: { + findMany: jest.fn(async ({ where }: { where: { walletId: { in: string[] } } }) => + args.proxies.filter((proxy) => proxy.walletId && where.walletId.in.includes(proxy.walletId)), + ), + update: jest.fn(async (updateArgs: { where: { id: string }; data: { walletId: string; isActive: true } }) => { + updates.push(updateArgs); + return { + id: updateArgs.where.id, + walletId: updateArgs.data.walletId, + isActive: updateArgs.data.isActive, + }; + }), + }, + $transaction: jest.fn(async (fn: (tx: TestDb) => Promise) => fn(db)), + }; + return { db, updates }; +} + +function createProvider(args: { walletAddress: string; includeAuthToken: boolean }) { + return { + fetchAddressUTxOs: jest.fn(async (address: string) => { + if (address === args.walletAddress) { + return [ + { + input: { txHash: "b".repeat(64), outputIndex: 0 }, + output: { + address, + amount: [ + { unit: "lovelace", quantity: "2000000" }, + ...(args.includeAuthToken + ? 
[{ unit: derivedProxy.authTokenId, quantity: "1" }] + : []), + ], + }, + }, + ]; + } + if (address === derivedProxy.proxyAddress) { + return []; + } + throw new Error(`unexpected address ${address}`); + }), + }; +} + +describe("CI proxy orphan adoption", () => { + it("reattaches a valid historical proxy row to the current wallet", async () => { + const { address, current, old } = createWalletRows(); + const proxy = createProxyRow(); + const { db, updates } = createDb({ wallets: [current, old], proxies: [proxy] }); + + const result = await adoptProxyOrphansForWalletType({ + ctx: createContext(address), + walletType: "legacy", + db, + provider: createProvider({ walletAddress: address, includeAuthToken: true }), + }); + + expect(result.historicalWalletIds).toEqual(["old-wallet"]); + expect(result.adopted).toEqual([ + expect.objectContaining({ + proxyId: "proxy-1", + fromWalletId: "old-wallet", + authTokenId: derivedProxy.authTokenId, + }), + ]); + expect(updates).toEqual([ + expect.objectContaining({ + where: { id: "proxy-1" }, + data: { walletId: "current-wallet", isActive: true }, + }), + ]); + }); + + it("reactivates a valid inactive row already attached to the current wallet", async () => { + const { address, current, old } = createWalletRows(); + const proxy = createProxyRow({ walletId: "current-wallet", isActive: false }); + const { db, updates } = createDb({ wallets: [current, old], proxies: [proxy] }); + + const result = await adoptProxyOrphansForWalletType({ + ctx: createContext(address), + walletType: "legacy", + db, + provider: createProvider({ walletAddress: address, includeAuthToken: true }), + }); + + expect(result.adopted[0]).toEqual( + expect.objectContaining({ + proxyId: "proxy-1", + fromWalletId: "current-wallet", + wasActive: false, + }), + ); + expect(updates[0]?.data).toEqual({ walletId: "current-wallet", isActive: true }); + }); + + it("skips rows whose stored metadata does not match derived scripts", async () => { + const { address, current, old 
} = createWalletRows(); + const { db, updates } = createDb({ + wallets: [current, old], + proxies: [createProxyRow({ authTokenId: "wrong-auth-token" })], + }); + + const result = await adoptProxyOrphansForWalletType({ + ctx: createContext(address), + walletType: "legacy", + db, + provider: createProvider({ walletAddress: address, includeAuthToken: true }), + }); + + expect(result.adopted).toEqual([]); + expect(result.skipped).toEqual([ + expect.objectContaining({ proxyId: "proxy-1", reason: "metadata-mismatch" }), + ]); + expect(updates).toEqual([]); + }); + + it("skips rows when the auth token is not visible at the current wallet address", async () => { + const { address, current, old } = createWalletRows(); + const { db, updates } = createDb({ + wallets: [current, old], + proxies: [createProxyRow()], + }); + + const result = await adoptProxyOrphansForWalletType({ + ctx: createContext(address), + walletType: "legacy", + db, + provider: createProvider({ walletAddress: address, includeAuthToken: false }), + }); + + expect(result.adopted).toEqual([]); + expect(result.skipped).toEqual([ + expect.objectContaining({ proxyId: "proxy-1", reason: "chain-empty" }), + ]); + expect(updates).toEqual([]); + }); +}); diff --git a/src/__tests__/proxyCiPreflight.test.ts b/src/__tests__/proxyCiPreflight.test.ts new file mode 100644 index 00000000..89fc9345 --- /dev/null +++ b/src/__tests__/proxyCiPreflight.test.ts @@ -0,0 +1,471 @@ +import { describe, expect, it, jest } from "@jest/globals"; +import { + analyzeProxyFullLifecycleUtxoShape, + assertProxyFullLifecyclePreflight, + createScenarioProxyFullLifecycle, + createScenarioProxySmoke, + DREP_REGISTER_REQUIRED_LOVELACE, + FULL_LIFECYCLE_FEE_BUFFER_LOVELACE, + getProxyDRepAnchorUrl, + LIFECYCLE_PROXY_LOVELACE, + PROXY_FULL_LIFECYCLE_WALLET_TYPES, + requireSetupTxHash, + runProxyFullLifecycleHygiene, +} from "../../scripts/ci/scenarios/steps/proxyBot"; +import type { CIBootstrapContext, CIWalletType } from 
"../../scripts/ci/framework/types"; + +type TestUtxo = Parameters[0]["walletUtxos"][number]; + +const mkUtxo = (lovelace: string, txHash = "aa", outputIndex = 0): TestUtxo => ({ + input: { txHash, outputIndex }, + output: { + address: "addr_test_wallet", + amount: [{ unit: "lovelace", quantity: lovelace }], + }, +}); + +const mkCollateralUtxo = (lovelace = "6000000", txHash = "collateral", outputIndex = 0): TestUtxo => ({ + input: { txHash, outputIndex }, + output: { + address: "addr_test_signer_1", + amount: [{ unit: "lovelace", quantity: lovelace }], + }, +}); + +const mkAuthTokenUtxo = (txHash = "auth", outputIndex = 0): TestUtxo => ({ + input: { txHash, outputIndex }, + output: { + address: "addr_test_wallet", + amount: [ + { unit: "lovelace", quantity: "6000000" }, + { unit: "policy.asset", quantity: "10" }, + ], + }, +}); + +const mkContext = (walletTypes: CIWalletType[]): CIBootstrapContext => ({ + schemaVersion: 3, + createdAt: "2026-04-29T00:00:00.000Z", + apiBaseUrl: "http://localhost:3000", + networkId: 0, + walletTypes, + wallets: walletTypes.map((type) => ({ + type, + walletId: `${type}-wallet-id`, + walletAddress: `addr_test_${type}`, + signerAddresses: ["addr_test_signer_1", "addr_test_signer_2", "addr_test_signer_3"], + })), + bots: [ + { + id: "bot-1", + paymentAddress: "addr_test_signer_1", + botKeyId: "bot-key-1", + botId: "bot-user-1", + }, + ], + defaultBotId: "bot-1", + signerAddresses: ["addr_test_signer_1", "addr_test_signer_2", "addr_test_signer_3"], + signerStakeAddresses: ["stake_test_1", "stake_test_2", "stake_test_3"], +}); + +describe("proxy full lifecycle preflight", () => { + it("classifies an already usable UTxO shape as pass", () => { + const analysis = analyzeProxyFullLifecycleUtxoShape({ + walletUtxos: [mkUtxo("540000000", "aa", 0)], + collateralUtxos: [mkCollateralUtxo()], + }); + + expect(analysis.status).toBe("pass"); + }); + + it("classifies one large wallet UTxO without key collateral as needing a self-split", () => { + 
const analysis = analyzeProxyFullLifecycleUtxoShape({ + walletUtxos: [mkUtxo("600000000", "aa", 0)], + collateralUtxos: [], + }); + + expect(analysis.status).toBe("needs-split"); + }); + + it("classifies insufficient total ADA as a hard funding failure", () => { + const analysis = analyzeProxyFullLifecycleUtxoShape({ + walletUtxos: [mkUtxo("525000000", "aa", 0)], + collateralUtxos: [mkCollateralUtxo()], + }); + + expect(analysis.status).toBe("insufficient-total"); + expect(() => + assertProxyFullLifecyclePreflight({ + walletUtxos: [mkUtxo("525000000", "aa", 0)], + collateralUtxos: [mkCollateralUtxo()], + }), + ).toThrow(/insufficient ADA/); + }); + + it("does not classify insufficient self-split budget as self-healable", () => { + const analysis = analyzeProxyFullLifecycleUtxoShape({ + walletUtxos: [mkUtxo("540000000", "aa", 0)], + collateralUtxos: [], + }); + + expect(analysis.status).toBe("insufficient-shape"); + }); + + it("rejects when no setup UTxO has at least 20 ADA", () => { + expect(() => + assertProxyFullLifecyclePreflight({ + walletUtxos: Array.from({ length: 29 }, (_, index) => + mkUtxo("19000000", `small-${index}`, index), + ), + collateralUtxos: [mkCollateralUtxo()], + }), + ).toThrow(/no wallet UTxO has at least 20 ADA/); + }); + + it("rejects when no key-address collateral UTxO is present", () => { + expect(() => + assertProxyFullLifecyclePreflight({ + walletUtxos: [mkUtxo("540000000", "aa", 0)], + collateralUtxos: [], + }), + ).toThrow(/no bot payment-address UTxO has at least 5 ADA/); + }); + + it("rejects insufficient total ADA with an actionable delta", () => { + expect(() => + assertProxyFullLifecyclePreflight({ + walletUtxos: [mkUtxo("525000000", "aa", 0)], + collateralUtxos: [mkCollateralUtxo()], + }), + ).toThrow(/insufficient ADA/); + }); + + it("passes when setup, key collateral, and wallet budget are available", () => { + const result = assertProxyFullLifecyclePreflight({ + walletUtxos: [mkUtxo("540000000", "aa", 0)], + collateralUtxos: 
[mkCollateralUtxo()], + }); + + expect(result.totalLovelace).toBe(540_000_000n); + expect(result.setupCandidates).toBe(1); + expect(result.keyCollateralCandidates).toBe(1); + expect(result.drepSelectableLovelace).toBe(540_000_000n); + expect(result.drepRequiredLovelace).toBe(536_000_000n); + expect(result.requiredTotalLovelace).toBe(536_000_000n); + }); + + it("rejects when script-address UTxOs are the only apparent collateral", () => { + expect(() => + assertProxyFullLifecyclePreflight({ + walletUtxos: [mkUtxo("540000000", "aa", 0), mkUtxo("6000000", "bb", 1)], + collateralUtxos: [], + }), + ).toThrow(/no bot payment-address UTxO/); + }); + + it("rejects when wallet inputs cannot fund the DRep budget", () => { + expect(() => + assertProxyFullLifecyclePreflight({ + walletUtxos: [mkUtxo("535999999", "aa", 0)], + collateralUtxos: [mkCollateralUtxo()], + }), + ).toThrow(/insufficient ADA/); + }); + + it("uses hardcoded proxy lifecycle budget constants", () => { + const result = assertProxyFullLifecyclePreflight({ + walletUtxos: [mkUtxo("540000000", "aa", 0)], + collateralUtxos: [mkCollateralUtxo()], + }); + + expect(LIFECYCLE_PROXY_LOVELACE).toBe(10_000_000n); + expect(FULL_LIFECYCLE_FEE_BUFFER_LOVELACE).toBe(20_000_000n); + expect(result.requiredTotalLovelace).toBe( + DREP_REGISTER_REQUIRED_LOVELACE + + LIFECYCLE_PROXY_LOVELACE + + 1_000_000n + + FULL_LIFECYCLE_FEE_BUFFER_LOVELACE, + ); + }); + + it("requires the normal DRep anchor URL for proxy DRep registration", () => { + expect(getProxyDRepAnchorUrl({ CI_DREP_ANCHOR_URL: " https://example.test/drep.json " })).toBe( + "https://example.test/drep.json", + ); + expect(() => getProxyDRepAnchorUrl({})).toThrow(/CI_DREP_ANCHOR_URL is required/); + }); +}); + +describe("proxy scenario composition", () => { + it("includes malformed-body checks for proxy finalize routes", () => { + const scenario = createScenarioProxySmoke(mkContext(["legacy"])); + const stepIds = scenario.steps.map((step) => step.id); + + 
expect(stepIds).toContain("v1.proxySetupFinalize.malformedBody"); + expect(stepIds).toContain("v1.proxyCleanupFinalize.malformedBody"); + }); + + it("runs full lifecycle for legacy, hierarchical, and SDK wallets", () => { + const scenario = createScenarioProxyFullLifecycle(mkContext(["legacy", "hierarchical", "sdk"])); + const stepIds = scenario.steps.map((step) => step.id); + + expect(PROXY_FULL_LIFECYCLE_WALLET_TYPES).toEqual(["legacy", "hierarchical", "sdk"]); + expect(stepIds).toContain("v1.proxy.full.recoverFromChain.legacy"); + expect(stepIds).toContain("v1.proxy.full.adoptOrphans.legacy"); + expect(stepIds).toContain("v1.proxy.full.hygiene.legacy"); + expect(stepIds).toContain("v1.proxy.full.utxoShape.legacy"); + expect(stepIds).toContain("v1.proxy.full.preflight.legacy"); + expect(stepIds.indexOf("v1.proxy.full.recoverFromChain.legacy")).toBeLessThan( + stepIds.indexOf("v1.proxy.full.adoptOrphans.legacy"), + ); + expect(stepIds.indexOf("v1.proxy.full.adoptOrphans.legacy")).toBeLessThan( + stepIds.indexOf("v1.proxy.full.hygiene.legacy"), + ); + expect(stepIds.indexOf("v1.proxy.full.hygiene.legacy")).toBeLessThan( + stepIds.indexOf("v1.proxy.full.utxoShape.legacy"), + ); + expect(stepIds.indexOf("v1.proxy.full.utxoShape.legacy")).toBeLessThan( + stepIds.indexOf("v1.proxy.full.preflight.legacy"), + ); + expect(stepIds).toContain("v1.proxy.full.recoverFromChain.hierarchical"); + expect(stepIds).toContain("v1.proxy.full.adoptOrphans.hierarchical"); + expect(stepIds).toContain("v1.proxy.full.hygiene.hierarchical"); + expect(stepIds).toContain("v1.proxy.full.utxoShape.hierarchical"); + expect(stepIds).toContain("v1.proxy.full.preflight.hierarchical"); + expect(stepIds.indexOf("v1.proxy.full.recoverFromChain.hierarchical")).toBeLessThan( + stepIds.indexOf("v1.proxy.full.adoptOrphans.hierarchical"), + ); + expect(stepIds.indexOf("v1.proxy.full.adoptOrphans.hierarchical")).toBeLessThan( + stepIds.indexOf("v1.proxy.full.hygiene.hierarchical"), + ); + 
expect(stepIds.indexOf("v1.proxy.full.hygiene.hierarchical")).toBeLessThan( + stepIds.indexOf("v1.proxy.full.utxoShape.hierarchical"), + ); + expect(stepIds.indexOf("v1.proxy.full.utxoShape.hierarchical")).toBeLessThan( + stepIds.indexOf("v1.proxy.full.preflight.hierarchical"), + ); + expect(stepIds).toContain("v1.proxy.full.recoverFromChain.sdk"); + expect(stepIds).toContain("v1.proxy.full.adoptOrphans.sdk"); + expect(stepIds).toContain("v1.proxy.full.hygiene.sdk"); + expect(stepIds).toContain("v1.proxy.full.utxoShape.sdk"); + expect(stepIds).toContain("v1.proxy.full.preflight.sdk"); + expect(stepIds.indexOf("v1.proxy.full.recoverFromChain.sdk")).toBeLessThan( + stepIds.indexOf("v1.proxy.full.adoptOrphans.sdk"), + ); + expect(stepIds.indexOf("v1.proxy.full.adoptOrphans.sdk")).toBeLessThan( + stepIds.indexOf("v1.proxy.full.hygiene.sdk"), + ); + expect(stepIds.indexOf("v1.proxy.full.hygiene.sdk")).toBeLessThan( + stepIds.indexOf("v1.proxy.full.utxoShape.sdk"), + ); + expect(stepIds.indexOf("v1.proxy.full.utxoShape.sdk")).toBeLessThan( + stepIds.indexOf("v1.proxy.full.preflight.sdk"), + ); + }); + + it("signs proxy lifecycle transactions with signer index 0 before the broadcaster", () => { + const scenario = createScenarioProxyFullLifecycle(mkContext(["legacy"])); + const stepIds = scenario.steps.map((step) => step.id); + + const setupProposeIndex = stepIds.indexOf("v1.proxy.lifecycle.setup.propose.legacy"); + expect(stepIds.slice(setupProposeIndex + 1, setupProposeIndex + 3)).toEqual([ + "v1.proxy.lifecycle.setup.signer0.legacy", + "v1.proxy.lifecycle.setup.signer1.legacy", + ]); + + const spendProposeIndex = stepIds.indexOf("v1.proxy.full.spend.propose.legacy"); + expect(stepIds.slice(spendProposeIndex + 1, spendProposeIndex + 3)).toEqual([ + "v1.proxy.full.spend.legacy.signer0", + "v1.proxy.full.spend.legacy.signer1", + ]); + expect(stepIds).not.toContain("v1.proxy.lifecycle.setup.sign1.legacy"); + expect(stepIds).not.toContain("v1.proxy.full.spend.legacy.sign1"); 
+ }); + + it("fails clearly instead of using a setup transaction id as a txHash", () => { + expect(() => + requireSetupTxHash({ + setupTransactionId: "database-transaction-id", + }), + ).toThrow(/proxy setup was not broadcast; signer step returned submitted=false/); + }); + + it("fails clearly when full lifecycle has no eligible wallet type", async () => { + const ctx = mkContext([]); + const scenario = createScenarioProxyFullLifecycle(ctx); + + expect(scenario.steps).toHaveLength(1); + expect(scenario.steps[0]?.id).toBe("v1.proxy.full.precondition"); + await expect(scenario.steps[0]?.execute(ctx)).rejects.toThrow( + /scenario\.proxy-full-lifecycle requires at least one of legacy, hierarchical, sdk/, + ); + }); +}); + +describe("proxy full lifecycle hygiene", () => { + const proxy = { + id: "proxy-1", + proxyAddress: "addr_test_proxy", + authTokenId: "policy.asset", + isActive: true, + }; + + function createHygieneDeps(requestJsonMock: ReturnType) { + return { + requestJson: requestJsonMock, + authenticateBot: jest.fn(async () => "token"), + getDefaultBot: jest.fn((ctx: CIBootstrapContext) => ctx.bots[0]!), + fetchFreeUtxos: jest.fn(async () => [mkAuthTokenUtxo()]), + fetchKeyAddressUtxos: jest.fn(async () => [mkCollateralUtxo()]), + runSigningFlow: jest.fn(async (args: { signBroadcast?: boolean; preferredTransactionId?: string }) => ({ + walletType: "legacy" as const, + walletId: "legacy-wallet-id", + transactionId: args.preferredTransactionId ?? "tx", + signerAddress: "addr_test_signer_1", + status: 200, + submitted: args.signBroadcast, + txHash: args.signBroadcast ? `${args.preferredTransactionId ?? 
"tx"}-hash` : undefined, + })), + pollUntilUtxosConsumed: jest.fn(async () => ({ attempts: 1 })), + env: { CI_MNEMONIC_1: "one", CI_MNEMONIC_2: "two" }, + }; + } + + it("no-ops when no active proxies are listed", async () => { + const requestJsonMock = jest.fn(async () => ({ status: 200, data: [] })); + + const result = await runProxyFullLifecycleHygiene({ + ctx: mkContext(["legacy"]), + walletType: "legacy", + deps: createHygieneDeps(requestJsonMock), + }); + + expect(result.artifacts.noOp).toBe(true); + expect(requestJsonMock).toHaveBeenCalledTimes(1); + }); + + it("cleans and finalizes an active proxy that is ready to burn", async () => { + const requestJsonMock = jest + .fn() + .mockResolvedValueOnce({ status: 200, data: [proxy] }) + .mockResolvedValueOnce({ status: 200, data: { active: false, dRepId: "drep1proxy" } }) + .mockResolvedValueOnce({ + status: 201, + data: { transaction: { id: "tx-burn" }, cleanup: { phase: "burn" } }, + }) + .mockResolvedValueOnce({ status: 201, data: { proxy: { ...proxy, isActive: false } } }) + .mockResolvedValueOnce({ status: 200, data: [] }); + const deps = createHygieneDeps(requestJsonMock); + + const result = await runProxyFullLifecycleHygiene({ + ctx: mkContext(["legacy"]), + walletType: "legacy", + deps, + }); + + expect(result.artifacts.noOp).toBe(false); + expect(deps.runSigningFlow).toHaveBeenCalledTimes(2); + expect(deps.pollUntilUtxosConsumed).toHaveBeenCalledTimes(1); + expect(requestJsonMock).toHaveBeenNthCalledWith( + 2, + expect.objectContaining({ + url: "http://localhost:3000/api/v1/proxyDRepInfo?walletId=legacy-wallet-id&address=addr_test_signer_1&proxyId=proxy-1", + }), + ); + expect(requestJsonMock).toHaveBeenNthCalledWith( + 3, + expect.objectContaining({ + url: "http://localhost:3000/api/v1/proxyCleanup", + body: expect.objectContaining({ + proxyId: proxy.id, + deactivateProxy: true, + utxoRefs: [{ txHash: "auth", outputIndex: 0 }], + collateralRef: { txHash: "collateral", outputIndex: 0 }, + }), + }), + ); + 
expect(requestJsonMock).toHaveBeenNthCalledWith( + 4, + expect.objectContaining({ + url: "http://localhost:3000/api/v1/proxyCleanupFinalize", + body: expect.objectContaining({ txHash: "tx-burn-hash" }), + }), + ); + }); + + it("runs a sweep pass before the burn pass when proxy UTxOs remain", async () => { + const requestJsonMock = jest + .fn() + .mockResolvedValueOnce({ status: 200, data: [proxy] }) + .mockResolvedValueOnce({ status: 200, data: { active: false, dRepId: "drep1proxy" } }) + .mockResolvedValueOnce({ + status: 201, + data: { transaction: { id: "tx-sweep" }, cleanup: { phase: "sweep" } }, + }) + .mockResolvedValueOnce({ + status: 201, + data: { transaction: { id: "tx-burn" }, cleanup: { phase: "burn" } }, + }) + .mockResolvedValueOnce({ status: 201, data: { proxy: { ...proxy, isActive: false } } }) + .mockResolvedValueOnce({ status: 200, data: [] }); + const deps = createHygieneDeps(requestJsonMock); + + const result = await runProxyFullLifecycleHygiene({ + ctx: mkContext(["legacy"]), + walletType: "legacy", + deps, + }); + + const cleaned = result.artifacts.cleaned as Array<{ cleanupTransactions: unknown[] }>; + expect(cleaned[0]?.cleanupTransactions).toHaveLength(2); + expect(deps.runSigningFlow).toHaveBeenCalledTimes(4); + expect(deps.pollUntilUtxosConsumed).toHaveBeenCalledTimes(2); + }); + + it("deregisters an active proxy DRep before cleanup", async () => { + const requestJsonMock = jest + .fn() + .mockResolvedValueOnce({ status: 200, data: [proxy] }) + .mockResolvedValueOnce({ status: 200, data: { active: true, dRepId: "drep1proxy" } }) + .mockResolvedValueOnce({ status: 201, data: { transaction: { id: "tx-drep" } } }) + .mockResolvedValueOnce({ + status: 201, + data: { transaction: { id: "tx-burn" }, cleanup: { phase: "burn" } }, + }) + .mockResolvedValueOnce({ status: 201, data: { proxy: { ...proxy, isActive: false } } }) + .mockResolvedValueOnce({ status: 200, data: [] }); + const deps = createHygieneDeps(requestJsonMock); + + const result = 
await runProxyFullLifecycleHygiene({ + ctx: mkContext(["legacy"]), + walletType: "legacy", + deps, + }); + + const cleaned = result.artifacts.cleaned as Array<{ + dRep?: { wasActive?: boolean; deregisterTransaction?: { transactionId?: string } }; + }>; + expect(cleaned[0]?.dRep?.wasActive).toBe(true); + expect(cleaned[0]?.dRep?.deregisterTransaction?.transactionId).toBe("tx-drep"); + expect(deps.runSigningFlow).toHaveBeenCalledTimes(4); + expect(deps.pollUntilUtxosConsumed).toHaveBeenCalledTimes(2); + expect(requestJsonMock).toHaveBeenNthCalledWith( + 3, + expect.objectContaining({ + url: "http://localhost:3000/api/v1/proxyDRepCertificate", + body: expect.objectContaining({ + proxyId: proxy.id, + action: "deregister", + utxoRefs: [{ txHash: "auth", outputIndex: 0 }], + collateralRef: { txHash: "collateral", outputIndex: 0 }, + }), + }), + ); + expect(requestJsonMock).toHaveBeenNthCalledWith( + 4, + expect.objectContaining({ + url: "http://localhost:3000/api/v1/proxyCleanup", + }), + ); + }); +}); diff --git a/src/__tests__/proxyCleanup.bot.test.ts b/src/__tests__/proxyCleanup.bot.test.ts new file mode 100644 index 00000000..5f3827a4 --- /dev/null +++ b/src/__tests__/proxyCleanup.bot.test.ts @@ -0,0 +1,219 @@ +import { beforeAll, beforeEach, describe, expect, it, jest } from "@jest/globals"; +import type { UTxO } from "@meshsdk/core"; +import type { NextApiRequest, NextApiResponse } from "next"; +import { createMockResponse, makeBearerAuth, makeBotJwtPayload } from "./apiTestUtils"; + +const addCorsHeadersMock = jest.fn<(res: NextApiResponse) => void>(); +const corsMock = jest.fn<(req: NextApiRequest, res: NextApiResponse) => Promise>(); +const applyRateLimitMock = jest.fn<(req: NextApiRequest, res: NextApiResponse) => boolean>(); +const applyBotRateLimitMock = jest.fn<(req: NextApiRequest, res: NextApiResponse, botId: string) => boolean>(); +const enforceBodySizeMock = jest.fn<(req: NextApiRequest, res: NextApiResponse, maxBytes: number) => boolean>(); +const 
verifyJwtMock: jest.Mock = jest.fn(); +const isBotJwtMock: jest.Mock = jest.fn(); +const authorizeWalletSignerForV1TxMock: jest.Mock = jest.fn(); +const loadActiveProxyForWalletMock: jest.Mock = jest.fn(); +const resolveWalletScriptAddressMock: jest.Mock = jest.fn(); +const resolveUtxoRefsFromChainMock: jest.Mock = jest.fn(); +const resolveCollateralRefFromChainMock: jest.Mock = jest.fn(); +const resolveSingleUtxoRefFromChainMock: jest.Mock = jest.fn(); +const requireAuthTokenUtxoMock: jest.Mock = jest.fn(); +const buildProxyCleanupSweepTxMock: jest.Mock = jest.fn(); +const buildProxyCleanupTxMock: jest.Mock = jest.fn(); +const deriveProxyScriptsMock: jest.Mock = jest.fn(); +const createPendingMultisigTransactionMock: jest.Mock = jest.fn(); +const completeMock: jest.Mock = jest.fn(); +const getTxBuilderMock: jest.Mock = jest.fn(); +const fetchAddressUTxOsMock: jest.Mock = jest.fn(); + +const proxy = { + id: "proxy-1", + proxyAddress: "addr_test_proxy", + authTokenId: "policy", + paramUtxo: JSON.stringify({ txHash: "aa", outputIndex: 0 }), +}; + +const proxyUtxo = { + input: { txHash: "cc", outputIndex: 2 }, + output: { address: proxy.proxyAddress, amount: [{ unit: "lovelace", quantity: "2000000" }] }, +} as UTxO; + +jest.mock("@/lib/cors", () => ({ + __esModule: true, + addCorsCacheBustingHeaders: addCorsHeadersMock, + cors: corsMock, +}), { virtual: true }); + +jest.mock("@/lib/security/requestGuards", () => ({ + __esModule: true, + applyRateLimit: applyRateLimitMock, + applyBotRateLimit: applyBotRateLimitMock, + enforceBodySize: enforceBodySizeMock, +}), { virtual: true }); + +jest.mock("@/lib/verifyJwt", () => ({ + __esModule: true, + verifyJwt: verifyJwtMock, + isBotJwt: isBotJwtMock, +}), { virtual: true }); + +jest.mock("@/server/db", () => ({ + __esModule: true, + db: {}, +}), { virtual: true }); + +jest.mock("@/lib/server/v1WalletAuth", () => ({ + __esModule: true, + authorizeWalletSignerForV1Tx: authorizeWalletSignerForV1TxMock, +}), { virtual: true }); + 
+jest.mock("@/lib/server/proxyAccess", () => ({ + __esModule: true, + loadActiveProxyForWallet: loadActiveProxyForWalletMock, +}), { virtual: true }); + +jest.mock("@/lib/server/walletScriptAddress", () => ({ + __esModule: true, + resolveWalletScriptAddress: resolveWalletScriptAddressMock, +}), { virtual: true }); + +jest.mock("@/lib/server/resolveUtxoRefsFromChain", () => ({ + __esModule: true, + resolveUtxoRefsFromChain: resolveUtxoRefsFromChainMock, +}), { virtual: true }); + +jest.mock("@/lib/server/proxyUtxos", () => ({ + __esModule: true, + requireAuthTokenUtxo: requireAuthTokenUtxoMock, + resolveCollateralRefFromChain: resolveCollateralRefFromChainMock, + resolveSingleUtxoRefFromChain: resolveSingleUtxoRefFromChainMock, +}), { virtual: true }); + +jest.mock("@/lib/server/createPendingMultisigTransaction", () => ({ + __esModule: true, + createPendingMultisigTransaction: createPendingMultisigTransactionMock, +}), { virtual: true }); + +jest.mock("@/utils/get-provider", () => ({ + __esModule: true, + getProvider: () => ({ fetchAddressUTxOs: fetchAddressUTxOsMock }), +}), { virtual: true }); + +jest.mock("@/utils/get-tx-builder", () => ({ + __esModule: true, + getTxBuilder: getTxBuilderMock, +}), { virtual: true }); + +jest.mock("@/lib/server/proxyTxBuilders", () => ({ + __esModule: true, + buildProxyCleanupSweepTx: buildProxyCleanupSweepTxMock, + buildProxyCleanupTx: buildProxyCleanupTxMock, + deriveProxyScripts: deriveProxyScriptsMock, +}), { virtual: true }); + +let handler: (req: NextApiRequest, res: NextApiResponse) => Promise; + +beforeAll(async () => { + ({ default: handler } = await import("../pages/api/v1/proxyCleanup")); +}); + +beforeEach(() => { + jest.clearAllMocks(); + applyRateLimitMock.mockReturnValue(true); + applyBotRateLimitMock.mockReturnValue(true); + enforceBodySizeMock.mockReturnValue(true); + corsMock.mockResolvedValue(undefined); + verifyJwtMock.mockReturnValue(makeBotJwtPayload()); + isBotJwtMock.mockReturnValue(true); + 
(authorizeWalletSignerForV1TxMock as any).mockResolvedValue({ + wallet: { scriptCbor: "script", numRequiredSigners: 2, type: "all" }, + }); + (loadActiveProxyForWalletMock as any).mockResolvedValue(proxy); + resolveWalletScriptAddressMock.mockReturnValue("addr_test_wallet"); + (resolveUtxoRefsFromChainMock as any).mockResolvedValue({ utxos: [{ input: { txHash: "bb", outputIndex: 1 } }] }); + (resolveCollateralRefFromChainMock as any).mockResolvedValue({ collateral: { input: { txHash: "dd", outputIndex: 3 } } }); + requireAuthTokenUtxoMock.mockReturnValue({ input: { txHash: "bb", outputIndex: 1 } }); + deriveProxyScriptsMock.mockReturnValue({ + authTokenId: proxy.authTokenId, + proxyAddress: proxy.proxyAddress, + }); + buildProxyCleanupSweepTxMock.mockReturnValue({ sweptProxyUtxos: "1", preservedAuthTokens: "1" }); + buildProxyCleanupTxMock.mockReturnValue({ burnedAuthTokens: "10" }); + (completeMock as any).mockResolvedValue("tx-cbor"); + getTxBuilderMock.mockReturnValue({ complete: completeMock, meshTxBuilderBody: {} }); + (createPendingMultisigTransactionMock as any).mockResolvedValue({ id: "tx-1" }); +}); + +function cleanupRequest(body: Record): NextApiRequest { + return { + method: "POST", + headers: makeBearerAuth(), + body: { + walletId: "wallet-1", + address: makeBotJwtPayload().address, + proxyId: proxy.id, + utxoRefs: [{ txHash: "bb", outputIndex: 1 }], + collateralRef: { txHash: "dd", outputIndex: 3 }, + ...body, + }, + } as unknown as NextApiRequest; +} + +describe("proxyCleanup bot API", () => { + it("builds a sweep cleanup when proxy UTxOs remain", async () => { + (fetchAddressUTxOsMock as any).mockResolvedValue([proxyUtxo]); + const res = createMockResponse(); + + await handler(cleanupRequest({}), res); + + expect(resolveCollateralRefFromChainMock).toHaveBeenCalledWith( + expect.objectContaining({ + collateralRef: { txHash: "dd", outputIndex: 3 }, + expectedAddress: makeBotJwtPayload().address, + }), + ); + 
expect(buildProxyCleanupSweepTxMock).toHaveBeenCalledWith( + expect.objectContaining({ proxyUtxos: [proxyUtxo] }), + ); + expect(createPendingMultisigTransactionMock).toHaveBeenCalledWith( + expect.anything(), + expect.objectContaining({ + proposerAddress: makeBotJwtPayload().address, + initialSignedAddresses: [], + }), + ); + expect(buildProxyCleanupTxMock).not.toHaveBeenCalled(); + expect(res.status).toHaveBeenCalledWith(201); + expect(res.json).toHaveBeenCalledWith({ + transaction: { id: "tx-1" }, + cleanup: { phase: "sweep", sweptProxyUtxos: "1", preservedAuthTokens: "1" }, + }); + }); + + it("builds a burn cleanup when the proxy address is empty", async () => { + (fetchAddressUTxOsMock as any).mockResolvedValue([]); + const res = createMockResponse(); + + await handler(cleanupRequest({}), res); + + expect(buildProxyCleanupTxMock).toHaveBeenCalled(); + expect(buildProxyCleanupSweepTxMock).not.toHaveBeenCalled(); + expect(res.status).toHaveBeenCalledWith(201); + expect(res.json).toHaveBeenCalledWith({ + transaction: { id: "tx-1" }, + cleanup: { phase: "burn", burnedAuthTokens: "10" }, + }); + }); + + it("rejects explicit proxyUtxoRefs that omit visible proxy UTxOs", async () => { + (fetchAddressUTxOsMock as any).mockResolvedValue([proxyUtxo]); + const res = createMockResponse(); + + await handler( + cleanupRequest({ proxyUtxoRefs: [{ txHash: "ee", outputIndex: 4 }] }), + res, + ); + + expect(res.status).toHaveBeenCalledWith(400); + expect(buildProxyCleanupSweepTxMock).not.toHaveBeenCalled(); + }); +}); diff --git a/src/__tests__/proxyCleanupFinalization.test.ts b/src/__tests__/proxyCleanupFinalization.test.ts new file mode 100644 index 00000000..254b533d --- /dev/null +++ b/src/__tests__/proxyCleanupFinalization.test.ts @@ -0,0 +1,172 @@ +import { describe, expect, it, jest } from "@jest/globals"; +import type { Proxy } from "@prisma/client"; +import type { UTxO } from "@meshsdk/core"; + +jest.mock("@/utils/get-provider", () => ({ + __esModule: true, + 
getProvider: jest.fn(), +}), { virtual: true }); + +const proxy = { + id: "proxy-1", + walletId: "wallet-1", + proxyAddress: "addr_test_proxy", + authTokenId: "policy", + paramUtxo: "{}", + description: null, + isActive: true, + createdAt: new Date(), + updatedAt: new Date(), +} as Proxy; + +const mkUtxo = (amount: UTxO["output"]["amount"]): UTxO => + ({ + input: { txHash: "aa", outputIndex: 0 }, + output: { address: "addr_test_wallet", amount }, + }) as UTxO; + +function createDb() { + return { + proxy: { + update: jest.fn(async ({ data }: { data: Partial }) => ({ + ...proxy, + ...data, + })), + }, + }; +} + +describe("finalizeConfirmedProxyCleanup", () => { + it("deactivates the proxy when auth tokens are gone", async () => { + const { finalizeConfirmedProxyCleanup } = await import("@/lib/server/proxyCleanupFinalization"); + const db = createDb(); + const result = await finalizeConfirmedProxyCleanup({ + db: db as never, + network: 0, + proxy, + walletAddress: "addr_test_wallet", + txHash: "cleanup-burn-tx", + provider: { + fetchAddressUTxOs: jest.fn(async (address: string) => + address === proxy.proxyAddress + ? 
[] + : [mkUtxo([{ unit: "lovelace", quantity: "2000000" }])], + ), + get: jest.fn(async () => ({ + inputs: [ + { + address: "addr_test_wallet", + amount: [ + { unit: "lovelace", quantity: "2000000" }, + { unit: "policy", quantity: "1" }, + ], + }, + ], + outputs: [{ address: "addr_test_wallet", amount: [{ unit: "lovelace", quantity: "1500000" }] }], + })), + }, + }); + + expect("error" in result).toBe(false); + expect(db.proxy.update).toHaveBeenCalledWith( + expect.objectContaining({ + where: { id: proxy.id }, + data: { isActive: false }, + }), + ); + }); + + it("rejects cleanup finalization while auth tokens are still on-chain", async () => { + const { finalizeConfirmedProxyCleanup } = await import("@/lib/server/proxyCleanupFinalization"); + const result = await finalizeConfirmedProxyCleanup({ + db: createDb() as never, + network: 0, + proxy, + walletAddress: "addr_test_wallet", + txHash: "cleanup-burn-tx", + provider: { + fetchAddressUTxOs: jest.fn(async () => [ + mkUtxo([ + { unit: "lovelace", quantity: "2000000" }, + { unit: "policy", quantity: "1" }, + ]), + ]), + get: jest.fn(async () => ({ + inputs: [ + { + address: "addr_test_wallet", + amount: [ + { unit: "lovelace", quantity: "2000000" }, + { unit: "policy", quantity: "1" }, + ], + }, + ], + outputs: [{ address: "addr_test_wallet", amount: [{ unit: "lovelace", quantity: "1500000" }] }], + })), + }, + }); + + expect("error" in result).toBe(true); + if ("error" in result) { + expect(result.error).toContain("auth tokens are still visible"); + } + }); + + it("rejects cleanup finalization while proxy UTxOs remain", async () => { + const { finalizeConfirmedProxyCleanup } = await import("@/lib/server/proxyCleanupFinalization"); + const result = await finalizeConfirmedProxyCleanup({ + db: createDb() as never, + network: 0, + proxy, + walletAddress: "addr_test_wallet", + txHash: "cleanup-burn-tx", + provider: { + fetchAddressUTxOs: jest.fn(async (address: string) => + address === proxy.proxyAddress + ? 
[mkUtxo([{ unit: "lovelace", quantity: "1000000" }])] + : [], + ), + get: jest.fn(async () => ({ + inputs: [ + { + address: "addr_test_wallet", + amount: [ + { unit: "lovelace", quantity: "2000000" }, + { unit: "policy", quantity: "1" }, + ], + }, + ], + outputs: [{ address: "addr_test_wallet", amount: [{ unit: "lovelace", quantity: "1500000" }] }], + })), + }, + }); + + expect("error" in result).toBe(true); + if ("error" in result) { + expect(result.error).toContain("proxy address still has on-chain UTxOs"); + } + }); + + it("rejects finalization when txHash does not spend the auth token", async () => { + const { finalizeConfirmedProxyCleanup } = await import("@/lib/server/proxyCleanupFinalization"); + const result = await finalizeConfirmedProxyCleanup({ + db: createDb() as never, + network: 0, + proxy, + walletAddress: "addr_test_wallet", + txHash: "wrong-tx", + provider: { + fetchAddressUTxOs: jest.fn(async () => []), + get: jest.fn(async () => ({ + inputs: [{ address: "addr_test_wallet", amount: [{ unit: "lovelace", quantity: "2000000" }] }], + outputs: [{ address: "addr_test_wallet", amount: [{ unit: "lovelace", quantity: "1500000" }] }], + })), + }, + }); + + expect("error" in result).toBe(true); + if ("error" in result) { + expect(result.error).toContain("txHash does not match confirmed proxy cleanup burn outputs"); + } + }); +}); diff --git a/src/__tests__/proxyCleanupRuntime.test.ts b/src/__tests__/proxyCleanupRuntime.test.ts new file mode 100644 index 00000000..1b1b7c7b --- /dev/null +++ b/src/__tests__/proxyCleanupRuntime.test.ts @@ -0,0 +1,51 @@ +import { describe, expect, it } from "@jest/globals"; +import { + shouldSkipActionConfirmation, + shouldSkipCleanupBurnPropose, + shouldSkipCleanupBurnSigning, +} from "../../scripts/ci/scenarios/steps/proxyBot"; + +describe("proxy cleanup runtime state", () => { + it("skips the optional burn proposal when initial cleanup already produced burn", () => { + const runtime = { cleanupPhase: "burn" as const }; + + 
expect(shouldSkipCleanupBurnPropose(runtime)).toBe(true); + expect( + shouldSkipCleanupBurnSigning({ + cleanupBurnSkipped: true, + cleanupBurnTransactionId: undefined, + }), + ).toBe(true); + }); + + it("runs burn signing after a separate burn transaction is proposed", () => { + const runtime = { + cleanupPhase: "burn" as const, + cleanupBurnSkipped: false, + cleanupBurnTransactionId: "tx-burn", + }; + + expect(shouldSkipCleanupBurnPropose({ cleanupPhase: "sweep" })).toBe(false); + expect(shouldSkipCleanupBurnSigning(runtime)).toBe(false); + }); + + it("skips burn signing when no burn transaction was created", () => { + expect( + shouldSkipCleanupBurnSigning({ + cleanupBurnSkipped: false, + cleanupBurnTransactionId: undefined, + }), + ).toBe(true); + }); + + it("skips action confirmation until a transaction id and spent inputs are recorded", () => { + expect(shouldSkipActionConfirmation({})).toBe(true); + expect(shouldSkipActionConfirmation({ actionTransactionId: "tx-1" })).toBe(true); + expect( + shouldSkipActionConfirmation({ + actionTransactionId: "tx-1", + actionUtxoRefs: [{ txHash: "hash", outputIndex: 0 }], + }), + ).toBe(false); + }); +}); diff --git a/src/__tests__/proxyDRepInfo.test.ts b/src/__tests__/proxyDRepInfo.test.ts new file mode 100644 index 00000000..0bd3619a --- /dev/null +++ b/src/__tests__/proxyDRepInfo.test.ts @@ -0,0 +1,156 @@ +import { beforeAll, beforeEach, describe, expect, it, jest } from "@jest/globals"; +import type { NextApiRequest, NextApiResponse } from "next"; +import { createMockResponse, makeBearerAuth, makeBotJwtPayload } from "./apiTestUtils"; + +const addCorsHeadersMock = jest.fn<(res: NextApiResponse) => void>(); +const corsMock = jest.fn<(req: NextApiRequest, res: NextApiResponse) => Promise>(); +const applyRateLimitMock = jest.fn<(req: NextApiRequest, res: NextApiResponse) => boolean>(); +const applyBotRateLimitMock = jest.fn<(req: NextApiRequest, res: NextApiResponse, botId: string) => boolean>(); +const verifyJwtMock: 
jest.Mock = jest.fn(); +const isBotJwtMock: jest.Mock = jest.fn(); +const authorizeProxyReadForV1Mock: jest.Mock = jest.fn(); +const loadActiveProxyForWalletMock: jest.Mock = jest.fn(); +const deriveProxyScriptsMock: jest.Mock = jest.fn(); + +const proxy = { + id: "proxy-1", + walletId: "wallet-1", + proxyAddress: "addr_test_proxy", + authTokenId: "policy", + paramUtxo: JSON.stringify({ txHash: "aa", outputIndex: 0 }), + isActive: true, +}; + +jest.mock("@/env", () => ({ + __esModule: true, + env: { BLOCKFROST_API_KEY_PREPROD: "preprod-key" }, +}), { virtual: true }); + +jest.mock("@/lib/cors", () => ({ + __esModule: true, + addCorsCacheBustingHeaders: addCorsHeadersMock, + cors: corsMock, +}), { virtual: true }); + +jest.mock("@/lib/security/requestGuards", () => ({ + __esModule: true, + applyRateLimit: applyRateLimitMock, + applyBotRateLimit: applyBotRateLimitMock, +}), { virtual: true }); + +jest.mock("@/lib/verifyJwt", () => ({ + __esModule: true, + verifyJwt: verifyJwtMock, + isBotJwt: isBotJwtMock, +}), { virtual: true }); + +jest.mock("@/server/db", () => ({ + __esModule: true, + db: {}, +}), { virtual: true }); + +jest.mock("@/lib/server/proxyAccess", () => ({ + __esModule: true, + authorizeProxyReadForV1: authorizeProxyReadForV1Mock, + loadActiveProxyForWallet: loadActiveProxyForWalletMock, +}), { virtual: true }); + +jest.mock("@/lib/server/proxyTxBuilders", () => ({ + __esModule: true, + deriveProxyScripts: deriveProxyScriptsMock, +}), { virtual: true }); + +let handler: (req: NextApiRequest, res: NextApiResponse) => Promise; + +beforeAll(async () => { + ({ default: handler } = await import("../pages/api/v1/proxyDRepInfo")); +}); + +beforeEach(() => { + jest.clearAllMocks(); + applyRateLimitMock.mockReturnValue(true); + applyBotRateLimitMock.mockReturnValue(true); + corsMock.mockResolvedValue(undefined); + verifyJwtMock.mockReturnValue(makeBotJwtPayload()); + isBotJwtMock.mockReturnValue(true); + (authorizeProxyReadForV1Mock as any).mockResolvedValue({ 
wallet: { id: "wallet-1" } }); + (loadActiveProxyForWalletMock as any).mockResolvedValue(proxy); + deriveProxyScriptsMock.mockReturnValue({ + authTokenId: proxy.authTokenId, + proxyAddress: proxy.proxyAddress, + dRepId: "drep1proxy", + }); + global.fetch = jest.fn(async () => ({ + ok: true, + status: 200, + json: async () => ({ active: true }), + text: async () => "", + })) as never; +}); + +function infoRequest(query: Record = {}): NextApiRequest { + return { + method: "GET", + headers: makeBearerAuth(), + query: { + walletId: "wallet-1", + address: makeBotJwtPayload().address, + proxyId: proxy.id, + ...query, + }, + } as unknown as NextApiRequest; +} + +describe("proxyDRepInfo API", () => { + it("returns active proxy DRep status", async () => { + const res = createMockResponse(); + + await handler(infoRequest(), res); + + expect(global.fetch).toHaveBeenCalledWith( + "https://cardano-preprod.blockfrost.io/api/v0/governance/dreps/drep1proxy", + { headers: { project_id: "preprod-key" } }, + ); + expect(res.status).toHaveBeenCalledWith(200); + expect(res.json).toHaveBeenCalledWith({ active: true, dRepId: "drep1proxy" }); + }); + + it("returns inactive when Blockfrost reports the DRep is not found", async () => { + global.fetch = jest.fn(async () => ({ + ok: false, + status: 404, + json: async () => ({}), + text: async () => "not found", + })) as never; + const res = createMockResponse(); + + await handler(infoRequest(), res); + + expect(res.status).toHaveBeenCalledWith(200); + expect(res.json).toHaveBeenCalledWith({ active: false, dRepId: "drep1proxy" }); + }); + + it("rejects unauthorized proxy reads", async () => { + authorizeProxyReadForV1Mock.mockRejectedValueOnce(Object.assign(new Error("Not authorized for this wallet"), { code: "FORBIDDEN" })); + const res = createMockResponse(); + + await handler(infoRequest(), res); + + expect(res.status).toHaveBeenCalledWith(403); + expect(res.json).toHaveBeenCalledWith({ error: "Not authorized for this wallet" }); + }); + + 
it("rejects stored proxy metadata mismatches", async () => { + deriveProxyScriptsMock.mockReturnValueOnce({ + authTokenId: "different-policy", + proxyAddress: proxy.proxyAddress, + dRepId: "drep1proxy", + }); + const res = createMockResponse(); + + await handler(infoRequest(), res); + + expect(res.status).toHaveBeenCalledWith(409); + expect(res.json).toHaveBeenCalledWith({ error: "Stored proxy metadata does not match derived scripts" }); + }); +}); diff --git a/src/__tests__/proxySetup.bot.test.ts b/src/__tests__/proxySetup.bot.test.ts new file mode 100644 index 00000000..ee796b2c --- /dev/null +++ b/src/__tests__/proxySetup.bot.test.ts @@ -0,0 +1,169 @@ +import { beforeAll, beforeEach, describe, expect, it, jest } from "@jest/globals"; +import type { NextApiRequest, NextApiResponse } from "next"; +import { createMockResponse, makeBearerAuth, makeBotJwtPayload } from "./apiTestUtils"; + +const addCorsHeadersMock = jest.fn<(res: NextApiResponse) => void>(); +const corsMock = jest.fn<(req: NextApiRequest, res: NextApiResponse) => Promise>(); +const applyRateLimitMock = jest.fn<(req: NextApiRequest, res: NextApiResponse) => boolean>(); +const applyBotRateLimitMock = jest.fn<(req: NextApiRequest, res: NextApiResponse, botId: string) => boolean>(); +const enforceBodySizeMock = jest.fn<(req: NextApiRequest, res: NextApiResponse, maxBytes: number) => boolean>(); +const verifyJwtMock: jest.Mock = jest.fn(); +const isBotJwtMock: jest.Mock = jest.fn(); +const authorizeWalletSignerForV1TxMock: jest.Mock = jest.fn(); +const resolveUtxoRefsFromChainMock: jest.Mock = jest.fn(); +const resolveCollateralRefFromChainMock: jest.Mock = jest.fn(); +const resolveWalletScriptAddressMock: jest.Mock = jest.fn(); +const buildProxySetupTxMock: jest.Mock = jest.fn(); +const createPendingMultisigTransactionMock: jest.Mock = jest.fn(); +const completeMock: jest.Mock = jest.fn(); +const getTxBuilderMock: jest.Mock = jest.fn(); + +jest.mock("@/lib/cors", () => ({ + __esModule: true, + 
addCorsCacheBustingHeaders: addCorsHeadersMock, + cors: corsMock, +}), { virtual: true }); + +jest.mock("@/lib/security/requestGuards", () => ({ + __esModule: true, + applyRateLimit: applyRateLimitMock, + applyBotRateLimit: applyBotRateLimitMock, + enforceBodySize: enforceBodySizeMock, +}), { virtual: true }); + +jest.mock("@/lib/verifyJwt", () => ({ + __esModule: true, + verifyJwt: verifyJwtMock, + isBotJwt: isBotJwtMock, +}), { virtual: true }); + +jest.mock("@/server/db", () => ({ + __esModule: true, + db: {}, +}), { virtual: true }); + +jest.mock("@/lib/server/v1WalletAuth", () => ({ + __esModule: true, + authorizeWalletSignerForV1Tx: authorizeWalletSignerForV1TxMock, +}), { virtual: true }); + +jest.mock("@/lib/server/walletScriptAddress", () => ({ + __esModule: true, + resolveWalletScriptAddress: resolveWalletScriptAddressMock, +}), { virtual: true }); + +jest.mock("@/lib/server/resolveUtxoRefsFromChain", () => ({ + __esModule: true, + resolveUtxoRefsFromChain: resolveUtxoRefsFromChainMock, +}), { virtual: true }); + +jest.mock("@/lib/server/proxyUtxos", () => ({ + __esModule: true, + resolveCollateralRefFromChain: resolveCollateralRefFromChainMock, +}), { virtual: true }); + +jest.mock("@/lib/server/createPendingMultisigTransaction", () => ({ + __esModule: true, + createPendingMultisigTransaction: createPendingMultisigTransactionMock, +}), { virtual: true }); + +jest.mock("@/utils/get-tx-builder", () => ({ + __esModule: true, + getTxBuilder: getTxBuilderMock, +}), { virtual: true }); + +jest.mock("@/lib/server/proxyTxBuilders", () => ({ + __esModule: true, + DEFAULT_PROXY_SETUP_LOVELACE: "1000000", + buildProxySetupTx: buildProxySetupTxMock, +}), { virtual: true }); + +let handler: (req: NextApiRequest, res: NextApiResponse) => Promise; + +beforeAll(async () => { + ({ default: handler } = await import("../pages/api/v1/proxySetup")); +}); + +beforeEach(() => { + jest.clearAllMocks(); + applyRateLimitMock.mockReturnValue(true); + 
applyBotRateLimitMock.mockReturnValue(true); + enforceBodySizeMock.mockReturnValue(true); + corsMock.mockResolvedValue(undefined); + verifyJwtMock.mockReturnValue(makeBotJwtPayload()); + isBotJwtMock.mockReturnValue(true); + (authorizeWalletSignerForV1TxMock as any).mockResolvedValue({ + wallet: { scriptCbor: "script", numRequiredSigners: 2, type: "all" }, + }); + resolveWalletScriptAddressMock.mockReturnValue("addr_test_wallet_script"); + (resolveUtxoRefsFromChainMock as any).mockResolvedValue({ utxos: [{ input: { txHash: "aa", outputIndex: 0 } }] }); + (resolveCollateralRefFromChainMock as any).mockResolvedValue({ collateral: { input: { txHash: "bb", outputIndex: 1 } } }); + buildProxySetupTxMock.mockReturnValue({ + proxyAddress: "addr_test_proxy", + authTokenId: "policy", + paramUtxo: { txHash: "aa", outputIndex: 0 }, + }); + (completeMock as any).mockResolvedValue("tx-cbor"); + getTxBuilderMock.mockReturnValue({ complete: completeMock, meshTxBuilderBody: {} }); + (createPendingMultisigTransactionMock as any).mockResolvedValue({ id: "tx-1" }); +}); + +describe("proxySetup bot API", () => { + it("rejects invalid initialProxyLovelace before resolving UTxOs", async () => { + const req = { + method: "POST", + headers: makeBearerAuth(), + body: { + walletId: "wallet-1", + address: makeBotJwtPayload().address, + utxoRefs: [{ txHash: "aa", outputIndex: 0 }], + collateralRef: { txHash: "bb", outputIndex: 1 }, + initialProxyLovelace: "0", + }, + } as unknown as NextApiRequest; + const res = createMockResponse(); + + await handler(req, res); + + expect(res.status).toHaveBeenCalledWith(400); + expect(resolveUtxoRefsFromChainMock).not.toHaveBeenCalled(); + }); + + it("passes valid initialProxyLovelace to the setup builder", async () => { + const req = { + method: "POST", + headers: makeBearerAuth(), + body: { + walletId: "wallet-1", + address: makeBotJwtPayload().address, + utxoRefs: [{ txHash: "aa", outputIndex: 0 }], + collateralRef: { txHash: "bb", outputIndex: 1 }, + 
initialProxyLovelace: "5000000", + }, + } as unknown as NextApiRequest; + const res = createMockResponse(); + + await handler(req, res); + + expect(resolveCollateralRefFromChainMock).toHaveBeenCalledWith( + expect.objectContaining({ + collateralRef: { txHash: "bb", outputIndex: 1 }, + expectedAddress: makeBotJwtPayload().address, + }), + ); + expect(buildProxySetupTxMock).toHaveBeenCalledWith( + expect.objectContaining({ + initialProxyLovelace: "5000000", + multisigScriptCbor: "script", + }), + ); + expect(createPendingMultisigTransactionMock).toHaveBeenCalledWith( + expect.anything(), + expect.objectContaining({ + proposerAddress: makeBotJwtPayload().address, + initialSignedAddresses: [], + }), + ); + expect(res.status).toHaveBeenCalledWith(201); + }); +}); diff --git a/src/__tests__/proxySetupFinalization.test.ts b/src/__tests__/proxySetupFinalization.test.ts new file mode 100644 index 00000000..a5697392 --- /dev/null +++ b/src/__tests__/proxySetupFinalization.test.ts @@ -0,0 +1,205 @@ +import { describe, expect, it, jest } from "@jest/globals"; +import type { UTxO } from "@meshsdk/core"; + +jest.mock("@/utils/get-provider", () => ({ + __esModule: true, + getProvider: jest.fn(), +}), { virtual: true }); + +const setup = { + proxyAddress: "addr_test_proxy", + authTokenId: "policy", + paramUtxo: { txHash: "aa", outputIndex: 0 }, + description: "CI proxy setup", +}; +const validTxHash = "a".repeat(64); + +const mkUtxo = ( + address: string, + amount: UTxO["output"]["amount"], + txHash = "aa", + outputIndex = 0, +): UTxO => + ({ + input: { txHash, outputIndex }, + output: { address, amount }, + }) as UTxO; + +function createDb(existingProxy?: unknown) { + return { + proxy: { + findFirst: jest.fn(async () => existingProxy ?? 
null), + create: jest.fn(async ({ data }: { data: Record }) => ({ + id: "proxy-1", + ...data, + })), + update: jest.fn(async ({ data }: { data: Record }) => ({ + id: "proxy-1", + ...data, + })), + }, + }; +} + +function createProvider(args: { walletUtxos: UTxO[]; proxyUtxos: UTxO[] }) { + return { + fetchAddressUTxOs: jest.fn(async (address: string) => + address === setup.proxyAddress ? args.proxyUtxos : args.walletUtxos, + ), + get: jest.fn(async () => ({ + outputs: [ + { + address: setup.proxyAddress, + amount: [{ unit: "lovelace", quantity: "10000000" }], + }, + { + address: "addr_test_wallet", + amount: [ + { unit: "lovelace", quantity: "2000000" }, + { unit: setup.authTokenId, quantity: "1" }, + ], + }, + ], + })), + }; +} + +describe("finalizeConfirmedProxySetup", () => { + it("creates a proxy row when confirmed chain state is present", async () => { + const { finalizeConfirmedProxySetup } = await import("@/lib/server/proxySetupFinalization"); + const db = createDb(); + const provider = createProvider({ + walletUtxos: [ + mkUtxo("addr_test_wallet", [ + { unit: "lovelace", quantity: "2000000" }, + { unit: "policy", quantity: "1" }, + ]), + ], + proxyUtxos: [ + mkUtxo("addr_test_proxy", [{ unit: "lovelace", quantity: "1000000" }]), + ], + }); + + const result = await finalizeConfirmedProxySetup({ + db: db as never, + network: 0, + walletId: "wallet-1", + walletAddress: "addr_test_wallet", + txHash: validTxHash, + setup, + provider, + }); + + expect("error" in result).toBe(false); + expect(db.proxy.create).toHaveBeenCalledWith({ + data: { + walletId: "wallet-1", + proxyAddress: setup.proxyAddress, + authTokenId: setup.authTokenId, + paramUtxo: JSON.stringify(setup.paramUtxo), + description: setup.description, + isActive: true, + }, + }); + }); + + it("rejects confirmed setup when the auth token is missing at the wallet", async () => { + const { finalizeConfirmedProxySetup } = await import("@/lib/server/proxySetupFinalization"); + const result = await 
finalizeConfirmedProxySetup({ + db: createDb() as never, + network: 0, + walletId: "wallet-1", + walletAddress: "addr_test_wallet", + txHash: validTxHash, + setup, + provider: createProvider({ + walletUtxos: [ + mkUtxo("addr_test_wallet", [{ unit: "lovelace", quantity: "2000000" }]), + ], + proxyUtxos: [ + mkUtxo("addr_test_proxy", [{ unit: "lovelace", quantity: "1000000" }]), + ], + }), + }); + + expect("error" in result).toBe(true); + if ("error" in result) { + expect(result.error).toContain("auth token is not present"); + } + }); + + it("is idempotent when an active proxy row already exists", async () => { + const { finalizeConfirmedProxySetup } = await import("@/lib/server/proxySetupFinalization"); + const existingProxy = { id: "proxy-existing", isActive: true }; + const db = createDb(existingProxy); + const result = await finalizeConfirmedProxySetup({ + db: db as never, + network: 0, + walletId: "wallet-1", + walletAddress: "addr_test_wallet", + txHash: validTxHash, + setup, + provider: createProvider({ + walletUtxos: [ + mkUtxo("addr_test_wallet", [ + { unit: "lovelace", quantity: "2000000" }, + { unit: "policy", quantity: "1" }, + ]), + ], + proxyUtxos: [ + mkUtxo("addr_test_proxy", [{ unit: "lovelace", quantity: "1000000" }]), + ], + }), + }); + + expect(result).toBe(existingProxy); + expect(db.proxy.create).not.toHaveBeenCalled(); + }); + + it("rejects finalization when txHash does not match setup outputs", async () => { + const { finalizeConfirmedProxySetup } = await import("@/lib/server/proxySetupFinalization"); + const result = await finalizeConfirmedProxySetup({ + db: createDb() as never, + network: 0, + walletId: "wallet-1", + walletAddress: "addr_test_wallet", + txHash: validTxHash, + setup, + provider: { + fetchAddressUTxOs: jest.fn(async () => []), + get: jest.fn(async () => ({ + outputs: [{ address: "addr_test_other", amount: [{ unit: "lovelace", quantity: "1000000" }] }], + })), + }, + }); + + expect("error" in result).toBe(true); + if ("error" 
in result) { + expect(result.error).toContain("txHash does not match confirmed proxy setup outputs"); + } + }); + + it("rejects malformed txHash before provider lookup", async () => { + const { finalizeConfirmedProxySetup } = await import("@/lib/server/proxySetupFinalization"); + const provider = createProvider({ + walletUtxos: [], + proxyUtxos: [], + }); + + const result = await finalizeConfirmedProxySetup({ + db: createDb() as never, + network: 0, + walletId: "wallet-1", + walletAddress: "addr_test_wallet", + txHash: "transaction-row-id", + setup, + provider, + }); + + expect("error" in result).toBe(true); + if ("error" in result) { + expect(result.error).toContain("txHash must be a 64-character hex string"); + } + expect(provider.get).not.toHaveBeenCalled(); + }); +}); diff --git a/src/__tests__/proxyTxBuilders.test.ts b/src/__tests__/proxyTxBuilders.test.ts new file mode 100644 index 00000000..1cca2d4b --- /dev/null +++ b/src/__tests__/proxyTxBuilders.test.ts @@ -0,0 +1,273 @@ +import { describe, expect, it } from "@jest/globals"; +import type { UTxO } from "@meshsdk/core"; +import { + buildProxyDRepCertificateTx, + buildProxyCleanupTx, + buildProxyCleanupSweepTx, + buildProxyVoteTx, + buildProxySetupTx, + DEFAULT_PROXY_SETUP_LOVELACE, +} from "@/lib/server/proxyTxBuilders"; + +const mkUtxo = ( + address: string, + lovelace: string, + txHash = "a".repeat(64), + outputIndex = 0, +): UTxO => + ({ + input: { txHash, outputIndex }, + output: { + address, + amount: [{ unit: "lovelace", quantity: lovelace }], + }, + }) as UTxO; + +function createTxBuilderMock() { + const txOuts: Array<{ address: string; amount: UTxO["output"]["amount"] }> = []; + const mints: Array<{ quantity: string; policyId: string; tokenName: string }> = []; + const txIns: Array<{ txHash: string; outputIndex: number; address: string }> = []; + const builder = { + spendingPlutusScriptV3: () => builder, + txIn: ( + txHash: string, + outputIndex: number, + _amount: UTxO["output"]["amount"], + 
address: string, + ) => { + txIns.push({ txHash, outputIndex, address }); + return builder; + }, + txInScript: () => builder, + txInInlineDatumPresent: () => builder, + txInRedeemerValue: () => builder, + mintPlutusScriptV3: () => builder, + mint: (quantity: string, policyId: string, tokenName: string) => { + mints.push({ quantity, policyId, tokenName }); + return builder; + }, + mintingScript: () => builder, + mintRedeemerValue: () => builder, + txOut: (address: string, amount: UTxO["output"]["amount"]) => { + txOuts.push({ address, amount }); + return builder; + }, + txInCollateral: () => builder, + changeAddress: () => builder, + drepRegistrationCertificate: () => builder, + drepUpdateCertificate: () => builder, + drepDeregistrationCertificate: () => builder, + certificateScript: () => builder, + certificateRedeemerValue: () => builder, + votePlutusScriptV3: () => builder, + vote: () => builder, + voteScript: () => builder, + voteRedeemerValue: () => builder, + }; + + return { builder, txOuts, mints, txIns }; +} + +describe("buildProxySetupTx", () => { + it("defaults the proxy output to the minimal setup lovelace", () => { + const { builder, txOuts } = createTxBuilderMock(); + const setup = buildProxySetupTx({ + txBuilder: builder as never, + network: 0, + walletUtxos: [mkUtxo("addr_test_wallet", "20000000")], + walletAddress: "addr_test_wallet", + collateral: mkUtxo("addr_test_collateral", "5000000", "b".repeat(64), 1), + }); + + expect(txOuts).toContainEqual({ + address: setup.proxyAddress, + amount: [{ unit: "lovelace", quantity: DEFAULT_PROXY_SETUP_LOVELACE }], + }); + }); + + it("uses initialProxyLovelace for the proxy setup output", () => { + const { builder, txOuts } = createTxBuilderMock(); + const setup = buildProxySetupTx({ + txBuilder: builder as never, + network: 0, + walletUtxos: [mkUtxo("addr_test_wallet", "20000000")], + walletAddress: "addr_test_wallet", + collateral: mkUtxo("addr_test_collateral", "5000000", "b".repeat(64), 1), + 
initialProxyLovelace: "5000000", + }); + + expect(txOuts).toContainEqual({ + address: setup.proxyAddress, + amount: [{ unit: "lovelace", quantity: "5000000" }], + }); + }); + + it("burns all 10 auth tokens for proxy cleanup", () => { + const setupBuilder = createTxBuilderMock(); + const setup = buildProxySetupTx({ + txBuilder: setupBuilder.builder as never, + network: 0, + walletUtxos: [mkUtxo("addr_test_wallet", "20000000")], + walletAddress: "addr_test_wallet", + collateral: mkUtxo("addr_test_collateral", "5000000", "b".repeat(64), 1), + }); + + const cleanupBuilder = createTxBuilderMock(); + const result = buildProxyCleanupTx({ + txBuilder: cleanupBuilder.builder as never, + network: 0, + paramUtxo: setup.paramUtxo, + walletUtxos: [ + ({ + ...mkUtxo("addr_test_wallet", "3000000", "c".repeat(64), 2), + output: { + address: "addr_test_wallet", + amount: [ + { unit: "lovelace", quantity: "3000000" }, + { unit: setup.authTokenId, quantity: "10" }, + ], + }, + }) as UTxO, + ], + collateral: mkUtxo("addr_test_collateral", "5000000", "d".repeat(64), 3), + walletAddress: "addr_test_wallet", + authTokenId: setup.authTokenId, + }); + + expect(result).toEqual({ burnedAuthTokens: "10" }); + expect(cleanupBuilder.mints).toContainEqual({ + quantity: "-10", + policyId: setup.authTokenId, + tokenName: "", + }); + }); + + it("sweeps proxy UTxOs back to the wallet while preserving an auth token", () => { + const setupBuilder = createTxBuilderMock(); + const setup = buildProxySetupTx({ + txBuilder: setupBuilder.builder as never, + network: 0, + walletUtxos: [mkUtxo("addr_test_wallet", "20000000")], + walletAddress: "addr_test_wallet", + collateral: mkUtxo("addr_test_collateral", "5000000", "b".repeat(64), 1), + }); + + const sweepBuilder = createTxBuilderMock(); + const result = buildProxyCleanupSweepTx({ + txBuilder: sweepBuilder.builder as never, + network: 0, + paramUtxo: setup.paramUtxo, + proxyAddress: setup.proxyAddress, + proxyUtxos: [mkUtxo(setup.proxyAddress, "2500000", 
"c".repeat(64), 2)], + walletUtxos: [ + ({ + ...mkUtxo("addr_test_wallet", "3000000", "d".repeat(64), 3), + output: { + address: "addr_test_wallet", + amount: [ + { unit: "lovelace", quantity: "3000000" }, + { unit: setup.authTokenId, quantity: "1" }, + ], + }, + }) as UTxO, + ], + authTokenUtxo: ({ + ...mkUtxo("addr_test_wallet", "3000000", "d".repeat(64), 3), + output: { + address: "addr_test_wallet", + amount: [ + { unit: "lovelace", quantity: "3000000" }, + { unit: setup.authTokenId, quantity: "1" }, + ], + }, + }) as UTxO, + collateral: mkUtxo("addr_test_collateral", "5000000", "e".repeat(64), 4), + walletAddress: "addr_test_wallet", + }); + + expect(result).toEqual({ sweptProxyUtxos: "1", preservedAuthTokens: "1" }); + expect(sweepBuilder.txIns).toContainEqual({ + txHash: "c".repeat(64), + outputIndex: 2, + address: setup.proxyAddress, + }); + expect(sweepBuilder.txOuts).toContainEqual({ + address: "addr_test_wallet", + amount: expect.arrayContaining([ + { unit: "lovelace", quantity: "2500000" }, + { unit: setup.authTokenId, quantity: "1" }, + ]), + }); + expect(sweepBuilder.mints).toEqual([]); + }); +}); + +describe("proxy action funding validation", () => { + it("rejects proxy vote inputs that cannot preserve the auth token output", () => { + const setupBuilder = createTxBuilderMock(); + const setup = buildProxySetupTx({ + txBuilder: setupBuilder.builder as never, + network: 0, + walletUtxos: [mkUtxo("addr_test_wallet", "20000000")], + walletAddress: "addr_test_wallet", + collateral: mkUtxo("addr_test_collateral", "5000000", "b".repeat(64), 1), + }); + const authTokenUtxo = { + ...mkUtxo("addr_test_wallet", "1200000", "c".repeat(64), 2), + output: { + address: "addr_test_wallet", + amount: [ + { unit: "lovelace", quantity: "1200000" }, + { unit: setup.authTokenId, quantity: "1" }, + ], + }, + } as UTxO; + + expect(() => + buildProxyVoteTx({ + txBuilder: createTxBuilderMock().builder as never, + network: 0, + paramUtxo: setup.paramUtxo, + walletUtxos: 
[authTokenUtxo], + authTokenUtxo, + collateral: mkUtxo("addr_test_collateral", "5000000", "d".repeat(64), 3), + walletAddress: "addr_test_wallet", + votes: [{ proposalId: `${"e".repeat(64)}#0`, voteKind: "Abstain" }], + }), + ).toThrow(/proxy vote requires at least 2 ADA in selected wallet inputs, but only 1.2 ADA was selected/); + }); + + it("rejects proxy DRep deregister inputs that cannot preserve the auth token output", () => { + const setupBuilder = createTxBuilderMock(); + const setup = buildProxySetupTx({ + txBuilder: setupBuilder.builder as never, + network: 0, + walletUtxos: [mkUtxo("addr_test_wallet", "20000000")], + walletAddress: "addr_test_wallet", + collateral: mkUtxo("addr_test_collateral", "5000000", "b".repeat(64), 1), + }); + const authTokenUtxo = { + ...mkUtxo("addr_test_wallet", "1200000", "c".repeat(64), 2), + output: { + address: "addr_test_wallet", + amount: [ + { unit: "lovelace", quantity: "1200000" }, + { unit: setup.authTokenId, quantity: "1" }, + ], + }, + } as UTxO; + + expect(() => + buildProxyDRepCertificateTx({ + txBuilder: createTxBuilderMock().builder as never, + network: 0, + paramUtxo: setup.paramUtxo, + walletUtxos: [authTokenUtxo], + authTokenUtxo, + collateral: mkUtxo("addr_test_collateral", "5000000", "d".repeat(64), 3), + walletAddress: "addr_test_wallet", + action: "deregister", + }), + ).toThrow(/proxy DRep deregister requires at least 2 ADA in selected wallet inputs, but only 1.2 ADA was selected/); + }); +}); diff --git a/src/__tests__/proxyUtxos.test.ts b/src/__tests__/proxyUtxos.test.ts new file mode 100644 index 00000000..1d9aeb97 --- /dev/null +++ b/src/__tests__/proxyUtxos.test.ts @@ -0,0 +1,115 @@ +import { describe, expect, it } from "@jest/globals"; +import type { UTxO } from "@meshsdk/core"; +import { + requireAuthTokenUtxo, + resolveCollateralRefFromChain, + selectProxyUtxosForOutputs, +} from "@/lib/server/proxyUtxos"; + +const mkUtxo = ( + address: string, + amount: UTxO["output"]["amount"], + txHash = "aa", 
+ outputIndex = 0, +): UTxO => + ({ + input: { txHash, outputIndex }, + output: { address, amount }, + }) as UTxO; + +describe("proxyUtxos", () => { + it("rejects collateral below 5 ADA", async () => { + const result = await resolveCollateralRefFromChain({ + network: 0, + collateralRef: { txHash: "aa", outputIndex: 0 }, + provider: { + fetchUTxOs: async () => [ + mkUtxo("addr_test", [{ unit: "lovelace", quantity: "4999999" }]), + ], + }, + }); + + expect("error" in result).toBe(true); + if ("error" in result) { + expect(result.error).toContain("at least 5 ADA"); + } + }); + + it("rejects collateral at an unexpected address", async () => { + const result = await resolveCollateralRefFromChain({ + network: 0, + collateralRef: { txHash: "aa", outputIndex: 0 }, + expectedAddress: "addr_test_signer", + provider: { + fetchUTxOs: async () => [ + mkUtxo("addr_test_wallet_script", [{ unit: "lovelace", quantity: "6000000" }]), + ], + }, + }); + + expect("error" in result).toBe(true); + if ("error" in result) { + expect(result.error).toContain("expected address"); + } + }); + + it("rejects collateral with native assets", async () => { + const result = await resolveCollateralRefFromChain({ + network: 0, + collateralRef: { txHash: "aa", outputIndex: 0 }, + expectedAddress: "addr_test_signer", + provider: { + fetchUTxOs: async () => [ + mkUtxo("addr_test_signer", [ + { unit: "lovelace", quantity: "6000000" }, + { unit: "policy.asset", quantity: "1" }, + ]), + ], + }, + }); + + expect("error" in result).toBe(true); + if ("error" in result) { + expect(result.error).toContain("ADA-only"); + } + }); + + it("finds the proxy auth-token UTxO", () => { + const result = requireAuthTokenUtxo( + [ + mkUtxo("addr_wallet", [{ unit: "lovelace", quantity: "3000000" }]), + mkUtxo( + "addr_wallet", + [ + { unit: "lovelace", quantity: "2000000" }, + { unit: "policyid", quantity: "1" }, + ], + "bb", + 1, + ), + ], + "policyid", + ); + + expect("error" in result).toBe(false); + if (!("error" in 
result)) { + expect(result.input.txHash).toBe("bb"); + } + }); + + it("selects proxy UTxOs that cover requested outputs plus fee buffer", () => { + const result = selectProxyUtxosForOutputs({ + proxyUtxos: [ + mkUtxo("addr_proxy", [{ unit: "lovelace", quantity: "1000000" }], "aa", 0), + mkUtxo("addr_proxy", [{ unit: "lovelace", quantity: "2500000" }], "bb", 1), + ], + outputs: [{ address: "addr_target", unit: "lovelace", amount: "1500000" }], + feeBufferLovelace: BigInt(500000), + }); + + expect(Array.isArray(result)).toBe(true); + if (Array.isArray(result)) { + expect(result.map((utxo) => utxo.input.txHash)).toEqual(["bb"]); + } + }); +}); diff --git a/src/__tests__/resolveDRepAnchorFromUrl.test.ts b/src/__tests__/resolveDRepAnchorFromUrl.test.ts new file mode 100644 index 00000000..8f38f629 --- /dev/null +++ b/src/__tests__/resolveDRepAnchorFromUrl.test.ts @@ -0,0 +1,42 @@ +import { afterEach, describe, expect, it, jest } from "@jest/globals"; +import { hashDrepAnchor } from "@meshsdk/core"; + +jest.mock("node:dns/promises", () => ({ + lookup: jest.fn(() => + Promise.resolve([{ address: "8.8.8.8", family: 4 }] as { address: string; family: number }[]), + ), +})); + +import { resolveDRepAnchorFromUrl } from "@/lib/server/resolveDRepAnchorFromUrl"; + +const originalFetch = global.fetch; + +afterEach(() => { + global.fetch = originalFetch; + jest.clearAllMocks(); +}); + +describe("resolveDRepAnchorFromUrl", () => { + it("computes hash from JSON body", async () => { + const doc = { "@context": "https://example.com", name: "Test" }; + const body = JSON.stringify(doc); + global.fetch = jest.fn(async () => { + return new Response(body, { status: 200, headers: { "Content-Type": "application/json" } }); + }) as unknown as typeof fetch; + + const r = await resolveDRepAnchorFromUrl("https://example.test/drep.json"); + expect(r.anchorUrl).toBe("https://example.test/drep.json"); + expect(r.anchorDataHash).toBe(hashDrepAnchor(doc as object)); + }); + + it("rejects when 
optional anchorDataHash mismatches", async () => { + const doc = { x: 1 }; + global.fetch = jest.fn(async () => { + return new Response(JSON.stringify(doc), { status: 200 }); + }) as unknown as typeof fetch; + + await expect( + resolveDRepAnchorFromUrl("https://example.test/a.json", "deadbeef"), + ).rejects.toThrow(/anchorDataHash does not match/); + }); +}); diff --git a/src/__tests__/resolveUtxoRefsFromChain.test.ts b/src/__tests__/resolveUtxoRefsFromChain.test.ts new file mode 100644 index 00000000..7c753c0b --- /dev/null +++ b/src/__tests__/resolveUtxoRefsFromChain.test.ts @@ -0,0 +1,53 @@ +import { describe, expect, it } from "@jest/globals"; +import type { UTxO } from "@meshsdk/core"; +import { resolveUtxoRefsFromChain } from "@/lib/server/resolveUtxoRefsFromChain"; + +const mkUtxo = (addr: string, txHash = "ab", idx = 0): UTxO => + ({ + input: { txHash, outputIndex: idx }, + output: { address: addr, amount: [{ unit: "lovelace", quantity: "3000000" }] }, + }) as UTxO; + +describe("resolveUtxoRefsFromChain", () => { + it("rejects empty utxoRefs", async () => { + const r = await resolveUtxoRefsFromChain({ + network: 0, + utxoRefs: [], + expectedSpendAddress: "addr1test", + provider: { fetchUTxOs: async () => [] }, + }); + expect("error" in r && r.status === 400).toBe(true); + }); + + it("rejects when output address does not match spend address", async () => { + const r = await resolveUtxoRefsFromChain({ + network: 0, + utxoRefs: [{ txHash: "aa", outputIndex: 0 }], + expectedSpendAddress: "addr_expected", + provider: { + fetchUTxOs: async () => [mkUtxo("addr_other", "aa", 0)], + }, + }); + expect("error" in r).toBe(true); + if ("error" in r) { + expect(r.error).toContain("multisig spend address"); + } + }); + + it("returns utxos when address matches", async () => { + const addr = "addr1qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqq"; + const r = await resolveUtxoRefsFromChain({ + network: 0, 
+ utxoRefs: [{ txHash: "aa", outputIndex: 1 }], + expectedSpendAddress: addr, + provider: { + fetchUTxOs: async (hash, index) => { + expect(hash).toBe("aa"); + expect(index).toBe(1); + return [mkUtxo(addr, "aa", 1)]; + }, + }, + }); + expect("utxos" in r && r.utxos.length === 1).toBe(true); + }); +}); diff --git a/src/__tests__/signTransaction.bot.test.ts b/src/__tests__/signTransaction.bot.test.ts new file mode 100644 index 00000000..fa0ffe91 --- /dev/null +++ b/src/__tests__/signTransaction.bot.test.ts @@ -0,0 +1,210 @@ +import { beforeAll, beforeEach, describe, expect, it, jest } from "@jest/globals"; +import type { NextApiRequest, NextApiResponse } from "next"; +import { BOT_TEST_ADDRESS, createMockResponse, makeBearerAuth, makeBotJwtPayload } from "./apiTestUtils"; + +const addCorsHeadersMock = jest.fn<(res: NextApiResponse) => void>(); +const corsMock = jest.fn<(req: NextApiRequest, res: NextApiResponse) => Promise>(); +const applyRateLimitMock = jest.fn<(req: NextApiRequest, res: NextApiResponse) => boolean>(); +const applyBotRateLimitMock = jest.fn<(req: NextApiRequest, res: NextApiResponse, botId: string) => boolean>(); +const enforceBodySizeMock = jest.fn<(req: NextApiRequest, res: NextApiResponse, maxBytes: number) => boolean>(); +const verifyJwtMock: jest.Mock = jest.fn(); +const isBotJwtMock: jest.Mock = jest.fn(); +const getBotWalletAccessMock: jest.Mock = jest.fn(); +const resolvePaymentKeyHashMock: jest.Mock = jest.fn(); +const calculateTxHashMock: jest.Mock = jest.fn(); +const createVkeyWitnessFromHexMock: jest.Mock = jest.fn(); +const addUniqueVkeyWitnessToTxMock: jest.Mock = jest.fn(); +const shouldSubmitMultisigTxMock: jest.Mock = jest.fn(); +const submitTxWithScriptRecoveryMock: jest.Mock = jest.fn(); +const findWalletMock: jest.Mock = jest.fn(); +const findTransactionMock: jest.Mock = jest.fn(); +const updateManyTransactionMock: jest.Mock = jest.fn(); + +jest.mock("@/lib/cors", () => ({ + __esModule: true, + addCorsCacheBustingHeaders: 
addCorsHeadersMock, + cors: corsMock, +}), { virtual: true }); + +jest.mock("@/lib/security/requestGuards", () => ({ + __esModule: true, + applyRateLimit: applyRateLimitMock, + applyBotRateLimit: applyBotRateLimitMock, + enforceBodySize: enforceBodySizeMock, +}), { virtual: true }); + +jest.mock("@/lib/verifyJwt", () => ({ + __esModule: true, + verifyJwt: verifyJwtMock, + isBotJwt: isBotJwtMock, +}), { virtual: true }); + +jest.mock("@/lib/auth/botAccess", () => ({ + __esModule: true, + getBotWalletAccess: getBotWalletAccessMock, +}), { virtual: true }); + +jest.mock("@meshsdk/core", () => ({ + __esModule: true, + resolvePaymentKeyHash: resolvePaymentKeyHashMock, +}), { virtual: true }); + +jest.mock("@meshsdk/core-csl", () => ({ + __esModule: true, + calculateTxHash: calculateTxHashMock, +}), { virtual: true }); + +jest.mock("@/utils/txSignUtils", () => ({ + __esModule: true, + createVkeyWitnessFromHex: createVkeyWitnessFromHexMock, + addUniqueVkeyWitnessToTx: addUniqueVkeyWitnessToTxMock, + shouldSubmitMultisigTx: shouldSubmitMultisigTxMock, + submitTxWithScriptRecovery: submitTxWithScriptRecoveryMock, +}), { virtual: true }); + +jest.mock("@/utils/get-provider", () => ({ + __esModule: true, + getProvider: () => ({ submitTx: jest.fn() }), +}), { virtual: true }); + +jest.mock("@/utils/multisigSDK", () => ({ + __esModule: true, + addressToNetwork: () => 0, +}), { virtual: true }); + +jest.mock("@/server/db", () => ({ + __esModule: true, + db: { + wallet: { findUnique: findWalletMock }, + transaction: { + findUnique: findTransactionMock, + updateMany: updateManyTransactionMock, + }, + }, +}), { virtual: true }); + +jest.mock("@/server/api/root", () => ({ + __esModule: true, + createCaller: () => ({ wallet: { getWallet: jest.fn() } }), +}), { virtual: true }); + +jest.mock("@/lib/security/rateLimit", () => ({ + __esModule: true, + getClientIP: () => "127.0.0.1", +}), { virtual: true }); + +let handler: (req: NextApiRequest, res: NextApiResponse) => Promise; + 
+function makeWitnessRecord() { + return { + vkey: () => ({ + public_key: () => ({ + hash: () => ({ to_bytes: () => Buffer.from("a1b2c3d4", "hex") }), + to_bech32: () => "bech32", + }), + }), + signature: () => ({ to_bytes: () => Buffer.from("ff", "hex") }), + }; +} + +beforeAll(async () => { + ({ default: handler } = await import("../pages/api/v1/signTransaction")); +}); + +beforeEach(() => { + jest.clearAllMocks(); + applyRateLimitMock.mockReturnValue(true); + applyBotRateLimitMock.mockReturnValue(true); + enforceBodySizeMock.mockReturnValue(true); + corsMock.mockResolvedValue(undefined); + verifyJwtMock.mockReturnValue(makeBotJwtPayload()); + isBotJwtMock.mockReturnValue(true); + (getBotWalletAccessMock as any).mockResolvedValue({ allowed: true, role: "cosigner" }); + (findWalletMock as any).mockResolvedValue({ + id: "wallet-1", + signersAddresses: [BOT_TEST_ADDRESS], + numRequiredSigners: 2, + type: "atLeast", + }); + (findTransactionMock as any) + .mockResolvedValueOnce({ + id: "tx-1", + walletId: "wallet-1", + state: 0, + signedAddresses: [], + rejectedAddresses: [], + txCbor: "deadbeef", + txJson: "{}", + txHash: null, + }) + .mockResolvedValueOnce({ + id: "tx-1", + state: 0, + signedAddresses: [BOT_TEST_ADDRESS], + rejectedAddresses: [], + txCbor: "deadbeef-merged", + txJson: "{\"multisig\":{\"state\":0}}", + txHash: null, + }); + resolvePaymentKeyHashMock.mockReturnValue("a1b2c3d4"); + calculateTxHashMock.mockReturnValue("ff".repeat(32)); + createVkeyWitnessFromHexMock.mockReturnValue({ + publicKey: { verify: () => true }, + signature: {}, + witness: {}, + keyHashHex: "a1b2c3d4", + }); + addUniqueVkeyWitnessToTxMock.mockReturnValue({ + txHex: "deadbeef-merged", + witnessAdded: true, + vkeyWitnesses: { len: () => 1, get: () => makeWitnessRecord() }, + }); + shouldSubmitMultisigTxMock.mockReturnValue(false); + (submitTxWithScriptRecoveryMock as any).mockResolvedValue({ + txHash: "hash", + txHex: "deadbeef-merged", + }); + (updateManyTransactionMock as 
any).mockResolvedValue({ count: 1 }); +}); + +describe("signTransaction bot API", () => { + it("returns 403 when bot is not cosigner", async () => { + (getBotWalletAccessMock as any).mockResolvedValue({ allowed: true, role: "observer" }); + const req = { + method: "POST", + headers: makeBearerAuth(), + body: { + walletId: "wallet-1", + transactionId: "tx-1", + address: BOT_TEST_ADDRESS, + signature: "aa".repeat(64), + key: "bb".repeat(64), + }, + } as unknown as NextApiRequest; + const res = createMockResponse(); + await handler(req, res); + expect(res.status).toHaveBeenCalledWith(403); + }); + + it("records bot witness on happy path", async () => { + const req = { + method: "POST", + headers: makeBearerAuth(), + body: { + walletId: "wallet-1", + transactionId: "tx-1", + address: BOT_TEST_ADDRESS, + signature: "aa".repeat(64), + key: "bb".repeat(64), + broadcast: false, + }, + } as unknown as NextApiRequest; + const res = createMockResponse(); + await handler(req, res); + expect(updateManyTransactionMock).toHaveBeenCalled(); + expect(res.status).toHaveBeenCalledWith(200); + expect(res.json).toHaveBeenCalledWith(expect.objectContaining({ + submitted: false, + })); + }); +}); diff --git a/src/__tests__/signTransaction.test.ts b/src/__tests__/signTransaction.test.ts index f4b2e873..4c3fe3f5 100644 --- a/src/__tests__/signTransaction.test.ts +++ b/src/__tests__/signTransaction.test.ts @@ -455,6 +455,9 @@ beforeEach(() => { const existingWitnessCount = mergedWitnesses.len(); for (let i = 0; i < existingWitnessCount; i++) { const existingWitness = mergedWitnesses.get(i); + if (!existingWitness) { + continue; + } const existingKeyHash = Buffer.from( existingWitness.vkey().public_key().hash().to_bytes(), ).toString('hex').toLowerCase(); diff --git a/src/__tests__/submitDatum.bot.test.ts b/src/__tests__/submitDatum.bot.test.ts new file mode 100644 index 00000000..601905a0 --- /dev/null +++ b/src/__tests__/submitDatum.bot.test.ts @@ -0,0 +1,111 @@ +import { beforeAll, 
beforeEach, describe, expect, it, jest } from "@jest/globals"; +import type { NextApiRequest, NextApiResponse } from "next"; +import { BOT_TEST_ADDRESS, createMockResponse, makeBearerAuth, makeBotJwtPayload } from "./apiTestUtils"; + +const addCorsHeadersMock = jest.fn<(res: NextApiResponse) => void>(); +const corsMock = jest.fn<(req: NextApiRequest, res: NextApiResponse) => Promise>(); +const applyRateLimitMock = jest.fn<(req: NextApiRequest, res: NextApiResponse) => boolean>(); +const applyBotRateLimitMock = jest.fn<(req: NextApiRequest, res: NextApiResponse, botId: string) => boolean>(); +const enforceBodySizeMock = jest.fn<(req: NextApiRequest, res: NextApiResponse, maxBytes: number) => boolean>(); +const verifyJwtMock: jest.Mock = jest.fn(); +const isBotJwtMock: jest.Mock = jest.fn(); +const assertBotWalletAccessMock: jest.Mock = jest.fn(); +const checkSignatureMock: jest.Mock = jest.fn(); +const createSignableMock: jest.Mock = jest.fn(); + +jest.mock("@/lib/cors", () => ({ + __esModule: true, + addCorsCacheBustingHeaders: addCorsHeadersMock, + cors: corsMock, +}), { virtual: true }); + +jest.mock("@/lib/security/requestGuards", () => ({ + __esModule: true, + applyRateLimit: applyRateLimitMock, + applyBotRateLimit: applyBotRateLimitMock, + enforceBodySize: enforceBodySizeMock, +}), { virtual: true }); + +jest.mock("@/lib/verifyJwt", () => ({ + __esModule: true, + verifyJwt: verifyJwtMock, + isBotJwt: isBotJwtMock, +}), { virtual: true }); + +jest.mock("@/lib/auth/botAccess", () => ({ + __esModule: true, + assertBotWalletAccess: assertBotWalletAccessMock, +}), { virtual: true }); + +jest.mock("@meshsdk/core-cst", () => ({ + __esModule: true, + checkSignature: checkSignatureMock, +}), { virtual: true }); + +jest.mock("@/server/db", () => ({ + __esModule: true, + db: { + signable: { create: createSignableMock }, + wallet: { findUnique: jest.fn() }, + }, +}), { virtual: true }); + +let handler: (req: NextApiRequest, res: NextApiResponse) => Promise; + 
+beforeAll(async () => { + ({ default: handler } = await import("../pages/api/v1/submitDatum")); +}); + +beforeEach(() => { + jest.clearAllMocks(); + applyRateLimitMock.mockReturnValue(true); + applyBotRateLimitMock.mockReturnValue(true); + enforceBodySizeMock.mockReturnValue(true); + corsMock.mockResolvedValue(undefined); + verifyJwtMock.mockReturnValue(makeBotJwtPayload()); + isBotJwtMock.mockReturnValue(true); + (assertBotWalletAccessMock as any).mockResolvedValue({ wallet: { id: "wallet-1" } }); + (checkSignatureMock as any).mockResolvedValue(true); + (createSignableMock as any).mockResolvedValue({ id: "sig-1" }); +}); + +describe("submitDatum bot API", () => { + it("returns 401 for invalid datum signature", async () => { + (checkSignatureMock as any).mockResolvedValue(false); + const req = { + method: "POST", + headers: makeBearerAuth(), + body: { + walletId: "wallet-1", + signature: "sig", + key: "key", + address: BOT_TEST_ADDRESS, + datum: "payload", + callbackUrl: "https://example.com/callback", + }, + } as unknown as NextApiRequest; + const res = createMockResponse(); + await handler(req, res); + expect(res.status).toHaveBeenCalledWith(401); + }); + + it("creates signable datum for authorized bot", async () => { + const req = { + method: "POST", + headers: makeBearerAuth(), + body: { + walletId: "wallet-1", + signature: "sig", + key: "key", + address: BOT_TEST_ADDRESS, + datum: "payload", + callbackUrl: "https://example.com/callback", + }, + } as unknown as NextApiRequest; + const res = createMockResponse(); + await handler(req, res); + expect(createSignableMock).toHaveBeenCalled(); + expect(res.status).toHaveBeenCalledWith(201); + expect(res.json).toHaveBeenCalledWith({ id: "sig-1" }); + }); +}); diff --git a/src/__tests__/txScriptRecovery.test.ts b/src/__tests__/txScriptRecovery.test.ts new file mode 100644 index 00000000..e90fb0ac --- /dev/null +++ b/src/__tests__/txScriptRecovery.test.ts @@ -0,0 +1,36 @@ +import { describe, expect, it } from 
"@jest/globals"; +import type { MultisigSubmissionWallet } from "@/types/txSign"; +import { shouldSubmitMultisigTx } from "@/utils/txScriptRecovery"; + +const signersAddresses = ["addr_test_1", "addr_test_2", "addr_test_3"]; + +function wallet(overrides: Partial): MultisigSubmissionWallet { + return { + type: "all", + numRequiredSigners: null, + signersAddresses, + ...overrides, + }; +} + +describe("shouldSubmitMultisigTx", () => { + it("honors an explicit threshold stored on all wallets", () => { + const appWallet = wallet({ type: "all", numRequiredSigners: 2 }); + + expect(shouldSubmitMultisigTx(appWallet, 1)).toBe(false); + expect(shouldSubmitMultisigTx(appWallet, 2)).toBe(true); + }); + + it("keeps flat all wallets as all-of-N when no threshold is stored", () => { + const appWallet = wallet({ type: "all", numRequiredSigners: null }); + + expect(shouldSubmitMultisigTx(appWallet, 2)).toBe(false); + expect(shouldSubmitMultisigTx(appWallet, 3)).toBe(true); + }); + + it("keeps existing any and atLeast behavior", () => { + expect(shouldSubmitMultisigTx(wallet({ type: "any", numRequiredSigners: null }), 1)).toBe(true); + expect(shouldSubmitMultisigTx(wallet({ type: "atLeast", numRequiredSigners: 2 }), 1)).toBe(false); + expect(shouldSubmitMultisigTx(wallet({ type: "atLeast", numRequiredSigners: 2 }), 2)).toBe(true); + }); +}); diff --git a/src/__tests__/walletIds.bot.test.ts b/src/__tests__/walletIds.bot.test.ts new file mode 100644 index 00000000..bc035357 --- /dev/null +++ b/src/__tests__/walletIds.bot.test.ts @@ -0,0 +1,93 @@ +import { beforeAll, beforeEach, describe, expect, it, jest } from "@jest/globals"; +import type { NextApiRequest, NextApiResponse } from "next"; +import { BOT_TEST_ADDRESS, createMockResponse, makeBearerAuth, makeBotJwtPayload } from "./apiTestUtils"; + +const addCorsHeadersMock = jest.fn<(res: NextApiResponse) => void>(); +const corsMock = jest.fn<(req: NextApiRequest, res: NextApiResponse) => Promise>(); +const applyRateLimitMock = 
jest.fn<(req: NextApiRequest, res: NextApiResponse) => boolean>(); +const applyBotRateLimitMock = jest.fn<(req: NextApiRequest, res: NextApiResponse, botId: string) => boolean>(); +const verifyJwtMock: jest.Mock = jest.fn(); +const isBotJwtMock: jest.Mock = jest.fn(); +const createCallerMock: jest.Mock = jest.fn(); +const getWalletIdsForBotMock: jest.Mock = jest.fn(); + +jest.mock("@/lib/cors", () => ({ + __esModule: true, + addCorsCacheBustingHeaders: addCorsHeadersMock, + cors: corsMock, +}), { virtual: true }); + +jest.mock("@/lib/security/requestGuards", () => ({ + __esModule: true, + applyRateLimit: applyRateLimitMock, + applyBotRateLimit: applyBotRateLimitMock, +}), { virtual: true }); + +jest.mock("@/lib/verifyJwt", () => ({ + __esModule: true, + verifyJwt: verifyJwtMock, + isBotJwt: isBotJwtMock, +}), { virtual: true }); + +jest.mock("@/server/api/root", () => ({ + __esModule: true, + createCaller: createCallerMock, +}), { virtual: true }); + +jest.mock("@/server/db", () => ({ + __esModule: true, + db: {}, +}), { virtual: true }); + +jest.mock("@/lib/auth/botAccess", () => ({ + __esModule: true, + getWalletIdsForBot: getWalletIdsForBotMock, +}), { virtual: true }); + +jest.mock("@/lib/security/rateLimit", () => ({ + __esModule: true, + getClientIP: () => "127.0.0.1", +}), { virtual: true }); + +let handler: (req: NextApiRequest, res: NextApiResponse) => Promise; + +beforeAll(async () => { + ({ default: handler } = await import("../pages/api/v1/walletIds")); +}); + +beforeEach(() => { + jest.clearAllMocks(); + applyRateLimitMock.mockReturnValue(true); + applyBotRateLimitMock.mockReturnValue(true); + corsMock.mockResolvedValue(undefined); + verifyJwtMock.mockReturnValue(makeBotJwtPayload()); + isBotJwtMock.mockReturnValue(true); + createCallerMock.mockReturnValue({ wallet: { getUserWallets: jest.fn() } }); + (getWalletIdsForBotMock as any).mockResolvedValue([{ walletId: "w1", walletName: "Wallet 1" }]); +}); + +describe("walletIds bot API", () => { + 
it("returns 403 for address mismatch", async () => { + const req = { + method: "GET", + headers: makeBearerAuth(), + query: { address: "addr_test1wrong" }, + } as unknown as NextApiRequest; + const res = createMockResponse(); + await handler(req, res); + expect(res.status).toHaveBeenCalledWith(403); + }); + + it("returns wallet ids for authorized bot", async () => { + const req = { + method: "GET", + headers: makeBearerAuth(), + query: { address: BOT_TEST_ADDRESS }, + } as unknown as NextApiRequest; + const res = createMockResponse(); + await handler(req, res); + expect(getWalletIdsForBotMock).toHaveBeenCalled(); + expect(res.status).toHaveBeenCalledWith(200); + expect(res.json).toHaveBeenCalledWith([{ walletId: "w1", walletName: "Wallet 1" }]); + }); +}); diff --git a/src/components/pages/wallet/staking/StakingActions/stake.tsx b/src/components/pages/wallet/staking/StakingActions/stake.tsx index 2c77d877..7c4850be 100644 --- a/src/components/pages/wallet/staking/StakingActions/stake.tsx +++ b/src/components/pages/wallet/staking/StakingActions/stake.tsx @@ -9,67 +9,9 @@ import { ToastAction } from "@radix-ui/react-toast"; import { toast } from "@/hooks/use-toast"; import { getTxBuilder } from "@/utils/get-tx-builder"; import useTransaction from "@/hooks/useTransaction"; +import { buildStakingActionConfigs, type StakingActionUi } from "@/utils/stakingCertificates"; -type StakingAction = "register" | "deregister" | "delegate" | "withdrawal" | "registerAndDelegate"; - -type StakingActionConfig = { - execute: () => void; - description: string; - successTitle: string; - successMessage: string; -}; - -function buildStakingActionConfigs({ - txBuilder, - rewardAddress, - stakingScript, - poolHex, - rewards, -}: { - txBuilder: ReturnType; - rewardAddress: string; - stakingScript: string; - poolHex: string; - rewards: string; -}): Record { - return { - register: { - execute: () => txBuilder.registerStakeCertificate(rewardAddress).certificateScript(stakingScript), - 
description: "Register stake.", - successTitle: "Stake Registered", - successMessage: "Your stake address has been registered.", - }, - deregister: { - execute: () => txBuilder.deregisterStakeCertificate(rewardAddress).certificateScript(stakingScript), - description: "Deregister stake.", - successTitle: "Stake Deregistered", - successMessage: "Your stake address has been deregistered.", - }, - delegate: { - execute: () => txBuilder.delegateStakeCertificate(rewardAddress, poolHex).certificateScript(stakingScript), - description: "Delegate stake.", - successTitle: "Stake Delegated", - successMessage: "Your stake has been delegated.", - }, - withdrawal: { - execute: () => txBuilder.withdrawal(rewardAddress, rewards), - description: "Withdraw rewards.", - successTitle: "Rewards Withdrawn", - successMessage: "Your staking rewards have been withdrawn.", - }, - registerAndDelegate: { - execute: () => { - txBuilder - .registerStakeCertificate(rewardAddress) - .certificateScript(stakingScript); - txBuilder.delegateStakeCertificate(rewardAddress, poolHex).certificateScript(stakingScript); - }, - description: "Register & delegate stake.", - successTitle: "Stake Registered & Delegated", - successMessage: "Your stake address has been registered and delegated.", - }, - }; -} +type StakingAction = StakingActionUi; export default function StakeButton({ stakingInfo, appWallet, diff --git a/src/components/pages/wallet/transactions/transaction-card.tsx b/src/components/pages/wallet/transactions/transaction-card.tsx index 051a0d2f..326059c9 100644 --- a/src/components/pages/wallet/transactions/transaction-card.tsx +++ b/src/components/pages/wallet/transactions/transaction-card.tsx @@ -73,7 +73,15 @@ export default function TransactionCard({ const { activeWallet, isWalletReady, isAnyWalletConnected } = useActiveWallet(); const { appWallet } = useAppWallet(); const userAddress = useUserStore((state) => state.userAddress); - const txJson = JSON.parse(transaction.txJson); + // Parse 
defensively — a malformed txJson (e.g. from a row that slipped past + // earlier API validation) must not crash the whole Transactions page (#211). + const txJson = useMemo(() => { + try { + return JSON.parse(transaction.txJson); + } catch { + return null; + } + }, [transaction.txJson]); const [loading, setLoading] = useState(false); const [isSignersOpen, setIsSignersOpen] = useState(false); const { toast } = useToast(); @@ -371,6 +379,9 @@ export default function TransactionCard({ // }, []); const outputList = useMemo((): React.ReactElement => { + if (!txJson || !Array.isArray(txJson.outputs)) { + return <>; + } return ( <> {txJson.outputs.map((output: any, i: number) => { @@ -477,7 +488,51 @@ export default function TransactionCard({ } if (!appWallet) return <>; - + + // Unreadable transaction — txJson failed to parse. Render a degraded card so + // the Transactions page still loads and the user can free locked UTxOs (#211). + if (!txJson) { + return ( + + + + Unreadable transaction + + + {dateToFormatted(transaction.createdAt)} + + + +

+ This transaction's metadata could not be parsed and cannot be + signed. Rejecting it here will free any UTxOs it was holding. +

+
+
+ ID: {transaction.id} +
+ {transaction.txHash && ( +
+ Tx hash:{" "} + {transaction.txHash} +
+ )} +
+
+ + + +
+ ); + } + // Calculate signing threshold info const signersCount = appWallet.signersAddresses.length; const requiredSigners = appWallet.numRequiredSigners ?? signersCount; diff --git a/src/lib/server/createPendingMultisigTransaction.ts b/src/lib/server/createPendingMultisigTransaction.ts new file mode 100644 index 00000000..7970fb21 --- /dev/null +++ b/src/lib/server/createPendingMultisigTransaction.ts @@ -0,0 +1,68 @@ +import type { PrismaClient } from "@prisma/client"; +import { getProvider } from "@/utils/get-provider"; + +export type WalletSubmitShape = { + numRequiredSigners: number | null; + type: string; +}; + +function getRequiredSignerCount(wallet: WalletSubmitShape): number { + if (wallet.type === "any") return 1; + if (wallet.type === "atLeast" || typeof wallet.numRequiredSigners === "number") { + return wallet.numRequiredSigners ?? 1; + } + return Number.POSITIVE_INFINITY; +} + +/** + * Same broadcast vs pending rules as addTransaction: single signer or "any" → submit; else persist pending. + */ +export async function createPendingMultisigTransaction( + db: PrismaClient, + args: { + walletId: string; + wallet: WalletSubmitShape; + proposerAddress: string; + txCbor: string; + txJson: unknown; + description: string; + network: number; + initialSignedAddresses?: string[]; + }, +) { + const { + walletId, + wallet, + proposerAddress, + txCbor, + txJson, + description, + network, + initialSignedAddresses = [proposerAddress], + } = args; + const reqSigners = wallet.numRequiredSigners; + const wtype = wallet.type; + + const txJsonStr = + typeof txJson === "object" && txJson !== null + ? 
JSON.stringify(txJson) + : String(txJson); + + const requiredSigners = getRequiredSignerCount(wallet); + if ((reqSigners === 1 || wtype === "any") && initialSignedAddresses.length >= requiredSigners) { + const blockchainProvider = getProvider(network); + return await blockchainProvider.submitTx(txCbor); + } + + return await db.transaction.create({ + data: { + walletId, + txJson: txJsonStr, + txCbor, + signedAddresses: initialSignedAddresses, + rejectedAddresses: [], + description, + state: 0, + }, + }); +} diff --git a/src/lib/server/normalizePoolId.ts b/src/lib/server/normalizePoolId.ts new file mode 100644 index 00000000..1aca887e --- /dev/null +++ b/src/lib/server/normalizePoolId.ts @@ -0,0 +1,22 @@ +import { deserializePoolId, resolvePoolId } from "@meshsdk/core"; + +/** + * Accepts bech32 `pool1...` or 56-char hex pool id; returns Mesh `delegateStakeCertificate` pool id string. + */ +export function normalizePoolIdForDelegation(poolIdRaw: string): string { + const poolId = poolIdRaw.trim(); + if (!poolId) { + throw new Error("poolId is required"); + } + if (poolId.startsWith("pool")) { + const hash = deserializePoolId(poolId); + return resolvePoolId(hash); + } + const hex = /^[0-9a-fA-F]{56}$/; + if (hex.test(poolId)) { + return resolvePoolId(poolId.toLowerCase()); + } + throw new Error( + "Invalid poolId: expected bech32 pool1... 
or 56-character hex pool id", + ); +} diff --git a/src/lib/server/proxyAccess.ts b/src/lib/server/proxyAccess.ts new file mode 100644 index 00000000..b1402bd9 --- /dev/null +++ b/src/lib/server/proxyAccess.ts @@ -0,0 +1,63 @@ +import type { PrismaClient, Proxy, Wallet } from "@prisma/client"; +import type { JwtPayload } from "@/lib/verifyJwt"; +import { isBotJwt } from "@/lib/verifyJwt"; +import { getBotWalletAccess } from "@/lib/auth/botAccess"; + +export async function authorizeProxyReadForV1(args: { + db: PrismaClient; + payload: JwtPayload; + walletId: string; + address: string; +}): Promise<{ wallet: Wallet }> { + const { db, payload, walletId, address } = args; + if (payload.address !== address) { + throw Object.assign(new Error("Address mismatch"), { + code: "ADDRESS_MISMATCH", + }); + } + + const wallet = await db.wallet.findUnique({ where: { id: walletId } }); + if (!wallet) { + throw Object.assign(new Error("Wallet not found"), { code: "NOT_FOUND" }); + } + + if (isBotJwt(payload)) { + const access = await getBotWalletAccess(db, walletId, payload.botId); + if (!access.allowed) { + throw Object.assign(new Error("Not authorized for this wallet"), { + code: "FORBIDDEN", + }); + } + return { wallet }; + } + + if (!wallet.signersAddresses.includes(address)) { + throw Object.assign(new Error("Not authorized for this wallet"), { + code: "FORBIDDEN", + }); + } + + return { wallet }; +} + +export async function loadActiveProxyForWallet(args: { + db: PrismaClient; + walletId: string; + proxyId: string; +}): Promise { + const proxy = await args.db.proxy.findFirst({ + where: { + id: args.proxyId, + walletId: args.walletId, + isActive: true, + }, + }); + + if (!proxy) { + throw Object.assign(new Error("Active proxy not found for this wallet"), { + code: "NOT_FOUND", + }); + } + + return proxy; +} diff --git a/src/lib/server/proxyCleanupFinalization.ts b/src/lib/server/proxyCleanupFinalization.ts new file mode 100644 index 00000000..5449fe74 --- /dev/null +++ 
b/src/lib/server/proxyCleanupFinalization.ts @@ -0,0 +1,132 @@ +import type { PrismaClient, Proxy } from "@prisma/client"; +import type { UTxO } from "@meshsdk/core"; +import { getProvider } from "@/utils/get-provider"; +import { hasAsset } from "@/lib/server/proxyUtxos"; + +type AddressUtxoFetcher = { + fetchAddressUTxOs: (address: string) => Promise<UTxO[]>; + get?: (path: string) => Promise<unknown>; +}; + +type TxUtxoEntry = { + address?: string; + amount?: { unit?: string; quantity?: string }[]; +}; + +type TxUtxosResponse = { + inputs?: TxUtxoEntry[]; + outputs?: TxUtxoEntry[]; +}; + +function txEntryHasAsset(entry: TxUtxoEntry, unit: string): boolean { + return entry.amount?.some((asset) => asset.unit === unit && BigInt(asset.quantity ?? "0") > 0n) ?? false; +} + +async function validateCleanupTxHash(args: { + provider: AddressUtxoFetcher; + txHash: string; + proxy: Proxy; +}): Promise<{ error: string; status: number } | null> { + if (typeof args.provider.get !== "function") { + return { + error: "Unable to validate confirmed proxy cleanup txHash: provider does not support transaction lookup", + status: 400, + }; + } + + let txUtxos: TxUtxosResponse; + try { + txUtxos = (await args.provider.get(`/txs/${args.txHash}/utxos`)) as TxUtxosResponse; + } catch (error) { + const message = error instanceof Error ? error.message : String(error); + return { + error: `Unable to validate confirmed proxy cleanup txHash: ${message}`, + status: 400, + }; + } + + const inputs = Array.isArray(txUtxos.inputs) ? txUtxos.inputs : []; + const outputs = Array.isArray(txUtxos.outputs) ?
txUtxos.outputs : []; + const spentAuthToken = inputs.some((input) => txEntryHasAsset(input, args.proxy.authTokenId)); + const recreatedAuthToken = outputs.some((output) => txEntryHasAsset(output, args.proxy.authTokenId)); + const recreatedProxyOutput = outputs.some((output) => output.address === args.proxy.proxyAddress); + if (!spentAuthToken || recreatedAuthToken || recreatedProxyOutput) { + return { + error: "txHash does not match confirmed proxy cleanup burn outputs", + status: 400, + }; + } + + return null; +} + +export async function finalizeConfirmedProxyCleanup(args: { + db: PrismaClient; + network: number; + proxy: Proxy; + walletAddress: string; + txHash: string; + deactivateProxy?: boolean; + provider?: AddressUtxoFetcher; +}): Promise<{ proxy: Proxy } | { error: string; status: number }> { + const provider = args.provider ?? getProvider(args.network); + const txHash = args.txHash.trim(); + if (!txHash) { + return { + error: "txHash is required", + status: 400, + }; + } + + const txHashValidation = await validateCleanupTxHash({ + provider, + txHash, + proxy: args.proxy, + }); + if (txHashValidation) { + return txHashValidation; + } + + let walletUtxos: UTxO[]; + let proxyUtxos: UTxO[]; + try { + [walletUtxos, proxyUtxos] = await Promise.all([ + provider.fetchAddressUTxOs(args.walletAddress), + provider.fetchAddressUTxOs(args.proxy.proxyAddress), + ]); + } catch (error) { + const message = error instanceof Error ? 
error.message : String(error); + return { + error: `Unable to validate confirmed proxy cleanup from chain: ${message}`, + status: 400, + }; + } + + const authTokenStillPresent = [...walletUtxos, ...proxyUtxos].some((utxo) => + hasAsset(utxo, args.proxy.authTokenId), + ); + if (authTokenStillPresent) { + return { + error: "Confirmed cleanup not found: auth tokens are still visible on-chain", + status: 400, + }; + } + + if (proxyUtxos.length > 0) { + return { + error: "Confirmed cleanup not found: proxy address still has on-chain UTxOs", + status: 400, + }; + } + + if (args.deactivateProxy === false) { + return { proxy: args.proxy }; + } + + const proxy = await args.db.proxy.update({ + where: { id: args.proxy.id }, + data: { isActive: false }, + }); + + return { proxy }; +} diff --git a/src/lib/server/proxySetupFinalization.ts b/src/lib/server/proxySetupFinalization.ts new file mode 100644 index 00000000..94376767 --- /dev/null +++ b/src/lib/server/proxySetupFinalization.ts @@ -0,0 +1,208 @@ +import type { PrismaClient } from "@prisma/client"; +import type { UTxO } from "@meshsdk/core"; +import { getProvider } from "@/utils/get-provider"; +import { hasAsset, type UtxoRef } from "@/lib/server/proxyUtxos"; + +export type ProxySetupMetadata = { + proxyAddress: string; + authTokenId: string; + paramUtxo: UtxoRef; + description?: string; +}; + +type AddressUtxoFetcher = { + fetchAddressUTxOs: (address: string) => Promise; + get?: (path: string) => Promise; +}; + +type TxUtxoEntry = { + address?: string; + amount?: { unit?: string; quantity?: string }[]; +}; + +type TxUtxosResponse = { + outputs?: TxUtxoEntry[]; +}; + +function txEntryHasAsset(entry: TxUtxoEntry, unit: string): boolean { + return entry.amount?.some((asset) => asset.unit === unit && BigInt(asset.quantity ?? "0") > 0n) ?? 
false; +} + +async function validateSetupTxHash(args: { + provider: AddressUtxoFetcher; + txHash: string; + walletAddress: string; + setup: ProxySetupMetadata; +}): Promise<{ error: string; status: number } | null> { + if (typeof args.provider.get !== "function") { + return { + error: "Unable to validate confirmed proxy setup txHash: provider does not support transaction lookup", + status: 400, + }; + } + + let txUtxos: TxUtxosResponse; + try { + txUtxos = (await args.provider.get(`/txs/${args.txHash}/utxos`)) as TxUtxosResponse; + } catch (error) { + const message = error instanceof Error ? error.message : String(error); + return { + error: `Unable to validate confirmed proxy setup txHash: ${message}`, + status: 400, + }; + } + + const outputs = Array.isArray(txUtxos.outputs) ? txUtxos.outputs : []; + const createdProxyOutput = outputs.some((output) => output.address === args.setup.proxyAddress); + const returnedAuthTokenToWallet = outputs.some( + (output) => output.address === args.walletAddress && txEntryHasAsset(output, args.setup.authTokenId), + ); + if (!createdProxyOutput || !returnedAuthTokenToWallet) { + return { + error: "txHash does not match confirmed proxy setup outputs", + status: 400, + }; + } + + return null; +} + +function normalizeSetupMetadata( + metadata: Partial<ProxySetupMetadata>, +): ProxySetupMetadata | { error: string; status: number } { + const proxyAddress = + typeof metadata.proxyAddress === "string" ? metadata.proxyAddress.trim() : ""; + const authTokenId = + typeof metadata.authTokenId === "string" ? metadata.authTokenId.trim() : ""; + const paramUtxo = metadata.paramUtxo; + const txHash = + typeof paramUtxo?.txHash === "string" ? paramUtxo.txHash.trim() : ""; + const outputIndex = + typeof paramUtxo?.outputIndex === "number" && + Number.isInteger(paramUtxo.outputIndex) + ?
paramUtxo.outputIndex + : -1; + + if (!proxyAddress || !authTokenId || !txHash || outputIndex < 0) { + return { + error: "proxyAddress, authTokenId, and paramUtxo are required", + status: 400, + }; + } + + return { + proxyAddress, + authTokenId, + paramUtxo: { txHash, outputIndex }, + description: + typeof metadata.description === "string" && metadata.description.trim() + ? metadata.description.trim() + : undefined, + }; +} + +function isTxHashHex(value: string): boolean { + return /^[0-9a-fA-F]{64}$/.test(value); +} + +export async function finalizeConfirmedProxySetup(args: { + db: PrismaClient; + network: number; + walletId: string; + walletAddress: string; + txHash: string; + setup: Partial<ProxySetupMetadata>; + provider?: AddressUtxoFetcher; +}) { + const setup = normalizeSetupMetadata(args.setup); + if ("error" in setup) { + return setup; + } + + const provider = args.provider ?? getProvider(args.network); + const txHash = args.txHash.trim(); + if (!txHash) { + return { + error: "txHash is required", + status: 400, + }; + } + if (!isTxHashHex(txHash)) { + return { + error: "txHash must be a 64-character hex string", + status: 400, + }; + } + + const txHashValidation = await validateSetupTxHash({ + provider, + txHash, + walletAddress: args.walletAddress, + setup, + }); + if (txHashValidation) { + return txHashValidation; + } + + let walletUtxos: UTxO[]; + let proxyUtxos: UTxO[]; + try { + [walletUtxos, proxyUtxos] = await Promise.all([ + provider.fetchAddressUTxOs(args.walletAddress), + provider.fetchAddressUTxOs(setup.proxyAddress), + ]); + } catch (error) { + const message = error instanceof Error ?
error.message : String(error); + return { + error: `Unable to validate confirmed proxy setup from chain: ${message}`, + status: 400, + }; + } + + const authTokenAtWallet = walletUtxos.some((utxo) => + hasAsset(utxo, setup.authTokenId), + ); + if (!authTokenAtWallet) { + return { + error: + "Confirmed setup not found: auth token is not present at the multisig wallet address", + status: 400, + }; + } + + if (proxyUtxos.length === 0) { + return { + error: "Confirmed setup not found: proxy address has no on-chain UTxOs", + status: 400, + }; + } + + const existing = await args.db.proxy.findFirst({ + where: { + walletId: args.walletId, + proxyAddress: setup.proxyAddress, + authTokenId: setup.authTokenId, + }, + }); + + if (existing) { + if (!existing.isActive) { + return args.db.proxy.update({ + where: { id: existing.id }, + data: { isActive: true }, + }); + } + return existing; + } + + return args.db.proxy.create({ + data: { + walletId: args.walletId, + proxyAddress: setup.proxyAddress, + authTokenId: setup.authTokenId, + paramUtxo: JSON.stringify(setup.paramUtxo), + description: setup.description, + isActive: true, + }, + }); +} diff --git a/src/lib/server/proxyTxBuilders.ts b/src/lib/server/proxyTxBuilders.ts new file mode 100644 index 00000000..baab5413 --- /dev/null +++ b/src/lib/server/proxyTxBuilders.ts @@ -0,0 +1,494 @@ +import { mConStr0, mConStr1, mOutputReference } from "@meshsdk/common"; +import { + applyParamsToScript, + hashDrepAnchor, + resolveScriptHash, + resolveScriptHashDRepId, + serializePlutusScript, +} from "@meshsdk/core"; +import type { MeshTxBuilder, UTxO } from "@meshsdk/core"; +import blueprint from "@/components/multisig/proxy/aiken-workspace/plutus.json"; +import { parseProposalId } from "@/lib/governance"; +import { getLovelace, sameUtxoRef } from "@/lib/server/proxyUtxos"; + +export const DEFAULT_PROXY_SETUP_LOVELACE = "1000000"; +const PROXY_ACTION_MIN_LOVELACE = 2_000_000n; + +const DEFAULT_PROXY_STAKE_CREDENTIAL = + 
"c08f0294ead5ab7ae0ce5471dd487007919297ba95230af22f25e575"; + +export type ProxySetupInfo = { + paramUtxo: UTxO["input"]; + authTokenId: string; + proxyAddress: string; +}; + +export type ProxyVoteInput = { + proposalId: string; + voteKind: "Yes" | "No" | "Abstain"; + metadata?: unknown; +}; + +function formatAda(lovelace: bigint): string { + const whole = lovelace / 1_000_000n; + const fraction = lovelace % 1_000_000n; + if (fraction === 0n) return `${whole} ADA`; + return `${whole}.${fraction.toString().padStart(6, "0").replace(/0+$/, "")} ADA`; +} + +function assertSelectedLovelace(args: { + context: string; + selectedLovelace: bigint; + requiredLovelace: bigint; +}) { + if (args.selectedLovelace >= args.requiredLovelace) return; + throw new Error( + `${args.context} requires at least ${formatAda(args.requiredLovelace)} in selected wallet inputs, but only ${formatAda(args.selectedLovelace)} was selected`, + ); +} + +export function deriveProxyScripts(args: { + paramUtxo: UTxO["input"]; + network: number; + stakeCredential?: string; +}) { + const authTokenCbor = applyParamsToScript( + blueprint.validators[0]!.compiledCode, + [mOutputReference(args.paramUtxo.txHash, args.paramUtxo.outputIndex)], + ); + const authTokenId = resolveScriptHash(authTokenCbor, "V3"); + const proxyCbor = applyParamsToScript(blueprint.validators[2]!.compiledCode, [ + authTokenId, + ]); + const proxyAddress = serializePlutusScript( + { code: proxyCbor, version: "V3" }, + args.stakeCredential ?? 
DEFAULT_PROXY_STAKE_CREDENTIAL, + args.network, + ).address; + const proxyScriptHash = resolveScriptHash(proxyCbor, "V3"); + const dRepId = resolveScriptHashDRepId(proxyScriptHash); + + return { + authTokenCbor, + authTokenId, + proxyCbor, + proxyAddress, + dRepId, + }; +} + +function addScriptInput( + txBuilder: MeshTxBuilder, + utxo: UTxO, + scriptCbor?: string, +) { + txBuilder.txIn( + utxo.input.txHash, + utxo.input.outputIndex, + utxo.output.amount, + utxo.output.address, + ); + if (scriptCbor) { + txBuilder.txInScript(scriptCbor); + } +} + +function addCollateral(txBuilder: MeshTxBuilder, collateral: UTxO) { + txBuilder.txInCollateral( + collateral.input.txHash, + collateral.input.outputIndex, + collateral.output.amount, + collateral.output.address, + ); +} + +function selectParamUtxo(utxos: UTxO[]): UTxO | null { + return ( + utxos.find((utxo) => getLovelace(utxo) >= BigInt(20_000_000)) ?? null + ); +} + +export function buildProxySetupTx(args: { + txBuilder: MeshTxBuilder; + network: number; + walletUtxos: UTxO[]; + walletAddress: string; + collateral: UTxO; + multisigScriptCbor?: string; + initialProxyLovelace?: string; + stakeCredential?: string; +}): ProxySetupInfo { + const paramUtxo = selectParamUtxo(args.walletUtxos); + if (!paramUtxo) { + throw new Error("No setup UTxO found with at least 20 ADA"); + } + + const scripts = deriveProxyScripts({ + paramUtxo: paramUtxo.input, + network: args.network, + stakeCredential: args.stakeCredential, + }); + + addScriptInput(args.txBuilder, paramUtxo, args.multisigScriptCbor); + + args.txBuilder + .mintPlutusScriptV3() + .mint("10", scripts.authTokenId, "") + .mintingScript(scripts.authTokenCbor) + .mintRedeemerValue(mConStr0([])) + .txOut(scripts.proxyAddress, [ + { + unit: "lovelace", + quantity: args.initialProxyLovelace ?? 
DEFAULT_PROXY_SETUP_LOVELACE, + }, + ]); + + for (let i = 0; i < 10; i++) { + args.txBuilder.txOut(args.walletAddress, [ + { unit: scripts.authTokenId, quantity: "1" }, + ]); + } + + addCollateral(args.txBuilder, args.collateral); + args.txBuilder.changeAddress(args.walletAddress); + + return { + paramUtxo: paramUtxo.input, + authTokenId: scripts.authTokenId, + proxyAddress: scripts.proxyAddress, + }; +} + +export function buildProxySpendTx(args: { + txBuilder: MeshTxBuilder; + network: number; + proxyAddress: string; + paramUtxo: UTxO["input"]; + walletUtxos: UTxO[]; + proxyUtxos: UTxO[]; + authTokenUtxo: UTxO; + collateral: UTxO; + outputs: { address: string; unit: string; amount: string }[]; + walletAddress: string; + multisigScriptCbor?: string; + stakeCredential?: string; +}) { + const scripts = deriveProxyScripts({ + paramUtxo: args.paramUtxo, + network: args.network, + stakeCredential: args.stakeCredential, + }); + + for (const proxyUtxo of args.proxyUtxos) { + args.txBuilder + .spendingPlutusScriptV3() + .txIn( + proxyUtxo.input.txHash, + proxyUtxo.input.outputIndex, + proxyUtxo.output.amount, + proxyUtxo.output.address, + ) + .txInScript(scripts.proxyCbor) + .txInInlineDatumPresent() + .txInRedeemerValue(mConStr0([])); + } + + addScriptInput(args.txBuilder, args.authTokenUtxo, args.multisigScriptCbor); + for (const utxo of args.walletUtxos) { + if (!sameUtxoRef(utxo.input, args.authTokenUtxo.input)) { + addScriptInput(args.txBuilder, utxo, args.multisigScriptCbor); + } + } + + addCollateral(args.txBuilder, args.collateral); + args.txBuilder.txOut(args.walletAddress, [ + { unit: scripts.authTokenId, quantity: "1" }, + ]); + + for (const output of args.outputs) { + args.txBuilder.txOut(output.address, [ + { unit: output.unit, quantity: output.amount }, + ]); + } + + args.txBuilder.changeAddress(args.proxyAddress); +} + +export function buildProxyDRepCertificateTx(args: { + txBuilder: MeshTxBuilder; + network: number; + paramUtxo: UTxO["input"]; + 
walletUtxos: UTxO[]; + authTokenUtxo: UTxO; + collateral: UTxO; + walletAddress: string; + action: "register" | "update" | "deregister"; + anchorUrl?: string; + anchorJson?: object; + multisigScriptCbor?: string; + stakeCredential?: string; +}): { dRepId: string; anchorDataHash?: string } { + const scripts = deriveProxyScripts({ + paramUtxo: args.paramUtxo, + network: args.network, + stakeCredential: args.stakeCredential, + }); + + let anchorDataHash: string | undefined; + if (args.action === "register" || args.action === "update") { + if (!args.anchorUrl || !args.anchorJson) { + throw new Error("anchorUrl and anchorJson are required for this action"); + } + anchorDataHash = hashDrepAnchor(args.anchorJson); + } + + addScriptInput(args.txBuilder, args.authTokenUtxo, args.multisigScriptCbor); + addCollateral(args.txBuilder, args.collateral); + + const requiredAmount = + args.action === "register" ? BigInt(505_000_000) : PROXY_ACTION_MIN_LOVELACE; + let totalAmount = getLovelace(args.authTokenUtxo); + for (const utxo of args.walletUtxos) { + if (totalAmount >= requiredAmount) { + break; + } + if (sameUtxoRef(utxo.input, args.authTokenUtxo.input)) { + continue; + } + addScriptInput(args.txBuilder, utxo, args.multisigScriptCbor); + totalAmount += getLovelace(utxo); + } + assertSelectedLovelace({ + context: `proxy DRep ${args.action}`, + selectedLovelace: totalAmount, + requiredLovelace: requiredAmount, + }); + + args.txBuilder.txOut(args.walletAddress, [ + { unit: scripts.authTokenId, quantity: "1" }, + ]); + + if (args.action === "register") { + args.txBuilder.drepRegistrationCertificate(scripts.dRepId, { + anchorUrl: args.anchorUrl!, + anchorDataHash: anchorDataHash!, + }); + } else if (args.action === "update") { + args.txBuilder.drepUpdateCertificate(scripts.dRepId, { + anchorUrl: args.anchorUrl!, + anchorDataHash: anchorDataHash!, + }); + } else { + args.txBuilder.drepDeregistrationCertificate(scripts.dRepId); + } + + args.txBuilder + 
.certificateScript(scripts.proxyCbor, "V3") + .certificateRedeemerValue(mConStr0([])) + .changeAddress(args.walletAddress); + + return { dRepId: scripts.dRepId, anchorDataHash }; +} + +export function buildProxyVoteTx(args: { + txBuilder: MeshTxBuilder; + network: number; + paramUtxo: UTxO["input"]; + walletUtxos: UTxO[]; + authTokenUtxo: UTxO; + collateral: UTxO; + walletAddress: string; + votes: ProxyVoteInput[]; + multisigScriptCbor?: string; + stakeCredential?: string; +}): { dRepId: string } { + if (args.votes.length === 0) { + throw new Error("votes must be a non-empty array"); + } + + const scripts = deriveProxyScripts({ + paramUtxo: args.paramUtxo, + network: args.network, + stakeCredential: args.stakeCredential, + }); + + addScriptInput(args.txBuilder, args.authTokenUtxo, args.multisigScriptCbor); + addCollateral(args.txBuilder, args.collateral); + + let totalAmount = getLovelace(args.authTokenUtxo); + for (const utxo of args.walletUtxos) { + if (totalAmount >= PROXY_ACTION_MIN_LOVELACE) { + break; + } + if (sameUtxoRef(utxo.input, args.authTokenUtxo.input)) { + continue; + } + addScriptInput(args.txBuilder, utxo, args.multisigScriptCbor); + totalAmount += getLovelace(utxo); + } + assertSelectedLovelace({ + context: "proxy vote", + selectedLovelace: totalAmount, + requiredLovelace: PROXY_ACTION_MIN_LOVELACE, + }); + + args.txBuilder.txOut(args.walletAddress, [ + { unit: scripts.authTokenId, quantity: "1" }, + ]); + + for (const vote of args.votes) { + const parsed = parseProposalId(vote.proposalId); + args.txBuilder + .votePlutusScriptV3() + .vote( + { + type: "DRep", + drepId: scripts.dRepId, + }, + { + txHash: parsed.txHash, + txIndex: parsed.certIndex, + }, + { + voteKind: vote.voteKind, + }, + ) + .voteScript(scripts.proxyCbor) + .voteRedeemerValue(""); + } + + args.txBuilder.changeAddress(args.walletAddress); + + return { dRepId: scripts.dRepId }; +} + +export function buildProxyCleanupTx(args: { + txBuilder: MeshTxBuilder; + network: number; + 
paramUtxo: UTxO["input"]; + walletUtxos: UTxO[]; + collateral: UTxO; + walletAddress: string; + authTokenId: string; + multisigScriptCbor?: string; + stakeCredential?: string; +}): { burnedAuthTokens: string } { + const scripts = deriveProxyScripts({ + paramUtxo: args.paramUtxo, + network: args.network, + stakeCredential: args.stakeCredential, + }); + if (scripts.authTokenId !== args.authTokenId) { + throw new Error("Stored proxy metadata does not match derived auth token"); + } + + let authTokenCount = BigInt(0); + for (const utxo of args.walletUtxos) { + const quantity = utxo.output.amount.find( + (asset) => asset.unit === args.authTokenId, + )?.quantity; + if (quantity) { + authTokenCount += BigInt(quantity); + } + addScriptInput(args.txBuilder, utxo, args.multisigScriptCbor); + } + + if (authTokenCount !== BigInt(10)) { + throw new Error( + `proxy cleanup requires exactly 10 auth tokens, found ${authTokenCount.toString()}`, + ); + } + + args.txBuilder + .mintPlutusScriptV3() + .mint("-10", scripts.authTokenId, "") + .mintingScript(scripts.authTokenCbor) + .mintRedeemerValue(mConStr1([])); + + addCollateral(args.txBuilder, args.collateral); + args.txBuilder.changeAddress(args.walletAddress); + + return { burnedAuthTokens: "10" }; +} + +function aggregateUtxoAmounts( + utxos: UTxO[], + extraAmounts: UTxO["output"]["amount"] = [], +): UTxO["output"]["amount"] { + const totals = new Map(); + for (const amounts of [ + ...utxos.map((utxo) => utxo.output.amount), + extraAmounts, + ]) { + for (const asset of amounts) { + totals.set(asset.unit, (totals.get(asset.unit) ?? 
BigInt(0)) + BigInt(asset.quantity)); + } + } + + return Array.from(totals.entries()).map(([unit, quantity]) => ({ + unit, + quantity: quantity.toString(), + })); +} + +export function buildProxyCleanupSweepTx(args: { + txBuilder: MeshTxBuilder; + network: number; + paramUtxo: UTxO["input"]; + proxyAddress: string; + proxyUtxos: UTxO[]; + walletUtxos: UTxO[]; + authTokenUtxo: UTxO; + collateral: UTxO; + walletAddress: string; + multisigScriptCbor?: string; + stakeCredential?: string; +}): { sweptProxyUtxos: string; preservedAuthTokens: string } { + if (args.proxyUtxos.length === 0) { + throw new Error("proxy cleanup sweep requires at least one proxy UTxO"); + } + + const scripts = deriveProxyScripts({ + paramUtxo: args.paramUtxo, + network: args.network, + stakeCredential: args.stakeCredential, + }); + + for (const proxyUtxo of args.proxyUtxos) { + if (proxyUtxo.output.address !== args.proxyAddress) { + throw new Error("proxy cleanup sweep received a UTxO outside the proxy address"); + } + args.txBuilder + .spendingPlutusScriptV3() + .txIn( + proxyUtxo.input.txHash, + proxyUtxo.input.outputIndex, + proxyUtxo.output.amount, + proxyUtxo.output.address, + ) + .txInScript(scripts.proxyCbor) + .txInInlineDatumPresent() + .txInRedeemerValue(mConStr0([])); + } + + addScriptInput(args.txBuilder, args.authTokenUtxo, args.multisigScriptCbor); + for (const utxo of args.walletUtxos) { + if (!sameUtxoRef(utxo.input, args.authTokenUtxo.input)) { + addScriptInput(args.txBuilder, utxo, args.multisigScriptCbor); + } + } + + addCollateral(args.txBuilder, args.collateral); + args.txBuilder.txOut( + args.walletAddress, + aggregateUtxoAmounts(args.proxyUtxos, [ + { unit: scripts.authTokenId, quantity: "1" }, + ]), + ); + args.txBuilder.changeAddress(args.walletAddress); + + return { + sweptProxyUtxos: args.proxyUtxos.length.toString(), + preservedAuthTokens: "1", + }; +} diff --git a/src/lib/server/proxyUtxos.ts b/src/lib/server/proxyUtxos.ts new file mode 100644 index 
00000000..3cf5bb43 --- /dev/null +++ b/src/lib/server/proxyUtxos.ts @@ -0,0 +1,220 @@ +import type { UTxO } from "@meshsdk/core"; +import type { UtxoFetcher, UtxoRef } from "@/lib/server/resolveUtxoRefsFromChain"; + +export type { UtxoRef }; + +const MIN_COLLATERAL_LOVELACE = BigInt(5_000_000); + +function normalizeUtxoRef(ref: UtxoRef | undefined): UtxoRef | null { + const txHash = typeof ref?.txHash === "string" ? ref.txHash.trim() : ""; + const outputIndex = + typeof ref?.outputIndex === "number" && Number.isInteger(ref.outputIndex) + ? ref.outputIndex + : -1; + + if (!txHash || outputIndex < 0) { + return null; + } + + return { txHash, outputIndex }; +} + +export function getLovelace(utxo: UTxO): bigint { + return BigInt( + utxo.output.amount.find((asset) => asset.unit === "lovelace")?.quantity ?? + "0", + ); +} + +export function hasAsset(utxo: UTxO, unit: string, minimum = BigInt(1)): boolean { + const quantity = BigInt( + utxo.output.amount.find((asset) => asset.unit === unit)?.quantity ?? "0", + ); + return quantity >= minimum; +} + +export function sameUtxoRef(a: UTxO["input"], b: UTxO["input"]): boolean { + return a.txHash === b.txHash && a.outputIndex === b.outputIndex; +} + +export async function resolveSingleUtxoRefFromChain(args: { + network: number; + ref: UtxoRef | undefined; + expectedAddress?: string; + provider?: UtxoFetcher; +}): Promise<{ utxo: UTxO } | { error: string; status: number }> { + const normalized = normalizeUtxoRef(args.ref); + if (!normalized) { + return { + error: "Invalid UTxO ref: txHash and non-negative integer outputIndex required", + status: 400, + }; + } + + const provider = + args.provider ?? + (await import("@/utils/get-provider")).getProvider(args.network); + let fetched: UTxO[]; + try { + fetched = await provider.fetchUTxOs( + normalized.txHash, + normalized.outputIndex, + ); + } catch (error) { + const message = error instanceof Error ? 
error.message : String(error); + return { + error: `UTxO not found or not yet available: ${normalized.txHash}#${normalized.outputIndex} (${message})`, + status: 400, + }; + } + + const utxo = fetched[0]; + if (!utxo) { + return { + error: `UTxO not found or already spent: ${normalized.txHash}#${normalized.outputIndex}`, + status: 400, + }; + } + + if (args.expectedAddress && utxo.output.address !== args.expectedAddress) { + return { + error: `UTxO ${normalized.txHash}#${normalized.outputIndex} is not at the expected address`, + status: 400, + }; + } + + return { utxo }; +} + +export async function resolveCollateralRefFromChain(args: { + network: number; + collateralRef: UtxoRef | undefined; + expectedAddress?: string; + provider?: UtxoFetcher; +}): Promise<{ collateral: UTxO } | { error: string; status: number }> { + const resolved = await resolveSingleUtxoRefFromChain({ + network: args.network, + ref: args.collateralRef, + expectedAddress: args.expectedAddress, + provider: args.provider, + }); + if ("error" in resolved) { + return resolved; + } + + if (getLovelace(resolved.utxo) < MIN_COLLATERAL_LOVELACE) { + return { + error: "collateralRef must resolve to a UTxO with at least 5 ADA", + status: 400, + }; + } + if (resolved.utxo.output.amount.some((asset) => asset.unit !== "lovelace")) { + return { + error: "collateralRef must resolve to an ADA-only UTxO", + status: 400, + }; + } + + return { collateral: resolved.utxo }; +} + +export function requireAuthTokenUtxo( + utxos: UTxO[], + authTokenId: string, +): UTxO | { error: string; status: number } { + const authTokenUtxo = utxos.find((utxo) => hasAsset(utxo, authTokenId)); + if (!authTokenUtxo) { + return { + error: "No proxy auth-token UTxO found at the multisig wallet address", + status: 400, + }; + } + + return authTokenUtxo; +} + +export function selectProxyUtxosForOutputs(args: { + proxyUtxos: UTxO[]; + outputs: { unit: string; amount: string; address?: string }[]; + feeBufferLovelace?: bigint; +}): UTxO[] | 
{ error: string; status: number } { + const requiredByUnit = new Map(); + for (const output of args.outputs) { + const amount = BigInt(output.amount); + requiredByUnit.set( + output.unit, + (requiredByUnit.get(output.unit) ?? BigInt(0)) + amount, + ); + } + requiredByUnit.set( + "lovelace", + (requiredByUnit.get("lovelace") ?? BigInt(0)) + + (args.feeBufferLovelace ?? BigInt(500_000)), + ); + + const availableByUnit = new Map(); + for (const utxo of args.proxyUtxos) { + for (const asset of utxo.output.amount) { + availableByUnit.set( + asset.unit, + (availableByUnit.get(asset.unit) ?? BigInt(0)) + BigInt(asset.quantity), + ); + } + } + + for (const [unit, needed] of requiredByUnit.entries()) { + if ((availableByUnit.get(unit) ?? BigInt(0)) < needed) { + return { + error: `Insufficient proxy balance for ${unit}`, + status: 400, + }; + } + } + + const remainingByUnit = new Map(requiredByUnit); + const candidates = [...args.proxyUtxos]; + const selected: UTxO[] = []; + + const hasRemaining = () => + Array.from(remainingByUnit.values()).some((value) => value > BigInt(0)); + + while (hasRemaining()) { + let bestIndex = -1; + let bestScore = BigInt(0); + + for (let i = 0; i < candidates.length; i++) { + const candidate = candidates[i]!; + let score = BigInt(0); + for (const asset of candidate.output.amount) { + const remaining = remainingByUnit.get(asset.unit) ?? BigInt(0); + if (remaining > BigInt(0)) { + const quantity = BigInt(asset.quantity); + score += quantity < remaining ? quantity : remaining; + } + } + if (score > bestScore) { + bestScore = score; + bestIndex = i; + } + } + + if (bestIndex === -1 || bestScore === BigInt(0)) { + return { error: "Unable to select proxy UTxOs for requested outputs", status: 400 }; + } + + const chosen = candidates.splice(bestIndex, 1)[0]!; + selected.push(chosen); + for (const asset of chosen.output.amount) { + const remaining = remainingByUnit.get(asset.unit) ?? 
BigInt(0); + if (remaining > BigInt(0)) { + const quantity = BigInt(asset.quantity); + remainingByUnit.set( + asset.unit, + remaining - (quantity < remaining ? quantity : remaining), + ); + } + } + } + + return selected; +} diff --git a/src/lib/server/resolveDRepAnchorFromUrl.ts b/src/lib/server/resolveDRepAnchorFromUrl.ts new file mode 100644 index 00000000..5c6ff563 --- /dev/null +++ b/src/lib/server/resolveDRepAnchorFromUrl.ts @@ -0,0 +1,168 @@ +import { timingSafeEqual } from "crypto"; +import * as dns from "node:dns/promises"; +import { hashDrepAnchor } from "@meshsdk/core"; + +function isPrivateOrLoopbackAddress(ip: string): boolean { + if (ip.includes(":")) { + const lower = ip.toLowerCase(); + if (lower === "::1") return true; + if (lower.startsWith("fe80:")) return true; + if (lower.startsWith("fc") || lower.startsWith("fd")) return true; + return false; + } + const parts = ip.split(".").map(Number); + if (parts.length !== 4 || parts.some((n) => Number.isNaN(n))) return false; + const [a, b] = parts; + if (a === undefined || b === undefined) return false; + if (a === 10) return true; + if (a === 127) return true; + if (a === 0) return true; + if (a === 169 && b === 254) return true; + if (a === 172 && b >= 16 && b <= 31) return true; + if (a === 192 && b === 168) return true; + if (a === 100 && b >= 64 && b <= 127) return true; + return false; +} + +const MAX_BYTES = 2 * 1024 * 1024; +const TIMEOUT_MS = 12_000; + +function normalizeHexForCompare(h: string): Buffer { + const s = h.trim().toLowerCase().replace(/^0x/, ""); + if (!/^[0-9a-f]+$/.test(s) || s.length % 2 !== 0) { + throw new Error("anchorDataHash must be hex"); + } + return Buffer.from(s, "hex"); +} + +async function assertUrlSafeForFetch(urlStr: string): Promise { + let u: URL; + try { + u = new URL(urlStr); + } catch { + throw new Error("Invalid anchor URL"); + } + if (u.protocol !== "https:" && u.protocol !== "http:") { + throw new Error("Anchor URL must use http or https"); + } + const host = 
u.hostname.toLowerCase(); + if ( + host === "localhost" || + host === "0.0.0.0" || + host.endsWith(".localhost") || + host.endsWith(".local") || + host.endsWith(".internal") || + host.endsWith(".lan") + ) { + throw new Error("Anchor URL hostname not allowed"); + } + + let records: { address: string }[]; + try { + const lookedUp = await dns.lookup(host, { all: true }); + records = Array.isArray(lookedUp) ? lookedUp : [lookedUp]; + } catch { + throw new Error("Could not resolve anchor URL host"); + } + for (const { address } of records) { + if (isPrivateOrLoopbackAddress(address)) { + throw new Error("Anchor URL resolves to a private or loopback address"); + } + } +} + +async function readBodyWithLimit( + res: Response, + maxBytes: number, +): Promise { + const body = res.body; + if (!body) { + throw new Error("Empty anchor response body"); + } + const reader = body.getReader(); + const chunks: Uint8Array[] = []; + let total = 0; + while (true) { + const { done, value } = await reader.read(); + if (done) break; + if (!value) continue; + total += value.length; + if (total > maxBytes) { + throw new Error(`Anchor response exceeds ${maxBytes} bytes`); + } + chunks.push(value); + } + const out = new Uint8Array(total); + let offset = 0; + for (const c of chunks) { + out.set(c, offset); + offset += c.length; + } + return out; +} + +/** + * Fetches JSON from anchorUrl, parses JSON, computes hashDrepAnchor (same as registerDrep after upload). + * Optional expectedAnchorDataHash (hex): rejects on mismatch. 
+ */ +export async function resolveDRepAnchorFromUrl( + anchorUrl: string, + expectedAnchorDataHash?: string, +): Promise<{ anchorUrl: string; anchorDataHash: string }> { + const trimmed = anchorUrl.trim(); + if (!trimmed) { + throw new Error("anchorUrl is required"); + } + await assertUrlSafeForFetch(trimmed); + + const ac = new AbortController(); + const t = setTimeout(() => ac.abort(), TIMEOUT_MS); + let res: Response; + try { + res = await fetch(trimmed, { // lgtm[js/ssrf] URL validated by assertUrlSafeForFetch: protocol, hostname blocklist, DNS/IP checks, no redirects + signal: ac.signal, + redirect: "error", + headers: { Accept: "application/json, */*" }, + }); + } catch (e) { + const msg = e instanceof Error ? e.message : String(e); + throw new Error(`Anchor fetch failed: ${msg}`); + } finally { + clearTimeout(t); + } + + if (!res.ok) { + throw new Error(`Anchor fetch failed: HTTP ${res.status}`); + } + + const buf = await readBodyWithLimit(res, MAX_BYTES); + let json: unknown; + try { + json = JSON.parse(new TextDecoder().decode(buf)); + } catch { + throw new Error("Anchor URL did not return valid JSON"); + } + + const anchorDataHash = hashDrepAnchor(json as object); + + if (expectedAnchorDataHash !== undefined && expectedAnchorDataHash !== "") { + const a = normalizeHexForCompare(anchorDataHash); + const b = normalizeHexForCompare(expectedAnchorDataHash); + if (a.length !== b.length || !timingSafeEqual(a, b)) { + throw new Error("anchorDataHash does not match content at anchorUrl"); + } + } + + return { anchorUrl: trimmed, anchorDataHash }; +} + +/** Hex compare for tests / external verification */ +export function hexEqualConstantTime(a: string, b: string): boolean { + try { + const ba = normalizeHexForCompare(a); + const bb = normalizeHexForCompare(b); + return ba.length === bb.length && timingSafeEqual(ba, bb); + } catch { + return false; + } +} diff --git a/src/lib/server/resolveUtxoRefsFromChain.ts b/src/lib/server/resolveUtxoRefsFromChain.ts new file 
mode 100644 index 00000000..5e0885ec --- /dev/null +++ b/src/lib/server/resolveUtxoRefsFromChain.ts @@ -0,0 +1,69 @@ +import type { UTxO } from "@meshsdk/core"; + +export type UtxoRef = { txHash: string; outputIndex: number }; + +export type UtxoFetcher = { + fetchUTxOs: (hash: string, index?: number) => Promise; +}; + +/** + * Resolves UTxOs from chain refs only (amounts/addresses from provider). + * Pass `provider` in tests; defaults to Blockfrost via getProvider(network). + */ +export async function resolveUtxoRefsFromChain(args: { + network: number; + utxoRefs: UtxoRef[]; + expectedSpendAddress: string; + provider?: UtxoFetcher; +}): Promise<{ utxos: UTxO[] } | { error: string; status: number }> { + const { network, utxoRefs, expectedSpendAddress } = args; + if (!Array.isArray(utxoRefs) || utxoRefs.length === 0) { + return { error: "utxoRefs must be a non-empty array", status: 400 }; + } + + const provider = + args.provider ?? + (await import("@/utils/get-provider")).getProvider(network); + const utxos: UTxO[] = []; + + for (const ref of utxoRefs) { + const txHash = typeof ref.txHash === "string" ? ref.txHash.trim() : ""; + const outputIndex = + typeof ref.outputIndex === "number" && Number.isInteger(ref.outputIndex) + ? ref.outputIndex + : -1; + if (!txHash || outputIndex < 0) { + return { error: "Invalid utxoRef: txHash and non-negative integer outputIndex required", status: 400 }; + } + + let fetched: UTxO[]; + try { + fetched = await provider.fetchUTxOs(txHash, outputIndex); + } catch (e) { + const msg = e instanceof Error ? 
e.message : String(e); + return { + error: `UTxO not found or not yet available: ${txHash}#${outputIndex} (${msg})`, + status: 400, + }; + } + + if (!fetched || fetched.length === 0) { + return { + error: `UTxO not found or already spent: ${txHash}#${outputIndex}`, + status: 400, + }; + } + + const utxo = fetched[0]!; + if (utxo.output.address !== expectedSpendAddress) { + return { + error: `UTxO ${txHash}#${outputIndex} is not at the multisig spend address for this wallet`, + status: 400, + }; + } + + utxos.push(utxo); + } + + return { utxos }; +} diff --git a/src/lib/server/v1WalletAuth.ts b/src/lib/server/v1WalletAuth.ts new file mode 100644 index 00000000..a34d410b --- /dev/null +++ b/src/lib/server/v1WalletAuth.ts @@ -0,0 +1,58 @@ +import type { Wallet } from "@prisma/client"; +import type { JwtPayload } from "@/lib/verifyJwt"; +import { isBotJwt } from "@/lib/verifyJwt"; +import { db } from "@/server/db"; +import { assertBotWalletAccess } from "@/lib/auth/botAccess"; +import { parseScope, scopeIncludes, type BotScope } from "@/lib/auth/botKey"; + +const SIGN_SCOPE = "multisig:sign" as BotScope; + +/** + * addTransaction-style auth plus bot `multisig:sign` scope for bot JWTs. 
+ */ +export async function authorizeWalletSignerForV1Tx( + payload: JwtPayload, + walletId: string, + address: string, +): Promise<{ wallet: Wallet }> { + if (payload.address !== address) { + const err = new Error("Address mismatch") as Error & { code: string }; + err.code = "ADDRESS_MISMATCH"; + throw err; + } + + if (isBotJwt(payload)) { + const botUser = await db.botUser.findUnique({ + where: { id: payload.botId }, + include: { botKey: true }, + }); + if (!botUser?.botKey) { + const err = new Error("Bot not found"); + (err as { code?: string }).code = "BOT_NOT_FOUND"; + throw err; + } + const scopes = parseScope(botUser.botKey.scope); + if (!scopeIncludes(scopes, SIGN_SCOPE)) { + const err = new Error("Insufficient scope: multisig:sign required"); + (err as { code?: string }).code = "INSUFFICIENT_SCOPE"; + throw err; + } + await assertBotWalletAccess(db, walletId, payload, true); + const wallet = await db.wallet.findUnique({ where: { id: walletId } }); + if (!wallet) { + const err = new Error("Wallet not found"); + (err as { code?: string }).code = "NOT_FOUND"; + throw err; + } + return { wallet }; + } + + const w = await db.wallet.findUnique({ where: { id: walletId } }); + const signers = w?.signersAddresses ?? 
[]; + if (!w || !signers.includes(address)) { + const err = new Error("Not authorized for this wallet"); + (err as { code?: string }).code = "NOT_SIGNER"; + throw err; + } + return { wallet: w }; +} diff --git a/src/lib/server/walletScriptAddress.ts b/src/lib/server/walletScriptAddress.ts new file mode 100644 index 00000000..3b98c850 --- /dev/null +++ b/src/lib/server/walletScriptAddress.ts @@ -0,0 +1,53 @@ +import type { Wallet as DbWallet } from "@prisma/client"; +import { buildMultisigWallet } from "@/utils/common"; +import { addressToNetwork } from "@/utils/multisigSDK"; +import { serializeNativeScript } from "@meshsdk/core"; +import { DbWalletWithLegacy } from "@/types/wallet"; +import { + decodeNativeScriptFromCbor, + decodedToNativeScript, +} from "@/utils/nativeScriptUtils"; + +/** + * Same resolution as GET /api/v1/freeUtxos: multisig script address for SDK wallets, + * otherwise native script + stake credential from stored scriptCbor. + */ +export function resolveWalletScriptAddress( + wallet: DbWalletWithLegacy, + fallbackAddress: string, +): string { + const mWallet = buildMultisigWallet(wallet); + if (mWallet) { + return mWallet.getScript().address; + } + + const canonicalScriptCbor = wallet.scriptCbor?.trim(); + if (!canonicalScriptCbor) { + throw new Error("Wallet is missing canonical scriptCbor"); + } + + const decoded = decodeNativeScriptFromCbor(canonicalScriptCbor); + const nativeScript = decodedToNativeScript(decoded); + const signerAddress = wallet.signersAddresses.find( + (candidate) => typeof candidate === "string" && candidate.trim().length > 0, + ); + const network = addressToNetwork(signerAddress ?? fallbackAddress); + return serializeNativeScript( + nativeScript, + wallet.stakeCredentialHash ?? 
undefined, + network, + ).address; +} + +export function resolveWalletScriptAddressSafe( + wallet: DbWallet, + fallbackAddress: string, +): { address: string } | { error: string } { + try { + return { address: resolveWalletScriptAddress(wallet as DbWalletWithLegacy, fallbackAddress) }; + } catch (e) { + return { + error: e instanceof Error ? e.message : "Wallet script address resolution failed", + }; + } +} diff --git a/src/pages/api/v1/README.md b/src/pages/api/v1/README.md index a1666ebc..ac1ab033 100644 --- a/src/pages/api/v1/README.md +++ b/src/pages/api/v1/README.md @@ -97,6 +97,68 @@ A comprehensive REST API implementation for the multisig wallet application, pro - **Response**: Updated transaction object with witness metadata, submission state, and transaction hash - **Error Handling**: 400 (validation), 401 (signature), 403 (authorization), 404 (not found), 409 (state conflict), 502 (broadcast failure), 500 (server) +#### `botStakeCertificate.ts` - POST `/api/v1/botStakeCertificate` + +- **Purpose**: Server-build a stake certificate transaction (register, deregister, delegate, or register-and-delegate) using the same Mesh patterns as the in-app staking UI, then persist or submit it using the same rules as `addTransaction`. +- **Authentication**: Required (JWT Bearer token). `address` in the body must match the JWT `address` (human signer or bot payment address). +- **Bot requirements**: Bot JWTs must include the **`multisig:sign`** scope. The bot must have **cosigner** access to the wallet (`assertBotWalletAccess` with mutating access). Observer bots are rejected. +- **Wallet support**: **SDK multisig wallets only**, with `stakingEnabled()` true. Legacy and Summon wallets return **400** with a clear reason. +- **UTxOs**: `utxoRefs` is required (non-empty). Each entry is `{ txHash, outputIndex }`. The server loads outputs from the chain and checks they sit at the same spend address used by **`GET /api/v1/freeUtxos`** (do not send raw UTxO JSON). 
+- **Request Body**: + - `walletId`: string (required) + - `address`: string (required; must match JWT) + - `action`: `"register"` | `"deregister"` | `"delegate"` | `"register_and_delegate"` (required) + - `poolId`: string (required for `delegate` and `register_and_delegate`; bech32 `pool1...` or 56-character hex pool id) + - `utxoRefs`: `{ txHash: string; outputIndex: number }[]` (required) + - `description`: string (optional; defaults to a short label for the action) +- **Response**: Same as `addTransaction` — either a pending `Transaction` row (**201**) when multiple signatures are required, or the immediate **`submitTx`** result when the wallet submits in one step (single signer / `type === "any"`). +- **Follow-up**: If the transaction is pending, co-signers call **`POST /api/v1/signTransaction`** as usual. +- **Error Handling**: 400 (validation, wrong wallet type, staking disabled, bad UTxO refs or pool id), 401 (auth), 403 (not a signer, bot observer, or missing `multisig:sign` for bots), 405 (method), 500 (server) + +#### `botDRepCertificate.ts` - POST `/api/v1/botDRepCertificate` + +- **Purpose**: Server-build a DRep **registration** or **retirement** transaction (non-proxy flows only), then persist or submit like `addTransaction`. +- **Authentication**: Same as `botStakeCertificate` (JWT; body `address` must match JWT; bots need **`multisig:sign`** and cosigner access). +- **Wallet support**: **Summon** wallets return **400** (unsupported in v1). **Legacy** and **SDK** paths mirror `registerDrep` / `retire` in the app (script and change-address selection). If DRep metadata cannot be derived (`getDRep` / `dRepId`), the handler returns **400**. +- **Register — anchor**: `anchorUrl` and `anchorJson` are both required. The caller provides the JSON document at `anchorUrl` directly in the request body — the server never fetches any URL. The server computes **`hashDrepAnchor`** from `@meshsdk/core` using the provided `anchorJson` object. 
+- **UTxOs**: Same `utxoRefs` policy as `botStakeCertificate` (chain-resolved, address-validated). +- **Request Body**: + - `walletId`: string (required) + - `address`: string (required; must match JWT) + - `action`: `"register"` | `"retire"` (required) + - `utxoRefs`: `{ txHash: string; outputIndex: number }[]` (required) + - `description`: string (optional) + - `anchorUrl`: string (required when `action === "register"`) + - `anchorJson`: object (required when `action === "register"`; the JSON document at `anchorUrl` — server computes the hash) +- **Response**: Same pattern as `addTransaction` / `botStakeCertificate` (**201**). +- **Error Handling**: 400 (validation, invalid anchorJson, unsupported wallet), 401 (auth), 403 (signer/bot scope/access), 405 (method), 500 (server) + +#### Proxy Bot API + +Proxy endpoints let bots propose proxy setup, proxy spending, proxy DRep certificates, and proxy votes through the same pending multisig transaction flow. They do not bypass the wallet threshold: bots need **`multisig:sign`** scope and **cosigner** access for all mutating proxy routes, while observer bots may call `GET /api/v1/proxies` and `GET /api/v1/proxyDRepInfo`. + +All Plutus proxy transaction routes accept UTxO references only. Do not send raw UTxO JSON. The server resolves each ref from chain, validates wallet UTxOs are at the multisig spend address, validates proxy spend inputs are at the selected proxy address, and requires an ADA-only `collateralRef` with at least 5 ADA at the request `address`. Server-built proxy transactions are persisted with no initial signed addresses, so the proposer still signs through `POST /api/v1/signTransaction`. + +Setup lifecycle: + +1. Call `POST /api/v1/proxySetup` with `walletId`, `address`, `utxoRefs`, `collateralRef`, optional `initialProxyLovelace`, and optional `description`. +2. The response includes `{ transaction, setup }`, where `setup` contains `proxyAddress`, `authTokenId`, and `paramUtxo`. +3. 
If `transaction` is pending, co-signers call `POST /api/v1/signTransaction` until the transaction is submitted. +4. After the setup is confirmed on-chain, call `POST /api/v1/proxySetupFinalize` with the setup metadata and `txHash`. The server validates that the transaction created the proxy-address output, returned the auth token to the multisig wallet address, and that both are visible in current chain state before creating or reactivating the confirmed `Proxy` row. +5. Use `GET /api/v1/proxies` to list active confirmed proxies. + +Endpoints: + +- `GET /api/v1/proxies`: query `walletId`, `address`; returns active confirmed proxies for that wallet. +- `GET /api/v1/proxyDRepInfo`: query `walletId`, `address`, `proxyId`; returns `{ active, dRepId }` for the proxy script DRep credential. +- `POST /api/v1/proxySetup`: body `walletId`, `address`, `utxoRefs`, `collateralRef`, optional `initialProxyLovelace`, optional `description`; returns pending/submitted transaction plus setup metadata. When omitted, `initialProxyLovelace` defaults to the current minimal proxy output amount. +- `POST /api/v1/proxySetupFinalize`: body `walletId`, `address`, `txHash`, `proxyAddress`, `authTokenId`, `paramUtxo`, optional `description`; creates or reactivates the confirmed proxy row after chain validation. +- `POST /api/v1/proxySpend`: body `walletId`, `address`, `proxyId`, `outputs`, `utxoRefs`, `collateralRef`, optional `proxyUtxoRefs`, optional `description`; requires one multisig input containing the proxy auth token. If `proxyUtxoRefs` is omitted, the server fetches proxy-address UTxOs and selects enough to cover `outputs` plus a fee buffer. +- `POST /api/v1/proxyDRepCertificate`: body `walletId`, `address`, `proxyId`, `action` (`register`, `update`, `deregister`), `utxoRefs`, `collateralRef`, optional `description`; `anchorUrl` and `anchorJson` are required for `register` and `update`, and the server computes `hashDrepAnchor(anchorJson)` without fetching `anchorUrl`. 
+- `POST /api/v1/proxyVote`: body `walletId`, `address`, `proxyId`, `votes`, `utxoRefs`, `collateralRef`, optional `description`; each vote has `proposalId` in `#` form and `voteKind` (`Yes`, `No`, `Abstain`). +- `POST /api/v1/proxyCleanup`: body `walletId`, `address`, `proxyId`, `utxoRefs`, `collateralRef`, optional `proxyUtxoRefs`, optional `deactivateProxy`, optional `description`; returns cleanup metadata with phase `sweep` while proxy-address UTxOs remain, then phase `burn` once the proxy address is empty. When `proxyUtxoRefs` is provided for cleanup, it must include every currently visible proxy UTxO. +- `POST /api/v1/proxyCleanupFinalize`: body `walletId`, `address`, `proxyId`, `txHash`, optional `deactivateProxy`; validates that the confirmed burn spent the auth token without recreating it or a proxy-address output, then marks the proxy inactive only after auth tokens are gone and the proxy address is empty. `deactivateProxy: false` validates without changing the row. + ### Wallet Management #### `walletIds.ts` - GET `/api/v1/walletIds` @@ -139,6 +201,7 @@ A comprehensive REST API implementation for the multisig wallet application, pro - `signersDRepKeys`: (string | null)[] (optional) - `numRequiredSigners`: number (optional, minimum 1, clamped to signer count, default 1; stored as `null` for `all`/`any`) - `scriptType`: `"atLeast"` | `"all"` | `"any"` (optional, default `"atLeast"`) + - `paymentNativeScript`: object (optional; explicit payment script tree with `sig`/`all`/`any`/`atLeast`; sig key hashes must match `signersAddresses` payment key hashes) - `stakeCredentialHash`: string (optional, external stake) - `network`: 0 | 1 (optional, default 1 = mainnet) - **Response**: `{ walletId, address, name }` (201) @@ -372,7 +435,7 @@ A comprehensive REST API implementation for the multisig wallet application, pro 2. **Human Claims**: Owner calls `POST /api/v1/botClaim` with JWT + claim code 3. 
**Bot Picks Up Secret**: Bot calls `GET /api/v1/botPickupSecret` once 4. **Bot Authenticates**: Bot calls `POST /api/v1/botAuth` to receive bot JWT -5. **Bot API Access**: Bot uses JWT for bot endpoints (e.g. `botMe`, `createWallet`, governance APIs) +5. **Bot API Access**: Bot uses JWT for bot endpoints (e.g. `botMe`, `createWallet`, governance APIs, and certificate builders **`/api/v1/botStakeCertificate`** / **`/api/v1/botDRepCertificate`** when `multisig:sign` is granted) ### Error Handling @@ -435,6 +498,8 @@ A comprehensive REST API implementation for the multisig wallet application, pro - `JWT_SECRET`: Secret key for JWT token generation - `NEXT_PUBLIC_BLOCKFROST_API_KEY_PREPROD`: Preprod network API key - `NEXT_PUBLIC_BLOCKFROST_API_KEY_MAINNET`: Mainnet network API key +- `BLOCKFROST_API_KEY_PREPROD`: Optional server-side override for preprod provider calls +- `BLOCKFROST_API_KEY_MAINNET`: Optional server-side override for mainnet provider calls ### Database Configuration @@ -493,4 +558,70 @@ const response = await fetch( const freeUtxos = await response.json(); ``` +### Server-built stake / DRep certificates (bots or signers) + +Use `freeUtxos` to choose inputs, then pass only `txHash` and `outputIndex` for each UTxO. Bots must use a JWT from `botAuth` with the **`multisig:sign`** scope. 
+ +```typescript +// Stake delegate (SDK wallet; poolId required for delegate / register_and_delegate) +await fetch("/api/v1/botStakeCertificate", { + method: "POST", + headers: { + Authorization: `Bearer ${token}`, + "Content-Type": "application/json", + }, + body: JSON.stringify({ + walletId, + address: botPaymentAddress, + action: "delegate", + poolId: "pool1...", + utxoRefs: [{ txHash: "...", outputIndex: 0 }], + description: "Delegate via API", + }), +}); + +// DRep register — caller supplies anchorUrl + anchorJson; server computes the hash +await fetch("/api/v1/botDRepCertificate", { + method: "POST", + headers: { + Authorization: `Bearer ${token}`, + "Content-Type": "application/json", + }, + body: JSON.stringify({ + walletId, + address: botPaymentAddress, + action: "register", + utxoRefs: [{ txHash: "...", outputIndex: 0 }], + anchorUrl: "https://example.com/drep-metadata.jsonld", + anchorJson: { "@context": { ... }, "hashAlgorithm": "blake2b-256", "body": { ... } }, + }), +}); +``` + This API v1 directory provides a comprehensive, secure, and well-documented REST API for multisig wallet operations, supporting the entire application ecosystem with robust authentication, transaction management, and blockchain integration. + +## PR Route-Chain Smoke (Real-Chain CI) + +- Workflow: `.github/workflows/pr-multisig-v1-smoke.yml` +- Bootstrap script: `scripts/ci/cli/bootstrap.ts` (stable context producer) +- Route-chain runner: `scripts/ci/cli/route-chain.ts` +- Scenario registry: `scripts/ci/scenarios/manifest.ts` + +The CI flow is split into: + +1. **Bootstrap**: create deterministic test wallets/context once. +2. **Route chain**: execute composable v1 route steps against that context. + +Signing is always enabled in this route-chain flow, and signing steps run with broadcast enabled to validate real-chain submission behavior. 
+ +Current route-chain scenarios include: + +- discovery and route health checks (`walletIds`, `proxies`, `freeUtxos`, `nativeScript`, public wallet lookup) +- create-wallet, bot identity, auth-plane, and explicit auth-negative checks +- proxy smoke checks plus full proxy lifecycle coverage for eligible CI wallets (`legacy`, `hierarchical`, `sdk`: `proxySetup` -> `proxySetupFinalize` -> `proxySpend` -> proxy DRep register/deregister -> optional `proxyVote` -> `proxyCleanup` -> `proxyCleanupFinalize`) +- DRep and stake certificate builders, including payment/stake witness signing paths +- real transfer flow (`addTransaction` -> `signTransaction` with broadcast) +- final-state assertions (`pendingTransactions` consistency checks) + +To add coverage for a new v1 endpoint, add one step and register it in the scenario manifest without changing workflow orchestration. +Use `scripts/ci/scenarios/steps/template-route-step.ts` as a starter scaffold. diff --git a/src/pages/api/v1/addTransaction.ts b/src/pages/api/v1/addTransaction.ts index 8ef53c1e..11844c1f 100644 --- a/src/pages/api/v1/addTransaction.ts +++ b/src/pages/api/v1/addTransaction.ts @@ -1,10 +1,11 @@ import type { NextApiRequest, NextApiResponse } from "next"; +import { csl } from "@meshsdk/core-csl"; import { db } from "@/server/db"; import { verifyJwt, isBotJwt } from "@/lib/verifyJwt"; import { cors, addCorsCacheBustingHeaders } from "@/lib/cors"; -import { getProvider } from "@/utils/get-provider"; import { applyRateLimit, applyBotRateLimit, enforceBodySize } from "@/lib/security/requestGuards"; import { assertBotWalletAccess } from "@/lib/auth/botAccess"; +import { createPendingMultisigTransaction } from "@/lib/server/createPendingMultisigTransaction"; export default async function handler( req: NextApiRequest, @@ -75,6 +76,28 @@ export default async function handler( return res.status(400).json({ error: "Missing required field txJson!" 
}); } + // Reject unparseable CBOR/JSON up front so we never persist a row that + // the transactions page or the Cardano node cannot deserialize (#211). + if (typeof txCbor !== "string") { + return res.status(400).json({ error: "Invalid txCbor: must be a hex string" }); + } + try { + csl.Transaction.from_hex(txCbor); + } catch (err) { + const msg = err instanceof Error ? err.message : String(err); + return res.status(400).json({ error: `Invalid transaction CBOR: ${msg}` }); + } + if (typeof txJson === "string") { + try { + JSON.parse(txJson); + } catch (err) { + const msg = err instanceof Error ? err.message : String(err); + return res.status(400).json({ error: `Invalid txJson: ${msg}` }); + } + } else if (typeof txJson !== "object" || txJson === null) { + return res.status(400).json({ error: "Invalid txJson: must be a JSON object or string" }); + } + let wallet: { id: string; signersAddresses: string[]; numRequiredSigners: number | null; type: string }; if (isBotJwt(payload)) { try { @@ -98,24 +121,15 @@ export default async function handler( const network = address.includes("test") ? 0 : 1; try { - let newTx; - //ToDo refactor to more cases. - if (reqSigners === 1 || type === "any") { - const blockchainProvider = getProvider(network); - newTx = blockchainProvider.submitTx(txCbor); - } else { - newTx = await db.transaction.create({ - data: { - walletId, - txJson: typeof txJson === "object" ? 
JSON.stringify(txJson) : txJson, - txCbor, - signedAddresses: [address], - rejectedAddresses: [], - description, - state: 0, - }, - }); - } + const newTx = await createPendingMultisigTransaction(db, { + walletId, + wallet: { numRequiredSigners: reqSigners, type }, + proposerAddress: address, + txCbor, + txJson, + description, + network, + }); res.status(201).json(newTx); } catch (error) { diff --git a/src/pages/api/v1/botDRepCertificate.ts b/src/pages/api/v1/botDRepCertificate.ts new file mode 100644 index 00000000..51e37cc8 --- /dev/null +++ b/src/pages/api/v1/botDRepCertificate.ts @@ -0,0 +1,261 @@ +import type { NextApiRequest, NextApiResponse } from "next"; +import { db } from "@/server/db"; +import { verifyJwt, isBotJwt } from "@/lib/verifyJwt"; +import { cors, addCorsCacheBustingHeaders } from "@/lib/cors"; +import { + applyRateLimit, + applyBotRateLimit, + enforceBodySize, +} from "@/lib/security/requestGuards"; +import { authorizeWalletSignerForV1Tx } from "@/lib/server/v1WalletAuth"; +import { buildMultisigWallet, buildWallet, getWalletType } from "@/utils/common"; +import { getTxBuilder } from "@/utils/get-tx-builder"; +import { resolveWalletScriptAddress } from "@/lib/server/walletScriptAddress"; +import { resolveUtxoRefsFromChain } from "@/lib/server/resolveUtxoRefsFromChain"; +import { createPendingMultisigTransaction } from "@/lib/server/createPendingMultisigTransaction"; +import type { DbWalletWithLegacy } from "@/types/wallet"; +import type { Wallet as AppWallet } from "@/types/wallet"; +import type { MultisigWallet } from "@/utils/multisigSDK"; +import { hashDrepAnchor } from "@meshsdk/core"; + +type DRepAction = "register" | "retire"; + +function resolveDRepScripts(args: { + multisigWallet: MultisigWallet | undefined; + appWallet: AppWallet; +}): { dRepId: string; drepCbor: string; scriptCbor: string; changeAddress: string } | null { + const { multisigWallet, appWallet } = args; + if (multisigWallet) { + const drepData = 
multisigWallet.getDRep(appWallet); + if (!drepData) return null; + const dRepId = drepData.dRepId; + const drepCbor = drepData.drepCbor; + const multisigScript = multisigWallet.getScript(); + const multisigScriptCbor = multisigScript.scriptCbor; + const appScriptCbor = appWallet.scriptCbor; + if (!multisigScriptCbor && !appScriptCbor) return null; + const scriptCbor = multisigWallet.getKeysByRole(3) + ? multisigScriptCbor || appScriptCbor! + : appScriptCbor || multisigScriptCbor!; + const changeAddress = multisigScript.address; + return { dRepId, drepCbor, scriptCbor, changeAddress }; + } + if (!appWallet.dRepId || !appWallet.scriptCbor) return null; + return { + dRepId: appWallet.dRepId, + drepCbor: appWallet.scriptCbor, + scriptCbor: appWallet.scriptCbor, + changeAddress: appWallet.address, + }; +} + +export default async function handler( + req: NextApiRequest, + res: NextApiResponse, +) { + addCorsCacheBustingHeaders(res); + + if (!applyRateLimit(req, res, { keySuffix: "v1/botDRepCertificate" })) { + return; + } + + await cors(req, res); + if (req.method === "OPTIONS") { + return res.status(200).end(); + } + + if (req.method !== "POST") { + return res.status(405).json({ error: "Method Not Allowed" }); + } + + if (!enforceBodySize(req, res, 200 * 1024)) { + return; + } + + const authHeader = req.headers.authorization; + const token = authHeader?.startsWith("Bearer ") ? 
authHeader.slice(7) : null; + if (!token) { + return res.status(401).json({ error: "Unauthorized - Missing token" }); + } + + const payload = verifyJwt(token); + if (!payload) { + return res.status(401).json({ error: "Invalid or expired token" }); + } + + if (isBotJwt(payload) && !applyBotRateLimit(req, res, payload.botId)) { + return; + } + + const body = req.body as { + walletId?: string; + address?: string; + action?: string; + utxoRefs?: { txHash: string; outputIndex: number }[]; + description?: string; + anchorUrl?: string; + anchorJson?: unknown; + }; + + const walletId = typeof body.walletId === "string" ? body.walletId : ""; + const address = typeof body.address === "string" ? body.address : ""; + const action = body.action as DRepAction | undefined; + + if (!walletId) { + return res.status(400).json({ error: "Missing required field walletId" }); + } + if (!address) { + return res.status(400).json({ error: "Missing required field address" }); + } + if (action !== "register" && action !== "retire") { + return res.status(400).json({ error: "Invalid or missing action (register or retire)" }); + } + + try { + await authorizeWalletSignerForV1Tx(payload, walletId, address); + } catch (err) { + const code = (err as { code?: string }).code; + if (code === "INSUFFICIENT_SCOPE") { + return res.status(403).json({ error: (err as Error).message }); + } + return res.status(403).json({ + error: err instanceof Error ? err.message : "Not authorized for this wallet", + }); + } + + const walletRow = await db.wallet.findUnique({ where: { id: walletId } }); + if (!walletRow) { + return res.status(404).json({ error: "Wallet not found" }); + } + + const wallet = walletRow as DbWalletWithLegacy; + const wt = getWalletType(wallet); + if (wt === "summon") { + return res.status(400).json({ + error: "DRep certificates are not supported for Summon wallets in this API version", + }); + } + + const network = address.includes("test") ? 
0 : 1; + const appWallet = buildWallet(wallet, network); + const multisigWallet = buildMultisigWallet(wallet); + + const scripts = resolveDRepScripts({ multisigWallet, appWallet }); + if (!scripts) { + return res.status(400).json({ + error: "DRep is not configured for this wallet (could not derive DRep id and scripts)", + }); + } + + const { dRepId, drepCbor, scriptCbor, changeAddress } = scripts; + + let spendAddress: string; + try { + spendAddress = resolveWalletScriptAddress(wallet, address); + } catch (e) { + return res.status(500).json({ + error: + e instanceof Error ? e.message : "Wallet script address resolution failed", + }); + } + + const resolved = await resolveUtxoRefsFromChain({ + network, + utxoRefs: body.utxoRefs ?? [], + expectedSpendAddress: spendAddress, + }); + if ("error" in resolved) { + return res.status(resolved.status).json({ error: resolved.error }); + } + const { utxos } = resolved; + + const txBuilder = getTxBuilder(network); + + if (action === "register") { + const anchorUrl = + typeof body.anchorUrl === "string" ? 
body.anchorUrl.trim() : ""; + if (!anchorUrl) { + return res.status(400).json({ error: "anchorUrl is required for register" }); + } + const anchorJson = body.anchorJson; + if (anchorJson === null || typeof anchorJson !== "object" || Array.isArray(anchorJson)) { + return res.status(400).json({ error: "anchorJson is required for register — provide the JSON object at anchorUrl so the server can compute the hash" }); + } + let anchorDataHash: string; + try { + anchorDataHash = hashDrepAnchor(anchorJson as object); + } catch { + return res.status(400).json({ error: "Failed to compute anchor data hash from anchorJson" }); + } + + for (const utxo of utxos) { + txBuilder.txIn( + utxo.input.txHash, + utxo.input.outputIndex, + utxo.output.amount, + utxo.output.address, + ); + txBuilder.txInScript(scriptCbor); + } + + txBuilder + .drepRegistrationCertificate(dRepId, { + anchorUrl, + anchorDataHash, + }) + .certificateScript(drepCbor) + .changeAddress(changeAddress); + } else { + for (const utxo of utxos) { + txBuilder.txIn( + utxo.input.txHash, + utxo.input.outputIndex, + utxo.output.amount, + utxo.output.address, + ); + txBuilder.txInScript(scriptCbor); + } + txBuilder + .changeAddress(changeAddress) + .drepDeregistrationCertificate(dRepId) + .certificateScript(drepCbor); + } + + let txHex: string; + let txJson: unknown; + try { + txHex = await txBuilder.complete(); + txJson = txBuilder.meshTxBuilderBody; + } catch (e) { + console.error("botDRepCertificate complete error:", e); + return res.status(500).json({ + error: e instanceof Error ? e.message : "Failed to build transaction", + }); + } + + const description = + typeof body.description === "string" && body.description.trim() + ? body.description.trim() + : action === "register" + ? 
"DRep registration" + : "DRep retirement"; + + try { + const newTx = await createPendingMultisigTransaction(db, { + walletId, + wallet: { + numRequiredSigners: walletRow.numRequiredSigners, + type: walletRow.type, + }, + proposerAddress: address, + txCbor: txHex, + txJson, + description, + network, + }); + return res.status(201).json(newTx); + } catch (error) { + console.error("botDRepCertificate persist error:", error); + return res.status(500).json({ error: "Internal Server Error" }); + } +} diff --git a/src/pages/api/v1/botStakeCertificate.ts b/src/pages/api/v1/botStakeCertificate.ts new file mode 100644 index 00000000..aa8aee45 --- /dev/null +++ b/src/pages/api/v1/botStakeCertificate.ts @@ -0,0 +1,240 @@ +import type { NextApiRequest, NextApiResponse } from "next"; +import { db } from "@/server/db"; +import { verifyJwt, isBotJwt } from "@/lib/verifyJwt"; +import { cors, addCorsCacheBustingHeaders } from "@/lib/cors"; +import { + applyRateLimit, + applyBotRateLimit, + enforceBodySize, +} from "@/lib/security/requestGuards"; +import { authorizeWalletSignerForV1Tx } from "@/lib/server/v1WalletAuth"; +import { buildMultisigWallet, buildWallet, getWalletType } from "@/utils/common"; +import { getTxBuilder } from "@/utils/get-tx-builder"; +import { + buildStakingCertificateActions, + type StakingActionApi, +} from "@/utils/stakingCertificates"; +import { normalizePoolIdForDelegation } from "@/lib/server/normalizePoolId"; +import { resolveWalletScriptAddress } from "@/lib/server/walletScriptAddress"; +import { resolveUtxoRefsFromChain } from "@/lib/server/resolveUtxoRefsFromChain"; +import { createPendingMultisigTransaction } from "@/lib/server/createPendingMultisigTransaction"; +import type { DbWalletWithLegacy } from "@/types/wallet"; + +const ACTIONS: StakingActionApi[] = [ + "register", + "deregister", + "delegate", + "register_and_delegate", +]; + +function isStakingActionApi(s: string): s is StakingActionApi { + return (ACTIONS as string[]).includes(s); +} + 
+export default async function handler( + req: NextApiRequest, + res: NextApiResponse, +) { + addCorsCacheBustingHeaders(res); + + if (!applyRateLimit(req, res, { keySuffix: "v1/botStakeCertificate" })) { + return; + } + + await cors(req, res); + if (req.method === "OPTIONS") { + return res.status(200).end(); + } + + if (req.method !== "POST") { + return res.status(405).json({ error: "Method Not Allowed" }); + } + + if (!enforceBodySize(req, res, 200 * 1024)) { + return; + } + + const authHeader = req.headers.authorization; + const token = authHeader?.startsWith("Bearer ") ? authHeader.slice(7) : null; + if (!token) { + return res.status(401).json({ error: "Unauthorized - Missing token" }); + } + + const payload = verifyJwt(token); + if (!payload) { + return res.status(401).json({ error: "Invalid or expired token" }); + } + + if (isBotJwt(payload) && !applyBotRateLimit(req, res, payload.botId)) { + return; + } + + const body = req.body as { + walletId?: string; + address?: string; + action?: string; + poolId?: string; + utxoRefs?: { txHash: string; outputIndex: number }[]; + description?: string; + }; + + const walletId = typeof body.walletId === "string" ? body.walletId : ""; + const address = typeof body.address === "string" ? body.address : ""; + const actionRaw = typeof body.action === "string" ? 
body.action : ""; + + if (!walletId) { + return res.status(400).json({ error: "Missing required field walletId" }); + } + if (!address) { + return res.status(400).json({ error: "Missing required field address" }); + } + if (!isStakingActionApi(actionRaw)) { + return res.status(400).json({ + error: + "Invalid or missing action (expected register, deregister, delegate, register_and_delegate)", + }); + } + const action = actionRaw; + + if ( + (action === "delegate" || action === "register_and_delegate") && + (typeof body.poolId !== "string" || !body.poolId.trim()) + ) { + return res.status(400).json({ error: "poolId is required for this action" }); + } + + try { + await authorizeWalletSignerForV1Tx(payload, walletId, address); + } catch (err) { + const code = (err as { code?: string }).code; + if (code === "INSUFFICIENT_SCOPE") { + return res.status(403).json({ error: (err as Error).message }); + } + const status = + code === "ADDRESS_MISMATCH" || code === "NOT_SIGNER" || code === "BOT_NOT_FOUND" + ? 403 + : 403; + return res.status(status).json({ + error: err instanceof Error ? err.message : "Not authorized for this wallet", + }); + } + + const walletRow = await db.wallet.findUnique({ where: { id: walletId } }); + if (!walletRow) { + return res.status(404).json({ error: "Wallet not found" }); + } + + const wallet = walletRow as DbWalletWithLegacy; + if (getWalletType(wallet) !== "sdk") { + return res.status(400).json({ + error: + "Stake certificates are only supported for SDK multisig wallets (legacy and Summon are not supported)", + }); + } + + const mWallet = buildMultisigWallet(wallet); + if (!mWallet?.stakingEnabled()) { + return res.status(400).json({ + error: "Staking is not enabled for this wallet (payment/stake key counts)", + }); + } + + const network = address.includes("test") ? 
0 : 1; + const appWallet = buildWallet(wallet, network); + + const rewardAddress = mWallet.getStakeAddress(); + const stakingScript = appWallet.stakeScriptCbor || mWallet.getStakingScript(); + if (!rewardAddress || !stakingScript) { + return res.status(400).json({ + error: "Could not derive reward address or staking script for this wallet", + }); + } + + let spendAddress: string; + try { + spendAddress = resolveWalletScriptAddress(wallet, address); + } catch (e) { + return res.status(500).json({ + error: + e instanceof Error ? e.message : "Wallet script address resolution failed", + }); + } + + const resolved = await resolveUtxoRefsFromChain({ + network, + utxoRefs: body.utxoRefs ?? [], + expectedSpendAddress: spendAddress, + }); + if ("error" in resolved) { + return res.status(resolved.status).json({ error: resolved.error }); + } + const { utxos } = resolved; + + let poolHex = ""; + if (action === "delegate" || action === "register_and_delegate") { + try { + poolHex = normalizePoolIdForDelegation(body.poolId!); + } catch (e) { + return res.status(400).json({ + error: e instanceof Error ? e.message : "Invalid poolId", + }); + } + } + + const txBuilder = getTxBuilder(network); + const spendScriptCbor = mWallet.getScript().scriptCbor || appWallet.scriptCbor; + for (const utxo of utxos) { + txBuilder.txIn( + utxo.input.txHash, + utxo.input.outputIndex, + utxo.output.amount, + utxo.output.address, + ); + txBuilder.txInScript(spendScriptCbor); + } + + const certActions = buildStakingCertificateActions({ + txBuilder, + rewardAddress, + stakingScript, + poolHex, + }); + certActions[action].execute(); + txBuilder.changeAddress(mWallet.getScript().address); + + let txHex: string; + let txJson: unknown; + try { + txHex = await txBuilder.complete(); + txJson = txBuilder.meshTxBuilderBody; + } catch (e) { + console.error("botStakeCertificate complete error:", e); + return res.status(500).json({ + error: e instanceof Error ? 
e.message : "Failed to build transaction", + }); + } + + const description = + typeof body.description === "string" && body.description.trim() + ? body.description.trim() + : certActions[action].description; + + try { + const newTx = await createPendingMultisigTransaction(db, { + walletId, + wallet: { + numRequiredSigners: walletRow.numRequiredSigners, + type: walletRow.type, + }, + proposerAddress: address, + txCbor: txHex, + txJson, + description, + network, + }); + return res.status(201).json(newTx); + } catch (error) { + console.error("botStakeCertificate persist error:", error); + return res.status(500).json({ error: "Internal Server Error" }); + } +} diff --git a/src/pages/api/v1/createWallet.ts b/src/pages/api/v1/createWallet.ts index bfbae5a4..8ceac409 100644 --- a/src/pages/api/v1/createWallet.ts +++ b/src/pages/api/v1/createWallet.ts @@ -5,11 +5,86 @@ import { cors, addCorsCacheBustingHeaders } from "@/lib/cors"; import { applyRateLimit, applyBotRateLimit, enforceBodySize } from "@/lib/security/requestGuards"; import { parseScope, scopeIncludes, type BotScope } from "@/lib/auth/botKey"; import { MultisigWallet, type MultisigKey } from "@/utils/multisigSDK"; -import { resolvePaymentKeyHash, resolveStakeKeyHash } from "@meshsdk/core"; +import { + collectSigKeyHashes, + computeRequiredSigners, + decodedToNativeScript, + type DecodedNativeScript, +} from "@/utils/nativeScriptUtils"; +import { resolvePaymentKeyHash, resolveStakeKeyHash, serializeNativeScript } from "@meshsdk/core"; import { BotWalletRole } from "@prisma/client"; const CREATE_SCOPE = "multisig:create"; +type PaymentNativeScriptNode = + | { type: "sig"; keyHash: string } + | { type: "all"; scripts: PaymentNativeScriptNode[] } + | { type: "any"; scripts: PaymentNativeScriptNode[] } + | { type: "atLeast"; required: number; scripts: PaymentNativeScriptNode[] }; + +function isSupportedPaymentNativeScript( + value: unknown, +): value is DecodedNativeScript { + if (!value || typeof value !== "object") 
return false; + const node = value as { type?: string; keyHash?: string; required?: number; scripts?: unknown }; + + if (node.type === "sig") { + return typeof node.keyHash === "string" && !!node.keyHash.trim(); + } + + if (node.type === "all" || node.type === "any") { + return ( + Array.isArray(node.scripts) && + node.scripts.length > 0 && + node.scripts.every((child) => isSupportedPaymentNativeScript(child)) + ); + } + + if (node.type === "atLeast") { + return ( + typeof node.required === "number" && + Number.isInteger(node.required) && + node.required >= 1 && + Array.isArray(node.scripts) && + node.scripts.length > 0 && + node.required <= node.scripts.length && + node.scripts.every((child) => isSupportedPaymentNativeScript(child)) + ); + } + + return false; +} + +function buildLegacyPaymentNativeScriptInInputOrder(args: { + scriptType: "atLeast" | "all" | "any"; + requiredSigners: number; + paymentKeyHashes: string[]; +}): PaymentNativeScriptNode { + const sigScripts = args.paymentKeyHashes.map((keyHash) => ({ + type: "sig" as const, + keyHash, + })); + + if (args.scriptType === "all" || args.scriptType === "any") { + return { + type: args.scriptType, + scripts: sigScripts, + }; + } + + return { + type: "atLeast", + required: args.requiredSigners, + scripts: sigScripts, + }; +} + +function isAllRootScript( + script: DecodedNativeScript, +): script is Extract { + return script.type === "all"; +} + export default async function handler( req: NextApiRequest, res: NextApiResponse, @@ -75,6 +150,7 @@ export default async function handler( signersDRepKeys?: (string | null)[]; numRequiredSigners?: number; scriptType?: "atLeast" | "all" | "any"; + paymentNativeScript?: unknown; stakeCredentialHash?: string; network?: number; }; @@ -130,18 +206,37 @@ export default async function handler( const description = typeof body.description === "string" ? 
body.description.slice(0, 2000) : ""; + const paymentNativeScriptRaw = body.paymentNativeScript; + const paymentNativeScript = paymentNativeScriptRaw + ? isSupportedPaymentNativeScript(paymentNativeScriptRaw) + ? paymentNativeScriptRaw + : null + : undefined; + if (paymentNativeScriptRaw && !paymentNativeScript) { + return res.status(400).json({ + error: "paymentNativeScript must be a valid native script tree containing only sig/all/any/atLeast nodes", + }); + } + if (paymentNativeScript && !isAllRootScript(paymentNativeScript)) { + return res.status(400).json({ + error: "paymentNativeScript root type must be 'all' for hierarchical wallets", + }); + } const keys: MultisigKey[] = []; + const signerPaymentKeyHashes: string[] = []; for (let i = 0; i < signersAddresses.length; i++) { const addr = signersAddresses[i]; if (!addr) continue; try { + const paymentKeyHash = resolvePaymentKeyHash(addr); keys.push({ - keyHash: resolvePaymentKeyHash(addr), + keyHash: paymentKeyHash, role: 0, name: descs[i] ?? "", }); + signerPaymentKeyHashes.push(paymentKeyHash.toLowerCase()); } catch { const hint = i === 1 @@ -180,27 +275,83 @@ export default async function handler( return res.status(400).json({ error: "No valid signer keys" }); } + const effectiveScriptType = paymentNativeScript ? "all" : scriptType; const numRequired = - scriptType === "all" || scriptType === "any" ? null : numRequiredSigners; + paymentNativeScript + ? computeRequiredSigners(paymentNativeScript) + : effectiveScriptType === "all" || effectiveScriptType === "any" + ? 
null + : numRequiredSigners; let scriptCbor: string; let address: string; try { - const multisigWallet = new MultisigWallet( - name, - keys, - description, - numRequiredSigners, - network, - stakeCredentialHash, - scriptType, - ); - const script = multisigWallet.getScript(); - if (!script.scriptCbor) { - return res.status(400).json({ error: "Failed to build multisig script" }); + if (paymentNativeScript) { + const scriptSigHashes = Array.from( + new Set(collectSigKeyHashes(paymentNativeScript).map((hash) => hash.toLowerCase())), + ); + const signerSigHashes = Array.from(new Set(signerPaymentKeyHashes)); + const scriptHasExactSignerSet = + scriptSigHashes.length === signerSigHashes.length && + scriptSigHashes.every((hash) => signerSigHashes.includes(hash)); + if (!scriptHasExactSignerSet) { + return res.status(400).json({ + error: "paymentNativeScript sig keys must match signersAddresses payment keys", + }); + } + + const nativeScript = decodedToNativeScript(paymentNativeScript); + const serialized = serializeNativeScript( + nativeScript, + stakeCredentialHash, + network, + true, + ); + if (!serialized.scriptCbor) { + return res.status(400).json({ error: "Failed to serialize paymentNativeScript" }); + } + scriptCbor = serialized.scriptCbor; + address = serialized.address; + } else { + const isLegacyWallet = + !signersStakeKeys.some(Boolean) && + !signersDRepKeys.some(Boolean); + + if (isLegacyWallet) { + const legacyScript = buildLegacyPaymentNativeScriptInInputOrder({ + scriptType, + requiredSigners: numRequiredSigners, + paymentKeyHashes: signerPaymentKeyHashes, + }); + const serialized = serializeNativeScript( + legacyScript, + stakeCredentialHash, + network, + true, + ); + if (!serialized.scriptCbor) { + return res.status(400).json({ error: "Failed to build multisig script" }); + } + scriptCbor = serialized.scriptCbor; + address = serialized.address; + } else { + const multisigWallet = new MultisigWallet( + name, + keys, + description, + numRequiredSigners, + 
network, + stakeCredentialHash, + scriptType, + ); + const script = multisigWallet.getScript(); + if (!script.scriptCbor) { + return res.status(400).json({ error: "Failed to build multisig script" }); + } + scriptCbor = script.scriptCbor; + address = script.address; + } } - scriptCbor = script.scriptCbor; - address = script.address; } catch (e) { console.error("createWallet script build error:", e); return res.status(400).json({ @@ -221,7 +372,7 @@ export default async function handler( numRequiredSigners: numRequired, scriptCbor, stakeCredentialHash: stakeCredentialHash ?? null, - type: scriptType, + type: effectiveScriptType, ownerAddress: payload.address, }, }); diff --git a/src/pages/api/v1/drepInfo.ts b/src/pages/api/v1/drepInfo.ts new file mode 100644 index 00000000..7d4fee55 --- /dev/null +++ b/src/pages/api/v1/drepInfo.ts @@ -0,0 +1,94 @@ +import type { NextApiRequest, NextApiResponse } from "next"; +import { cors, addCorsCacheBustingHeaders } from "@/lib/cors"; +import { verifyJwt, isBotJwt } from "@/lib/verifyJwt"; +import { applyRateLimit, applyBotRateLimit } from "@/lib/security/requestGuards"; +import { db } from "@/server/db"; +import { buildMultisigWallet, buildWallet, getWalletType } from "@/utils/common"; +import { env } from "@/env"; +import type { DbWalletWithLegacy } from "@/types/wallet"; + +function getBlockfrostConfig(network: 0 | 1): { key: string; baseUrl: string } | null { + if (network === 0) { + const key = env.BLOCKFROST_API_KEY_PREPROD ?? env.NEXT_PUBLIC_BLOCKFROST_API_KEY_PREPROD; + if (!key) return null; + return { key, baseUrl: "https://cardano-preprod.blockfrost.io/api/v0" }; + } + const key = env.BLOCKFROST_API_KEY_MAINNET ?? 
env.NEXT_PUBLIC_BLOCKFROST_API_KEY_MAINNET; + if (!key) return null; + return { key, baseUrl: "https://cardano-mainnet.blockfrost.io/api/v0" }; +} + +export default async function handler(req: NextApiRequest, res: NextApiResponse) { + addCorsCacheBustingHeaders(res); + if (!applyRateLimit(req, res, { keySuffix: "v1/drepInfo" })) return; + await cors(req, res); + if (req.method === "OPTIONS") return res.status(200).end(); + if (req.method !== "GET") return res.status(405).json({ error: "Method Not Allowed" }); + + const authHeader = req.headers.authorization; + const token = authHeader?.startsWith("Bearer ") ? authHeader.slice(7) : null; + if (!token) return res.status(401).json({ error: "Unauthorized - Missing token" }); + + const payload = verifyJwt(token); + if (!payload) return res.status(401).json({ error: "Invalid or expired token" }); + if (isBotJwt(payload) && !applyBotRateLimit(req, res, payload.botId)) return; + + const { walletId, address } = req.query; + if (typeof walletId !== "string" || !walletId.trim()) { + return res.status(400).json({ error: "Missing or invalid walletId parameter" }); + } + if (typeof address !== "string" || !address.trim()) { + return res.status(400).json({ error: "Missing or invalid address parameter" }); + } + + const walletRow = await db.wallet.findUnique({ where: { id: walletId } }); + if (!walletRow) return res.status(404).json({ error: "Wallet not found" }); + + const wallet = walletRow as DbWalletWithLegacy; + const wt = getWalletType(wallet); + if (wt === "summon") { + return res.status(400).json({ error: "DRep certificates are not supported for Summon wallets" }); + } + + const network: 0 | 1 = address.includes("test") ? 0 : 1; + const appWallet = buildWallet(wallet, network); + const multisigWallet = buildMultisigWallet(wallet); + + let dRepId: string | undefined; + if (multisigWallet) { + const drepData = multisigWallet.getDRep(appWallet); + dRepId = drepData?.dRepId; + } else { + dRepId = appWallet.dRepId ?? 
undefined; + } + + if (!dRepId) { + return res.status(400).json({ error: "DRep is not configured for this wallet" }); + } + + const config = getBlockfrostConfig(network); + if (!config) { + return res.status(500).json({ error: `Missing Blockfrost API key for network ${network}` }); + } + + try { + const response = await fetch(`${config.baseUrl}/governance/dreps/${encodeURIComponent(dRepId)}`, { + headers: { project_id: config.key }, + }); + + if (response.status === 404) { + return res.status(200).json({ active: false, dRepId }); + } + if (!response.ok) { + const body = await response.text(); + console.error(`drepInfo Blockfrost error ${response.status}:`, body); + return res.status(500).json({ error: `Blockfrost returned ${response.status}` }); + } + + const data = (await response.json()) as { active?: boolean }; + return res.status(200).json({ active: data.active === true, dRepId }); + } catch (e) { + console.error("drepInfo error:", e); + return res.status(500).json({ error: "Failed to fetch DRep info" }); + } +} diff --git a/src/pages/api/v1/freeUtxos.ts b/src/pages/api/v1/freeUtxos.ts index 856332a6..1daffb12 100644 --- a/src/pages/api/v1/freeUtxos.ts +++ b/src/pages/api/v1/freeUtxos.ts @@ -4,7 +4,6 @@ import { cors, addCorsCacheBustingHeaders } from "@/lib/cors"; //remove all wallet input utxos found in pending txs from the whole pool of txs. 
import type { Wallet as DbWallet } from "@prisma/client"; import type { NextApiRequest, NextApiResponse } from "next"; -import { buildMultisigWallet } from "@/utils/common"; import { getProvider } from "@/utils/get-provider"; import { addressToNetwork } from "@/utils/multisigSDK"; import type { UTxO } from "@meshsdk/core"; @@ -15,6 +14,7 @@ import { DbWalletWithLegacy } from "@/types/wallet"; import { applyRateLimit, applyBotRateLimit } from "@/lib/security/requestGuards"; import { getClientIP } from "@/lib/security/rateLimit"; import { assertBotWalletAccess, getBotWalletAccess } from "@/lib/auth/botAccess"; +import { resolveWalletScriptAddress } from "@/lib/server/walletScriptAddress"; export default async function handler( req: NextApiRequest, @@ -107,25 +107,41 @@ export default async function handler( if (!walletFetch) { return res.status(404).json({ error: "Wallet not found" }); } - const mWallet = buildMultisigWallet(walletFetch as DbWalletWithLegacy); - if (!mWallet) { - return res.status(500).json({ error: "Wallet could not be constructed" }); + let addr: string; + try { + addr = resolveWalletScriptAddress( + walletFetch as DbWalletWithLegacy, + address, + ); + } catch (error) { + const message = error instanceof Error ? 
error.message : "unknown error"; + return res.status(500).json({ + error: `Wallet script address resolution failed: ${message}`, + }); } - const addr = mWallet.getScript().address; const network = addressToNetwork(addr); const blockchainProvider = getProvider(network); + const fresh = req.query.fresh === "true"; - // Use cached UTxO fetch to reduce Blockfrost API calls - const { cachedFetchAddressUTxOs } = await import("@/utils/blockchain-cache"); - const utxos: UTxO[] = await cachedFetchAddressUTxOs(blockchainProvider, addr, network); + let utxos: UTxO[]; + if (fresh) { + utxos = await blockchainProvider.fetchAddressUTxOs(addr); + } else { + const { cachedFetchAddressUTxOs } = await import("@/utils/blockchain-cache"); + utxos = await cachedFetchAddressUTxOs(blockchainProvider, addr, network); + } const blockedUtxos: { hash: string; index: number }[] = pendingTxsResult.flatMap((m): { hash: string; index: number }[] => { try { const txJson: { inputs: { txIn: { txHash: string; txIndex: number } }[]; + multisig?: { submissionError?: string | null }; } = JSON.parse(m.txJson); + // A tx that was broadcast but rejected by the node has a submissionError. + // Its inputs are still unspent on-chain — don't block them. + if (txJson.multisig?.submissionError) return []; return txJson.inputs.map((n) => ({ hash: n.txIn.txHash, index: n.txIn.txIndex, @@ -145,10 +161,9 @@ export default async function handler( ), ); - // Set cache headers for CDN/edge caching res.setHeader( "Cache-Control", - "public, s-maxage=30, stale-while-revalidate=60", + fresh ? 
"no-store" : "public, s-maxage=30, stale-while-revalidate=60", ); res.status(200).json(freeUtxos); } catch (error) { diff --git a/src/pages/api/v1/nativeScript.ts b/src/pages/api/v1/nativeScript.ts index a096bb12..cea0a6fb 100644 --- a/src/pages/api/v1/nativeScript.ts +++ b/src/pages/api/v1/nativeScript.ts @@ -76,42 +76,39 @@ export default async function handler( return res.status(404).json({ error: "Wallet not found" }); } const dbWallet = walletFetch as DbWalletWithLegacy; - const mWallet = buildMultisigWallet(dbWallet); - - // If SDK wallet not available, try to decode from stored CBOR (imported wallets) - if (!mWallet) { - const multisig = dbWallet.rawImportBodies?.multisig; - const paymentCbor = multisig?.payment_script; - const stakeCbor = multisig?.stake_script; - - const decodedScripts: Array<{ type: string; script: unknown }> = []; - - if (paymentCbor) { - try { - const decoded = decodeNativeScriptFromCbor(paymentCbor); - decodedScripts.push({ type: "payment", script: decodedToNativeScript(decoded) }); - } catch { - // keep going; stake script may still decode - } + const multisig = dbWallet.rawImportBodies?.multisig; + const decodedScripts: Array<{ type: string; script: unknown }> = []; + const addDecodedScript = (type: "payment" | "stake", scriptCbor?: string | null) => { + const cbor = scriptCbor?.trim(); + if (!cbor) { + return; } - - if (stakeCbor) { - try { - const decoded = decodeNativeScriptFromCbor(stakeCbor); - decodedScripts.push({ type: "stake", script: decodedToNativeScript(decoded) }); - } catch { - // ignore - } + try { + const decoded = decodeNativeScriptFromCbor(cbor); + decodedScripts.push({ type, script: decodedToNativeScript(decoded) }); + } catch { + // Fall through to other canonical sources/fallbacks. 
} + }; - if (decodedScripts.length > 0) { - res.setHeader( - "Cache-Control", - "private, max-age=300, stale-while-revalidate=600", - ); - return res.status(200).json(decodedScripts); - } + // Canonical source for the wallet payment script. + addDecodedScript("payment", dbWallet.scriptCbor); + if (decodedScripts.length === 0) { + // Imported wallets can carry payment script in raw import body. + addDecodedScript("payment", multisig?.payment_script); + } + addDecodedScript("stake", multisig?.stake_script); + + if (decodedScripts.length > 0) { + res.setHeader( + "Cache-Control", + "private, max-age=300, stale-while-revalidate=600", + ); + return res.status(200).json(decodedScripts); + } + const mWallet = buildMultisigWallet(dbWallet); + if (!mWallet) { return res.status(500).json({ error: "Wallet could not be constructed", }); diff --git a/src/pages/api/v1/proxies.ts b/src/pages/api/v1/proxies.ts new file mode 100644 index 00000000..11f8b773 --- /dev/null +++ b/src/pages/api/v1/proxies.ts @@ -0,0 +1,71 @@ +import type { NextApiRequest, NextApiResponse } from "next"; +import { db } from "@/server/db"; +import { verifyJwt, isBotJwt } from "@/lib/verifyJwt"; +import { cors, addCorsCacheBustingHeaders } from "@/lib/cors"; +import { applyRateLimit, applyBotRateLimit } from "@/lib/security/requestGuards"; +import { authorizeProxyReadForV1 } from "@/lib/server/proxyAccess"; + +export default async function handler( + req: NextApiRequest, + res: NextApiResponse, +) { + addCorsCacheBustingHeaders(res); + + if (!applyRateLimit(req, res, { keySuffix: "v1/proxies" })) { + return; + } + + await cors(req, res); + if (req.method === "OPTIONS") { + return res.status(200).end(); + } + if (req.method !== "GET") { + return res.status(405).json({ error: "Method Not Allowed" }); + } + + const authHeader = req.headers.authorization; + const token = authHeader?.startsWith("Bearer ") ? 
authHeader.slice(7) : null; + if (!token) { + return res.status(401).json({ error: "Unauthorized - Missing token" }); + } + + const payload = verifyJwt(token); + if (!payload) { + return res.status(401).json({ error: "Invalid or expired token" }); + } + + if (isBotJwt(payload) && !applyBotRateLimit(req, res, payload.botId)) { + return; + } + + const walletId = typeof req.query.walletId === "string" ? req.query.walletId : ""; + const address = typeof req.query.address === "string" ? req.query.address : ""; + if (!walletId) { + return res.status(400).json({ error: "Invalid walletId parameter" }); + } + if (!address) { + return res.status(400).json({ error: "Invalid address parameter" }); + } + + try { + await authorizeProxyReadForV1({ db, payload, walletId, address }); + } catch (error) { + const code = (error as { code?: string }).code; + if (code === "NOT_FOUND") { + return res.status(404).json({ error: "Wallet not found" }); + } + return res.status(403).json({ + error: error instanceof Error ? 
error.message : "Not authorized for this wallet", + }); + } + + const proxies = await db.proxy.findMany({ + where: { + walletId, + isActive: true, + }, + orderBy: { createdAt: "desc" }, + }); + + return res.status(200).json(proxies); +} diff --git a/src/pages/api/v1/proxyCleanup.ts b/src/pages/api/v1/proxyCleanup.ts new file mode 100644 index 00000000..7b5a8a70 --- /dev/null +++ b/src/pages/api/v1/proxyCleanup.ts @@ -0,0 +1,323 @@ +import type { NextApiRequest, NextApiResponse } from "next"; +import type { UTxO } from "@meshsdk/core"; +import { db } from "@/server/db"; +import { verifyJwt, isBotJwt } from "@/lib/verifyJwt"; +import { cors, addCorsCacheBustingHeaders } from "@/lib/cors"; +import { + applyRateLimit, + applyBotRateLimit, + enforceBodySize, +} from "@/lib/security/requestGuards"; +import { authorizeWalletSignerForV1Tx } from "@/lib/server/v1WalletAuth"; +import { loadActiveProxyForWallet } from "@/lib/server/proxyAccess"; +import { resolveWalletScriptAddress } from "@/lib/server/walletScriptAddress"; +import { resolveUtxoRefsFromChain } from "@/lib/server/resolveUtxoRefsFromChain"; +import { + requireAuthTokenUtxo, + resolveCollateralRefFromChain, + resolveSingleUtxoRefFromChain, + type UtxoRef, +} from "@/lib/server/proxyUtxos"; +import { createPendingMultisigTransaction } from "@/lib/server/createPendingMultisigTransaction"; +import { getProvider } from "@/utils/get-provider"; +import { getTxBuilder } from "@/utils/get-tx-builder"; +import { + buildProxyCleanupSweepTx, + buildProxyCleanupTx, + deriveProxyScripts, +} from "@/lib/server/proxyTxBuilders"; +import type { DbWalletWithLegacy } from "@/types/wallet"; + +type MeshTxBuilderWithBody = ReturnType & { + meshTxBuilderBody: unknown; +}; + +type CleanupMetadata = + | { phase: "sweep"; sweptProxyUtxos: string; preservedAuthTokens: string } + | { phase: "burn"; burnedAuthTokens: string }; + +function parseParamUtxo(value: string): UtxoRef | null { + try { + const parsed = JSON.parse(value) as 
Partial;
+    if (
+      typeof parsed.txHash === "string" &&
+      typeof parsed.outputIndex === "number" &&
+      Number.isInteger(parsed.outputIndex)
+    ) {
+      return { txHash: parsed.txHash, outputIndex: parsed.outputIndex };
+    }
+  } catch {
+    return null;
+  }
+  return null;
+}
+
+// Canonical "txHash:outputIndex" key used to compare UTxO references.
+function refKey(ref: UtxoRef): string {
+  return `${ref.txHash}:${ref.outputIndex}`;
+}
+
+// Resolves which proxy UTxOs the cleanup will sweep.
+// - No refs supplied: use every UTxO currently visible at the proxy address.
+// - Refs supplied: they must cover every visible proxy UTxO (cleanup is
+//   all-or-nothing); each ref is then re-resolved on-chain and checked
+//   against the proxy address.
+async function resolveProxyCleanupUtxos(args: {
+  network: number;
+  proxyAddress: string;
+  proxyUtxoRefs?: UtxoRef[];
+}): Promise<{ utxos: UTxO[] } | { error: string; status: number }> {
+  let visibleUtxos: UTxO[];
+  try {
+    visibleUtxos = await getProvider(args.network).fetchAddressUTxOs(args.proxyAddress);
+  } catch (error) {
+    return {
+      error: error instanceof Error ? error.message : "Failed to fetch proxy UTxOs",
+      status: 400,
+    };
+  }
+
+  if (!Array.isArray(args.proxyUtxoRefs) || args.proxyUtxoRefs.length === 0) {
+    return { utxos: visibleUtxos };
+  }
+
+  const visibleRefs = new Set(visibleUtxos.map((utxo) => refKey(utxo.input)));
+  const requestedRefs = new Set(args.proxyUtxoRefs.map(refKey));
+  for (const visibleRef of visibleRefs) {
+    if (!requestedRefs.has(visibleRef)) {
+      return {
+        error: "proxyUtxoRefs must include every currently visible proxy UTxO for cleanup",
+        status: 400,
+      };
+    }
+  }
+
+  // Re-resolve each requested ref on-chain (sequentially, first error wins)
+  // so stale or foreign refs are rejected before the transaction is built.
+  const utxos: UTxO[] = [];
+  for (const ref of args.proxyUtxoRefs) {
+    const resolved = await resolveSingleUtxoRefFromChain({
+      network: args.network,
+      ref,
+      expectedAddress: args.proxyAddress,
+    });
+    if ("error" in resolved) {
+      return resolved;
+    }
+    utxos.push(resolved.utxo);
+  }
+
+  return { utxos };
+}
+
+// POST /api/v1/proxyCleanup
+// Builds a pending multisig transaction that cleans up an active proxy:
+// a "sweep" when proxy UTxOs remain, or a "burn" of the auth token when
+// the proxy address is already empty.
+export default async function handler(
+  req: NextApiRequest,
+  res: NextApiResponse,
+) {
+  addCorsCacheBustingHeaders(res);
+
+  if (!applyRateLimit(req, res, { keySuffix: "v1/proxyCleanup" })) {
+    return;
+  }
+
+  await cors(req, res);
+  if (req.method === "OPTIONS") {
+    return res.status(200).end();
+  }
+  if (req.method !== "POST") {
+    return res.status(405).json({ error: "Method Not Allowed" });
+  }
+  if (!enforceBodySize(req, res, 200 * 1024)) {
+    return;
+  }
+
+  // Bearer-token auth shared by the v1 endpoints; bot JWTs get an extra
+  // per-bot rate limit.
+  const authHeader = req.headers.authorization;
+  const token = authHeader?.startsWith("Bearer ") ? authHeader.slice(7) : null;
+  if (!token) {
+    return res.status(401).json({ error: "Unauthorized - Missing token" });
+  }
+
+  const payload = verifyJwt(token);
+  if (!payload) {
+    return res.status(401).json({ error: "Invalid or expired token" });
+  }
+  if (isBotJwt(payload) && !applyBotRateLimit(req, res, payload.botId)) {
+    return;
+  }
+
+  const body = req.body as {
+    walletId?: string;
+    address?: string;
+    proxyId?: string;
+    utxoRefs?: UtxoRef[];
+    proxyUtxoRefs?: UtxoRef[];
+    collateralRef?: UtxoRef;
+    deactivateProxy?: boolean;
+    description?: string;
+  };
+
+  const walletId = typeof body.walletId === "string" ? body.walletId : "";
+  const address = typeof body.address === "string" ? body.address : "";
+  const proxyId = typeof body.proxyId === "string" ? body.proxyId : "";
+  if (!walletId || !address || !proxyId) {
+    return res.status(400).json({ error: "walletId, address, and proxyId are required" });
+  }
+
+  let walletRow;
+  try {
+    const authorized = await authorizeWalletSignerForV1Tx(payload, walletId, address);
+    walletRow = authorized.wallet;
+  } catch (error) {
+    const code = (error as { code?: string }).code;
+    if (code === "NOT_FOUND") {
+      return res.status(404).json({ error: "Wallet not found" });
+    }
+    return res.status(403).json({
+      error: error instanceof Error ? error.message : "Not authorized for this wallet",
+    });
+  }
+
+  let proxy;
+  try {
+    proxy = await loadActiveProxyForWallet({ db, walletId, proxyId });
+  } catch (error) {
+    return res.status(404).json({
+      error: error instanceof Error ? error.message : "Proxy not found",
+    });
+  }
+
+  const paramUtxo = parseParamUtxo(proxy.paramUtxo);
+  if (!paramUtxo) {
+    return res.status(500).json({ error: "Stored proxy paramUtxo is invalid" });
+  }
+
+  // Network inferred from the bech32 address prefix ("...test..." => 0).
+  const network = address.includes("test") ? 0 : 1;
+  // Re-derive scripts from the stored paramUtxo and reject if the stored
+  // metadata no longer matches (guards against tampered/stale rows).
+  const scripts = deriveProxyScripts({ paramUtxo, network });
+  if (scripts.authTokenId !== proxy.authTokenId || scripts.proxyAddress !== proxy.proxyAddress) {
+    return res.status(409).json({ error: "Stored proxy metadata does not match derived scripts" });
+  }
+
+  let walletAddress: string;
+  try {
+    walletAddress = resolveWalletScriptAddress(walletRow as DbWalletWithLegacy, address);
+  } catch (error) {
+    return res.status(500).json({
+      error:
+        error instanceof Error ? error.message : "Wallet script address resolution failed",
+    });
+  }
+
+  const resolvedWalletUtxos = await resolveUtxoRefsFromChain({
+    network,
+    utxoRefs: body.utxoRefs ?? [],
+    expectedSpendAddress: walletAddress,
+  });
+  if ("error" in resolvedWalletUtxos) {
+    return res.status(resolvedWalletUtxos.status).json({ error: resolvedWalletUtxos.error });
+  }
+
+  const resolvedCollateral = await resolveCollateralRefFromChain({
+    network,
+    collateralRef: body.collateralRef,
+    expectedAddress: address,
+  });
+  if ("error" in resolvedCollateral) {
+    return res.status(resolvedCollateral.status).json({ error: resolvedCollateral.error });
+  }
+
+  const proxyUtxosResult = await resolveProxyCleanupUtxos({
+    network,
+    proxyAddress: proxy.proxyAddress,
+    proxyUtxoRefs: body.proxyUtxoRefs,
+  });
+  if ("error" in proxyUtxosResult) {
+    return res.status(proxyUtxosResult.status).json({ error: proxyUtxosResult.error });
+  }
+
+  const txBuilder = getTxBuilder(network) as MeshTxBuilderWithBody;
+  let cleanup: CleanupMetadata;
+  try {
+    if (proxyUtxosResult.utxos.length > 0) {
+      // Sweep phase: spending proxy UTxOs requires the auth-token UTxO
+      // to be among the provided wallet inputs.
+      const authTokenUtxo = requireAuthTokenUtxo(
+        resolvedWalletUtxos.utxos,
+        proxy.authTokenId,
+      );
+      if ("error" in authTokenUtxo) {
+        return res.status(authTokenUtxo.status).json({ error: authTokenUtxo.error });
+      }
+      cleanup = {
+        phase: "sweep",
+        ...buildProxyCleanupSweepTx({
+          txBuilder,
+          network,
+          paramUtxo,
+          proxyAddress: proxy.proxyAddress,
+          proxyUtxos: proxyUtxosResult.utxos,
+          walletUtxos: resolvedWalletUtxos.utxos,
+          authTokenUtxo,
+          collateral: resolvedCollateral.collateral,
+          walletAddress,
+          multisigScriptCbor: walletRow.scriptCbor,
+        }),
+      };
+    } else {
+      // Burn phase: proxy address is empty, so burn the auth token.
+      cleanup = {
+        phase: "burn",
+        ...buildProxyCleanupTx({
+          txBuilder,
+          network,
+          paramUtxo,
+          walletUtxos: resolvedWalletUtxos.utxos,
+          collateral: resolvedCollateral.collateral,
+          walletAddress,
+          authTokenId: proxy.authTokenId,
+          multisigScriptCbor: walletRow.scriptCbor,
+        }),
+      };
+    }
+  } catch (error) {
+    return res.status(400).json({
+      error: error instanceof Error ? error.message : "Failed to build proxy cleanup",
+    });
+  }
+
+  let txCbor: string;
+  try {
+    txCbor = await txBuilder.complete();
+  } catch (error) {
+    console.error("proxyCleanup complete error:", error);
+    return res.status(500).json({
+      error: error instanceof Error ? error.message : "Failed to build transaction",
+    });
+  }
+
+  const description =
+    typeof body.description === "string" && body.description.trim()
+      ? body.description.trim()
+      : "Proxy cleanup transaction";
+  // Persist the builder body plus a proxyBot marker; deactivateProxy
+  // defaults to true unless the caller explicitly sends false.
+  const txJson = {
+    ...(typeof txBuilder.meshTxBuilderBody === "object" &&
+    txBuilder.meshTxBuilderBody !== null
+      ? (txBuilder.meshTxBuilderBody as Record)
+      : {}),
+    proxyBot: {
+      kind: "proxyCleanup",
+      proxyId,
+      cleanup,
+      deactivateProxy: body.deactivateProxy !== false,
+      description,
+    },
+  };
+
+  try {
+    const transaction = await createPendingMultisigTransaction(db, {
+      walletId,
+      wallet: {
+        numRequiredSigners: walletRow.numRequiredSigners,
+        type: walletRow.type,
+      },
+      proposerAddress: address,
+      txCbor,
+      txJson,
+      description,
+      network,
+      initialSignedAddresses: [],
+    });
+    return res.status(201).json({ transaction, cleanup });
+  } catch (error) {
+    console.error("proxyCleanup persist error:", error);
+    return res.status(500).json({ error: "Internal Server Error" });
+  }
+}
diff --git a/src/pages/api/v1/proxyCleanupFinalize.ts b/src/pages/api/v1/proxyCleanupFinalize.ts
new file mode 100644
index 00000000..5cad5735
--- /dev/null
+++ b/src/pages/api/v1/proxyCleanupFinalize.ts
@@ -0,0 +1,121 @@
+import type { NextApiRequest, NextApiResponse } from "next";
+import { db } from "@/server/db";
+import { verifyJwt, isBotJwt } from "@/lib/verifyJwt";
+import { cors, addCorsCacheBustingHeaders } from "@/lib/cors";
+import {
+  applyRateLimit,
+  applyBotRateLimit,
+  enforceBodySize,
+} from "@/lib/security/requestGuards";
+import { authorizeWalletSignerForV1Tx } from "@/lib/server/v1WalletAuth";
+import { loadActiveProxyForWallet } from "@/lib/server/proxyAccess";
+import { resolveWalletScriptAddress } from "@/lib/server/walletScriptAddress";
+import { finalizeConfirmedProxyCleanup } from "@/lib/server/proxyCleanupFinalization";
+import type { DbWalletWithLegacy } from "@/types/wallet";
+
+// POST /api/v1/proxyCleanupFinalize
+// Records an on-chain-confirmed proxy-cleanup transaction against the proxy row.
+export default async function handler(
+  req: NextApiRequest,
+  res: NextApiResponse,
+) {
+  addCorsCacheBustingHeaders(res);
+
+  if (!applyRateLimit(req, res, { keySuffix: "v1/proxyCleanupFinalize" })) {
+    return;
+  }
+
+  await cors(req, res);
+  if (req.method === "OPTIONS") {
+    return res.status(200).end();
+  }
+  if (req.method !== "POST") {
+    return res.status(405).json({ error: "Method
Not Allowed" });
+  }
+  if (!enforceBodySize(req, res, 100 * 1024)) {
+    return;
+  }
+
+  const authHeader = req.headers.authorization;
+  const token = authHeader?.startsWith("Bearer ") ? authHeader.slice(7) : null;
+  if (!token) {
+    return res.status(401).json({ error: "Unauthorized - Missing token" });
+  }
+
+  const payload = verifyJwt(token);
+  if (!payload) {
+    return res.status(401).json({ error: "Invalid or expired token" });
+  }
+  if (isBotJwt(payload) && !applyBotRateLimit(req, res, payload.botId)) {
+    return;
+  }
+
+  const body = req.body as {
+    walletId?: string;
+    address?: string;
+    proxyId?: string;
+    txHash?: string;
+    deactivateProxy?: boolean;
+  };
+
+  const walletId = typeof body.walletId === "string" ? body.walletId : "";
+  const address = typeof body.address === "string" ? body.address : "";
+  const proxyId = typeof body.proxyId === "string" ? body.proxyId : "";
+  const txHash = typeof body.txHash === "string" ? body.txHash.trim() : "";
+  if (!walletId || !address || !proxyId) {
+    return res.status(400).json({ error: "walletId, address, and proxyId are required" });
+  }
+  if (!txHash) {
+    return res.status(400).json({ error: "Missing required field txHash" });
+  }
+
+  let walletRow;
+  try {
+    const authorized = await authorizeWalletSignerForV1Tx(payload, walletId, address);
+    walletRow = authorized.wallet;
+  } catch (error) {
+    const code = (error as { code?: string }).code;
+    if (code === "NOT_FOUND") {
+      return res.status(404).json({ error: "Wallet not found" });
+    }
+    return res.status(403).json({
+      error: error instanceof Error ? error.message : "Not authorized for this wallet",
+    });
+  }
+
+  let proxy;
+  try {
+    proxy = await loadActiveProxyForWallet({ db, walletId, proxyId });
+  } catch (error) {
+    return res.status(404).json({
+      error: error instanceof Error ? error.message : "Proxy not found",
+    });
+  }
+
+  let walletAddress: string;
+  try {
+    walletAddress = resolveWalletScriptAddress(
+      walletRow as DbWalletWithLegacy,
+      address,
+    );
+  } catch (error) {
+    return res.status(500).json({
+      error:
+        error instanceof Error ? error.message : "Wallet script address resolution failed",
+    });
+  }
+
+  // Network inferred from the address prefix; the finalization helper is
+  // responsible for validating txHash against chain state.
+  const network = address.includes("test") ? 0 : 1;
+  const result = await finalizeConfirmedProxyCleanup({
+    db,
+    network,
+    proxy,
+    walletAddress,
+    txHash,
+    deactivateProxy: body.deactivateProxy,
+  });
+
+  if ("error" in result) {
+    return res.status(result.status).json({ error: result.error });
+  }
+
+  return res.status(201).json({ proxy: result.proxy, txHash });
+}
diff --git a/src/pages/api/v1/proxyDRepCertificate.ts b/src/pages/api/v1/proxyDRepCertificate.ts
new file mode 100644
index 00000000..ef2b3027
--- /dev/null
+++ b/src/pages/api/v1/proxyDRepCertificate.ts
@@ -0,0 +1,258 @@
+import type { NextApiRequest, NextApiResponse } from "next";
+import { db } from "@/server/db";
+import { verifyJwt, isBotJwt } from "@/lib/verifyJwt";
+import { cors, addCorsCacheBustingHeaders } from "@/lib/cors";
+import {
+  applyRateLimit,
+  applyBotRateLimit,
+  enforceBodySize,
+} from "@/lib/security/requestGuards";
+import { authorizeWalletSignerForV1Tx } from "@/lib/server/v1WalletAuth";
+import { loadActiveProxyForWallet } from "@/lib/server/proxyAccess";
+import { resolveWalletScriptAddress } from "@/lib/server/walletScriptAddress";
+import { resolveUtxoRefsFromChain } from "@/lib/server/resolveUtxoRefsFromChain";
+import {
+  requireAuthTokenUtxo,
+  resolveCollateralRefFromChain,
+  type UtxoRef,
+} from "@/lib/server/proxyUtxos";
+import { createPendingMultisigTransaction } from "@/lib/server/createPendingMultisigTransaction";
+import { getTxBuilder } from "@/utils/get-tx-builder";
+import {
+  buildProxyDRepCertificateTx,
+  deriveProxyScripts,
+} from "@/lib/server/proxyTxBuilders";
+import type { DbWalletWithLegacy } from
"@/types/wallet";
+
+type ProxyDRepAction = "register" | "update" | "deregister";
+type MeshTxBuilderWithBody = ReturnType & {
+  meshTxBuilderBody: unknown;
+};
+
+// Parses the JSON-serialized paramUtxo stored on the proxy row; returns
+// null for any malformed value instead of throwing.
+function parseParamUtxo(value: string): UtxoRef | null {
+  try {
+    const parsed = JSON.parse(value) as Partial;
+    if (
+      typeof parsed.txHash === "string" &&
+      typeof parsed.outputIndex === "number" &&
+      Number.isInteger(parsed.outputIndex)
+    ) {
+      return { txHash: parsed.txHash, outputIndex: parsed.outputIndex };
+    }
+  } catch {
+    return null;
+  }
+  return null;
+}
+
+// Type guard narrowing a raw string to a supported DRep action.
+function isProxyDRepAction(action: string): action is ProxyDRepAction {
+  return action === "register" || action === "update" || action === "deregister";
+}
+
+// POST /api/v1/proxyDRepCertificate
+// Builds a pending multisig transaction carrying a DRep certificate
+// (register / update / deregister) issued through the proxy.
+export default async function handler(
+  req: NextApiRequest,
+  res: NextApiResponse,
+) {
+  addCorsCacheBustingHeaders(res);
+
+  if (!applyRateLimit(req, res, { keySuffix: "v1/proxyDRepCertificate" })) {
+    return;
+  }
+
+  await cors(req, res);
+  if (req.method === "OPTIONS") {
+    return res.status(200).end();
+  }
+  if (req.method !== "POST") {
+    return res.status(405).json({ error: "Method Not Allowed" });
+  }
+  if (!enforceBodySize(req, res, 200 * 1024)) {
+    return;
+  }
+
+  const authHeader = req.headers.authorization;
+  const token = authHeader?.startsWith("Bearer ") ? authHeader.slice(7) : null;
+  if (!token) {
+    return res.status(401).json({ error: "Unauthorized - Missing token" });
+  }
+
+  const payload = verifyJwt(token);
+  if (!payload) {
+    return res.status(401).json({ error: "Invalid or expired token" });
+  }
+  if (isBotJwt(payload) && !applyBotRateLimit(req, res, payload.botId)) {
+    return;
+  }
+
+  const body = req.body as {
+    walletId?: string;
+    address?: string;
+    proxyId?: string;
+    action?: string;
+    utxoRefs?: UtxoRef[];
+    collateralRef?: UtxoRef;
+    anchorUrl?: string;
+    anchorJson?: unknown;
+    description?: string;
+  };
+
+  const walletId = typeof body.walletId === "string" ? body.walletId : "";
+  const address = typeof body.address === "string" ? body.address : "";
+  const proxyId = typeof body.proxyId === "string" ? body.proxyId : "";
+  const actionRaw = typeof body.action === "string" ? body.action : "";
+  if (!walletId || !address || !proxyId) {
+    return res.status(400).json({ error: "walletId, address, and proxyId are required" });
+  }
+  if (!isProxyDRepAction(actionRaw)) {
+    return res.status(400).json({ error: "Invalid or missing action (register, update, deregister)" });
+  }
+  const action = actionRaw;
+
+  // Anchor metadata (URL + JSON object) is mandatory for register/update.
+  const anchorUrl = typeof body.anchorUrl === "string" ? body.anchorUrl.trim() : "";
+  const anchorJson =
+    body.anchorJson && typeof body.anchorJson === "object" && !Array.isArray(body.anchorJson)
+      ? (body.anchorJson as object)
+      : undefined;
+  if ((action === "register" || action === "update") && (!anchorUrl || !anchorJson)) {
+    return res.status(400).json({ error: "anchorUrl and anchorJson are required for register and update" });
+  }
+
+  let walletRow;
+  try {
+    const authorized = await authorizeWalletSignerForV1Tx(payload, walletId, address);
+    walletRow = authorized.wallet;
+  } catch (error) {
+    const code = (error as { code?: string }).code;
+    if (code === "NOT_FOUND") {
+      return res.status(404).json({ error: "Wallet not found" });
+    }
+    return res.status(403).json({
+      error: error instanceof Error ? error.message : "Not authorized for this wallet",
+    });
+  }
+
+  let proxy;
+  try {
+    proxy = await loadActiveProxyForWallet({ db, walletId, proxyId });
+  } catch (error) {
+    return res.status(404).json({
+      error: error instanceof Error ? error.message : "Proxy not found",
+    });
+  }
+
+  const paramUtxo = parseParamUtxo(proxy.paramUtxo);
+  if (!paramUtxo) {
+    return res.status(500).json({ error: "Stored proxy paramUtxo is invalid" });
+  }
+
+  // Network inferred from the bech32 address prefix; scripts are re-derived
+  // and must match the stored proxy metadata.
+  const network = address.includes("test") ? 0 : 1;
+  const scripts = deriveProxyScripts({ paramUtxo, network });
+  if (scripts.authTokenId !== proxy.authTokenId || scripts.proxyAddress !== proxy.proxyAddress) {
+    return res.status(409).json({ error: "Stored proxy metadata does not match derived scripts" });
+  }
+
+  let walletAddress: string;
+  try {
+    walletAddress = resolveWalletScriptAddress(walletRow as DbWalletWithLegacy, address);
+  } catch (error) {
+    return res.status(500).json({
+      error:
+        error instanceof Error ? error.message : "Wallet script address resolution failed",
+    });
+  }
+
+  const resolvedWalletUtxos = await resolveUtxoRefsFromChain({
+    network,
+    utxoRefs: body.utxoRefs ?? [],
+    expectedSpendAddress: walletAddress,
+  });
+  if ("error" in resolvedWalletUtxos) {
+    return res.status(resolvedWalletUtxos.status).json({ error: resolvedWalletUtxos.error });
+  }
+
+  // DRep certificates always require the proxy auth-token UTxO as input.
+  const authTokenUtxo = requireAuthTokenUtxo(
+    resolvedWalletUtxos.utxos,
+    proxy.authTokenId,
+  );
+  if ("error" in authTokenUtxo) {
+    return res.status(authTokenUtxo.status).json({ error: authTokenUtxo.error });
+  }
+
+  const resolvedCollateral = await resolveCollateralRefFromChain({
+    network,
+    collateralRef: body.collateralRef,
+    expectedAddress: address,
+  });
+  if ("error" in resolvedCollateral) {
+    return res.status(resolvedCollateral.status).json({ error: resolvedCollateral.error });
+  }
+
+  const txBuilder = getTxBuilder(network) as MeshTxBuilderWithBody;
+  let details: { dRepId: string; anchorDataHash?: string };
+  try {
+    details = buildProxyDRepCertificateTx({
+      txBuilder,
+      network,
+      paramUtxo,
+      walletUtxos: resolvedWalletUtxos.utxos,
+      authTokenUtxo,
+      collateral: resolvedCollateral.collateral,
+      walletAddress,
+      action,
+      anchorUrl,
+      anchorJson,
+      multisigScriptCbor: walletRow.scriptCbor,
+    });
+  } catch (error) {
+    return res.status(400).json({
+      error: error instanceof Error ? error.message : "Failed to build proxy DRep certificate",
+    });
+  }
+
+  let txCbor: string;
+  try {
+    txCbor = await txBuilder.complete();
+  } catch (error) {
+    console.error("proxyDRepCertificate complete error:", error);
+    return res.status(500).json({
+      error: error instanceof Error ? error.message : "Failed to build transaction",
+    });
+  }
+
+  const description =
+    typeof body.description === "string" && body.description.trim()
+      ? body.description.trim()
+      : `Proxy DRep ${action}`;
+
+  try {
+    const transaction = await createPendingMultisigTransaction(db, {
+      walletId,
+      wallet: {
+        numRequiredSigners: walletRow.numRequiredSigners,
+        type: walletRow.type,
+      },
+      proposerAddress: address,
+      txCbor,
+      // Builder body plus a proxyBot marker identifying the certificate intent.
+      txJson: {
+        ...(typeof txBuilder.meshTxBuilderBody === "object" &&
+        txBuilder.meshTxBuilderBody !== null
+          ? (txBuilder.meshTxBuilderBody as Record)
+          : {}),
+        proxyBot: {
+          kind: "proxyDRepCertificate",
+          proxyId,
+          action,
+          dRepId: details.dRepId,
+          anchorDataHash: details.anchorDataHash,
+        },
+      },
+      description,
+      network,
+      initialSignedAddresses: [],
+    });
+    return res.status(201).json(transaction);
+  } catch (error) {
+    console.error("proxyDRepCertificate persist error:", error);
+    return res.status(500).json({ error: "Internal Server Error" });
+  }
+}
diff --git a/src/pages/api/v1/proxyDRepInfo.ts b/src/pages/api/v1/proxyDRepInfo.ts
new file mode 100644
index 00000000..f13e57eb
--- /dev/null
+++ b/src/pages/api/v1/proxyDRepInfo.ts
@@ -0,0 +1,134 @@
+import type { NextApiRequest, NextApiResponse } from "next";
+import { db } from "@/server/db";
+import { env } from "@/env";
+import { verifyJwt, isBotJwt } from "@/lib/verifyJwt";
+import { cors, addCorsCacheBustingHeaders } from "@/lib/cors";
+import { applyRateLimit, applyBotRateLimit } from "@/lib/security/requestGuards";
+import { authorizeProxyReadForV1, loadActiveProxyForWallet } from "@/lib/server/proxyAccess";
+import { deriveProxyScripts } from "@/lib/server/proxyTxBuilders";
+import type { UtxoRef }
from "@/lib/server/proxyUtxos";
+
+// Picks the Blockfrost key and base URL for the network (0 = preprod,
+// 1 = mainnet); server-side keys take precedence over the public ones.
+function getBlockfrostConfig(network: 0 | 1): { key: string; baseUrl: string } | null {
+  if (network === 0) {
+    const key = env.BLOCKFROST_API_KEY_PREPROD ?? env.NEXT_PUBLIC_BLOCKFROST_API_KEY_PREPROD;
+    if (!key) return null;
+    return { key, baseUrl: "https://cardano-preprod.blockfrost.io/api/v0" };
+  }
+  const key = env.BLOCKFROST_API_KEY_MAINNET ?? env.NEXT_PUBLIC_BLOCKFROST_API_KEY_MAINNET;
+  if (!key) return null;
+  return { key, baseUrl: "https://cardano-mainnet.blockfrost.io/api/v0" };
+}
+
+// Parses the JSON-serialized paramUtxo stored on the proxy row; returns
+// null for any malformed value instead of throwing.
+function parseParamUtxo(value: string): UtxoRef | null {
+  try {
+    const parsed = JSON.parse(value) as Partial;
+    if (
+      typeof parsed.txHash === "string" &&
+      typeof parsed.outputIndex === "number" &&
+      Number.isInteger(parsed.outputIndex)
+    ) {
+      return { txHash: parsed.txHash, outputIndex: parsed.outputIndex };
+    }
+  } catch {
+    return null;
+  }
+  return null;
+}
+
+// GET /api/v1/proxyDRepInfo
+// Read-only: reports whether the proxy's derived DRep is registered,
+// by querying Blockfrost's governance endpoint.
+export default async function handler(
+  req: NextApiRequest,
+  res: NextApiResponse,
+) {
+  addCorsCacheBustingHeaders(res);
+
+  if (!applyRateLimit(req, res, { keySuffix: "v1/proxyDRepInfo" })) {
+    return;
+  }
+
+  await cors(req, res);
+  if (req.method === "OPTIONS") {
+    return res.status(200).end();
+  }
+  if (req.method !== "GET") {
+    return res.status(405).json({ error: "Method Not Allowed" });
+  }
+
+  const authHeader = req.headers.authorization;
+  const token = authHeader?.startsWith("Bearer ") ? authHeader.slice(7) : null;
+  if (!token) {
+    return res.status(401).json({ error: "Unauthorized - Missing token" });
+  }
+
+  const payload = verifyJwt(token);
+  if (!payload) {
+    return res.status(401).json({ error: "Invalid or expired token" });
+  }
+  if (isBotJwt(payload) && !applyBotRateLimit(req, res, payload.botId)) {
+    return;
+  }
+
+  const walletId = typeof req.query.walletId === "string" ? req.query.walletId : "";
+  const address = typeof req.query.address === "string" ? req.query.address : "";
+  const proxyId = typeof req.query.proxyId === "string" ? req.query.proxyId : "";
+  if (!walletId || !address || !proxyId) {
+    return res.status(400).json({ error: "walletId, address, and proxyId are required" });
+  }
+
+  // Read-only authorization (distinct from the signer check used by tx routes).
+  try {
+    await authorizeProxyReadForV1({ db, payload, walletId, address });
+  } catch (error) {
+    const code = (error as { code?: string }).code;
+    if (code === "NOT_FOUND") {
+      return res.status(404).json({ error: "Wallet not found" });
+    }
+    return res.status(403).json({
+      error: error instanceof Error ? error.message : "Not authorized for this wallet",
+    });
+  }
+
+  let proxy;
+  try {
+    proxy = await loadActiveProxyForWallet({ db, walletId, proxyId });
+  } catch (error) {
+    return res.status(404).json({
+      error: error instanceof Error ? error.message : "Proxy not found",
+    });
+  }
+
+  const paramUtxo = parseParamUtxo(proxy.paramUtxo);
+  if (!paramUtxo) {
+    return res.status(500).json({ error: "Stored proxy paramUtxo is invalid" });
+  }
+
+  const network: 0 | 1 = address.includes("test") ? 0 : 1;
+  const scripts = deriveProxyScripts({ paramUtxo, network });
+  if (scripts.authTokenId !== proxy.authTokenId || scripts.proxyAddress !== proxy.proxyAddress) {
+    return res.status(409).json({ error: "Stored proxy metadata does not match derived scripts" });
+  }
+
+  const config = getBlockfrostConfig(network);
+  if (!config) {
+    return res.status(500).json({ error: `Missing Blockfrost API key for network ${network}` });
+  }
+
+  try {
+    const response = await fetch(`${config.baseUrl}/governance/dreps/${encodeURIComponent(scripts.dRepId)}`, {
+      headers: { project_id: config.key },
+    });
+
+    // Blockfrost 404 means the DRep has never been registered.
+    if (response.status === 404) {
+      return res.status(200).json({ active: false, dRepId: scripts.dRepId });
+    }
+    if (!response.ok) {
+      const body = await response.text();
+      console.error(`proxyDRepInfo Blockfrost error ${response.status}:`, body);
+      return res.status(500).json({ error: `Blockfrost returned ${response.status}` });
+    }
+
+    const data = (await response.json()) as { active?: boolean };
+    return res.status(200).json({ active: data.active === true, dRepId: scripts.dRepId });
+  } catch (error) {
+    console.error("proxyDRepInfo error:", error);
+    return res.status(500).json({ error: "Failed to fetch proxy DRep info" });
+  }
+}
diff --git a/src/pages/api/v1/proxySetup.ts b/src/pages/api/v1/proxySetup.ts
new file mode 100644
index 00000000..837922c5
--- /dev/null
+++ b/src/pages/api/v1/proxySetup.ts
@@ -0,0 +1,222 @@
+import type { NextApiRequest, NextApiResponse } from "next";
+import { db } from "@/server/db";
+import { verifyJwt, isBotJwt } from "@/lib/verifyJwt";
+import { cors, addCorsCacheBustingHeaders } from "@/lib/cors";
+import {
+  applyRateLimit,
+  applyBotRateLimit,
+  enforceBodySize,
+} from "@/lib/security/requestGuards";
+import { authorizeWalletSignerForV1Tx } from "@/lib/server/v1WalletAuth";
+import { resolveWalletScriptAddress } from "@/lib/server/walletScriptAddress";
+import { resolveUtxoRefsFromChain } from "@/lib/server/resolveUtxoRefsFromChain";
+import { resolveCollateralRefFromChain, type UtxoRef } from "@/lib/server/proxyUtxos";
+import { createPendingMultisigTransaction } from "@/lib/server/createPendingMultisigTransaction";
+import { getTxBuilder } from "@/utils/get-tx-builder";
+import {
+  buildProxySetupTx,
+  DEFAULT_PROXY_SETUP_LOVELACE,
+} from "@/lib/server/proxyTxBuilders";
+import type { DbWalletWithLegacy } from "@/types/wallet";
+
+type MeshTxBuilderWithBody = ReturnType & {
+  meshTxBuilderBody: unknown;
+};
+
+// Validates the optional initialProxyLovelace override.
+// Returns undefined when absent, the validated string when acceptable,
+// or an { error } object describing the rejection.
+function validateInitialProxyLovelace(
+  value: unknown,
+): string | { error: string } | undefined {
+  if (value === undefined || value === null) {
+    return undefined;
+  }
+  const initialProxyLovelace = typeof value === "string" ?
value.trim() : "";
+  // Decimal digits only; non-strings normalize to "" and fail here.
+  if (!/^[0-9]+$/.test(initialProxyLovelace)) {
+    return { error: "initialProxyLovelace must be a positive integer string" };
+  }
+
+  const lovelace = BigInt(initialProxyLovelace);
+  if (lovelace <= BigInt(0)) {
+    return { error: "initialProxyLovelace must be a positive integer string" };
+  }
+  if (lovelace < BigInt(DEFAULT_PROXY_SETUP_LOVELACE)) {
+    return {
+      error: `initialProxyLovelace must be at least ${DEFAULT_PROXY_SETUP_LOVELACE}`,
+    };
+  }
+
+  return initialProxyLovelace;
+}
+
+// POST /api/v1/proxySetup
+// Builds a pending multisig transaction that deploys a new proxy for the wallet.
+export default async function handler(
+  req: NextApiRequest,
+  res: NextApiResponse,
+) {
+  addCorsCacheBustingHeaders(res);
+
+  if (!applyRateLimit(req, res, { keySuffix: "v1/proxySetup" })) {
+    return;
+  }
+
+  await cors(req, res);
+  if (req.method === "OPTIONS") {
+    return res.status(200).end();
+  }
+  if (req.method !== "POST") {
+    return res.status(405).json({ error: "Method Not Allowed" });
+  }
+  if (!enforceBodySize(req, res, 200 * 1024)) {
+    return;
+  }
+
+  const authHeader = req.headers.authorization;
+  const token = authHeader?.startsWith("Bearer ") ? authHeader.slice(7) : null;
+  if (!token) {
+    return res.status(401).json({ error: "Unauthorized - Missing token" });
+  }
+
+  const payload = verifyJwt(token);
+  if (!payload) {
+    return res.status(401).json({ error: "Invalid or expired token" });
+  }
+  if (isBotJwt(payload) && !applyBotRateLimit(req, res, payload.botId)) {
+    return;
+  }
+
+  const body = req.body as {
+    walletId?: string;
+    address?: string;
+    utxoRefs?: UtxoRef[];
+    collateralRef?: UtxoRef;
+    initialProxyLovelace?: string;
+    description?: string;
+  };
+
+  const walletId = typeof body.walletId === "string" ? body.walletId : "";
+  const address = typeof body.address === "string" ? body.address : "";
+  if (!walletId) {
+    return res.status(400).json({ error: "Missing required field walletId" });
+  }
+  if (!address) {
+    return res.status(400).json({ error: "Missing required field address" });
+  }
+
+  // undefined = not provided; string = valid; object = validation error.
+  const initialProxyLovelace = validateInitialProxyLovelace(
+    body.initialProxyLovelace,
+  );
+  if (initialProxyLovelace && typeof initialProxyLovelace !== "string") {
+    return res.status(400).json({ error: initialProxyLovelace.error });
+  }
+
+  let walletRow;
+  try {
+    const authorized = await authorizeWalletSignerForV1Tx(payload, walletId, address);
+    walletRow = authorized.wallet;
+  } catch (error) {
+    const code = (error as { code?: string }).code;
+    if (code === "NOT_FOUND") {
+      return res.status(404).json({ error: "Wallet not found" });
+    }
+    return res.status(403).json({
+      error: error instanceof Error ? error.message : "Not authorized for this wallet",
+    });
+  }
+
+  const wallet = walletRow as DbWalletWithLegacy;
+  // Network inferred from the bech32 address prefix ("...test..." => 0).
+  const network = address.includes("test") ? 0 : 1;
+
+  let walletAddress: string;
+  try {
+    walletAddress = resolveWalletScriptAddress(wallet, address);
+  } catch (error) {
+    return res.status(500).json({
+      error:
+        error instanceof Error ? error.message : "Wallet script address resolution failed",
+    });
+  }
+
+  const resolvedWalletUtxos = await resolveUtxoRefsFromChain({
+    network,
+    utxoRefs: body.utxoRefs ?? [],
+    expectedSpendAddress: walletAddress,
+  });
+  if ("error" in resolvedWalletUtxos) {
+    return res
+      .status(resolvedWalletUtxos.status)
+      .json({ error: resolvedWalletUtxos.error });
+  }
+
+  const resolvedCollateral = await resolveCollateralRefFromChain({
+    network,
+    collateralRef: body.collateralRef,
+    expectedAddress: address,
+  });
+  if ("error" in resolvedCollateral) {
+    return res
+      .status(resolvedCollateral.status)
+      .json({ error: resolvedCollateral.error });
+  }
+
+  const txBuilder = getTxBuilder(network) as MeshTxBuilderWithBody;
+  let setup;
+  try {
+    setup = buildProxySetupTx({
+      txBuilder,
+      network,
+      walletUtxos: resolvedWalletUtxos.utxos,
+      walletAddress,
+      collateral: resolvedCollateral.collateral,
+      multisigScriptCbor: walletRow.scriptCbor,
+      initialProxyLovelace,
+    });
+  } catch (error) {
+    return res.status(400).json({
+      error: error instanceof Error ? error.message : "Failed to build proxy setup",
+    });
+  }
+
+  let txCbor: string;
+  try {
+    txCbor = await txBuilder.complete();
+  } catch (error) {
+    console.error("proxySetup complete error:", error);
+    return res.status(500).json({
+      error: error instanceof Error ? error.message : "Failed to build transaction",
+    });
+  }
+
+  const description =
+    typeof body.description === "string" && body.description.trim()
+      ? body.description.trim()
+      : "Proxy setup transaction";
+  // Builder body plus a proxyBot marker so finalization can recognize
+  // this transaction as a proxy deployment.
+  const txJson = {
+    ...(typeof txBuilder.meshTxBuilderBody === "object" &&
+    txBuilder.meshTxBuilderBody !== null
+      ? (txBuilder.meshTxBuilderBody as Record)
+      : {}),
+    proxyBot: {
+      kind: "proxySetup",
+      setup,
+      description,
+    },
+  };
+
+  try {
+    const transaction = await createPendingMultisigTransaction(db, {
+      walletId,
+      wallet: {
+        numRequiredSigners: walletRow.numRequiredSigners,
+        type: walletRow.type,
+      },
+      proposerAddress: address,
+      txCbor,
+      txJson,
+      description,
+      network,
+      initialSignedAddresses: [],
+    });
+    return res.status(201).json({ transaction, setup });
+  } catch (error) {
+    console.error("proxySetup persist error:", error);
+    return res.status(500).json({ error: "Internal Server Error" });
+  }
+}
diff --git a/src/pages/api/v1/proxySetupFinalize.ts b/src/pages/api/v1/proxySetupFinalize.ts
new file mode 100644
index 00000000..9fda8bc7
--- /dev/null
+++ b/src/pages/api/v1/proxySetupFinalize.ts
@@ -0,0 +1,121 @@
+import type { NextApiRequest, NextApiResponse } from "next";
+import { db } from "@/server/db";
+import { verifyJwt, isBotJwt } from "@/lib/verifyJwt";
+import { cors, addCorsCacheBustingHeaders } from "@/lib/cors";
+import {
+  applyRateLimit,
+  applyBotRateLimit,
+  enforceBodySize,
+} from "@/lib/security/requestGuards";
+import { authorizeWalletSignerForV1Tx } from "@/lib/server/v1WalletAuth";
+import { resolveWalletScriptAddress } from "@/lib/server/walletScriptAddress";
+import { finalizeConfirmedProxySetup } from "@/lib/server/proxySetupFinalization";
+import type { UtxoRef } from "@/lib/server/proxyUtxos";
+import type { DbWalletWithLegacy } from "@/types/wallet";
+
+// POST /api/v1/proxySetupFinalize
+// Records an on-chain-confirmed proxy-setup transaction and activates the proxy.
+export default async function handler(
+  req: NextApiRequest,
+  res: NextApiResponse,
+) {
+  addCorsCacheBustingHeaders(res);
+
+  if (!applyRateLimit(req, res, { keySuffix: "v1/proxySetupFinalize" })) {
+    return;
+  }
+
+  await cors(req, res);
+  if (req.method === "OPTIONS") {
+    return res.status(200).end();
+  }
+  if (req.method !== "POST") {
+    return res.status(405).json({ error: "Method Not Allowed" });
+  }
+  if (!enforceBodySize(req, res, 100 * 1024)) {
+    return;
+  }
+
+  const
authHeader = req.headers.authorization; + const token = authHeader?.startsWith("Bearer ") ? authHeader.slice(7) : null; + if (!token) { + return res.status(401).json({ error: "Unauthorized - Missing token" }); + } + + const payload = verifyJwt(token); + if (!payload) { + return res.status(401).json({ error: "Invalid or expired token" }); + } + if (isBotJwt(payload) && !applyBotRateLimit(req, res, payload.botId)) { + return; + } + + const body = req.body as { + walletId?: string; + address?: string; + txHash?: string; + proxyAddress?: string; + authTokenId?: string; + paramUtxo?: UtxoRef; + description?: string; + }; + + const walletId = typeof body.walletId === "string" ? body.walletId : ""; + const address = typeof body.address === "string" ? body.address : ""; + const txHash = typeof body.txHash === "string" ? body.txHash.trim() : ""; + if (!walletId) { + return res.status(400).json({ error: "Missing required field walletId" }); + } + if (!address) { + return res.status(400).json({ error: "Missing required field address" }); + } + if (!txHash) { + return res.status(400).json({ error: "Missing required field txHash" }); + } + + let walletRow; + try { + const authorized = await authorizeWalletSignerForV1Tx(payload, walletId, address); + walletRow = authorized.wallet; + } catch (error) { + const code = (error as { code?: string }).code; + if (code === "NOT_FOUND") { + return res.status(404).json({ error: "Wallet not found" }); + } + return res.status(403).json({ + error: error instanceof Error ? error.message : "Not authorized for this wallet", + }); + } + + let walletAddress: string; + try { + walletAddress = resolveWalletScriptAddress( + walletRow as DbWalletWithLegacy, + address, + ); + } catch (error) { + return res.status(500).json({ + error: + error instanceof Error ? error.message : "Wallet script address resolution failed", + }); + } + + const network = address.includes("test") ? 
0 : 1; + const result = await finalizeConfirmedProxySetup({ + db, + network, + walletId, + walletAddress, + txHash, + setup: { + proxyAddress: body.proxyAddress, + authTokenId: body.authTokenId, + paramUtxo: body.paramUtxo, + description: body.description, + }, + }); + + if ("error" in result) { + return res.status(result.status).json({ error: result.error }); + } + + return res.status(201).json({ proxy: result, txHash }); +} diff --git a/src/pages/api/v1/proxySpend.ts b/src/pages/api/v1/proxySpend.ts new file mode 100644 index 00000000..ca8aeaeb --- /dev/null +++ b/src/pages/api/v1/proxySpend.ts @@ -0,0 +1,312 @@ +import type { NextApiRequest, NextApiResponse } from "next"; +import type { UTxO } from "@meshsdk/core"; +import { db } from "@/server/db"; +import { verifyJwt, isBotJwt } from "@/lib/verifyJwt"; +import { cors, addCorsCacheBustingHeaders } from "@/lib/cors"; +import { + applyRateLimit, + applyBotRateLimit, + enforceBodySize, +} from "@/lib/security/requestGuards"; +import { authorizeWalletSignerForV1Tx } from "@/lib/server/v1WalletAuth"; +import { loadActiveProxyForWallet } from "@/lib/server/proxyAccess"; +import { resolveWalletScriptAddress } from "@/lib/server/walletScriptAddress"; +import { resolveUtxoRefsFromChain } from "@/lib/server/resolveUtxoRefsFromChain"; +import { + requireAuthTokenUtxo, + resolveCollateralRefFromChain, + resolveSingleUtxoRefFromChain, + selectProxyUtxosForOutputs, + type UtxoRef, +} from "@/lib/server/proxyUtxos"; +import { createPendingMultisigTransaction } from "@/lib/server/createPendingMultisigTransaction"; +import { getProvider } from "@/utils/get-provider"; +import { getTxBuilder } from "@/utils/get-tx-builder"; +import { buildProxySpendTx, deriveProxyScripts } from "@/lib/server/proxyTxBuilders"; +import type { DbWalletWithLegacy } from "@/types/wallet"; + +type ProxyOutput = { address: string; unit: string; amount: string }; +type MeshTxBuilderWithBody = ReturnType & { + meshTxBuilderBody: unknown; +}; + +function 
parseParamUtxo(value: string): UtxoRef | null {
+  try {
+    const parsed = JSON.parse(value) as Partial<UtxoRef>;
+    if (
+      typeof parsed.txHash === "string" &&
+      typeof parsed.outputIndex === "number" &&
+      Number.isInteger(parsed.outputIndex)
+    ) {
+      return { txHash: parsed.txHash, outputIndex: parsed.outputIndex };
+    }
+  } catch {
+    return null;
+  }
+  return null;
+}
+
+function validateOutputs(outputs: unknown): ProxyOutput[] | { error: string } {
+  if (!Array.isArray(outputs) || outputs.length === 0) {
+    return { error: "outputs must be a non-empty array" };
+  }
+
+  const normalized: ProxyOutput[] = [];
+  for (const output of outputs) {
+    const candidate = output as Partial<ProxyOutput>;
+    const address = typeof candidate.address === "string" ? candidate.address.trim() : "";
+    const unit = typeof candidate.unit === "string" ? candidate.unit.trim() : "";
+    const amount = typeof candidate.amount === "string" ? candidate.amount.trim() : "";
+    if (!address || !unit || !amount) {
+      return { error: "Each output requires address, unit, and amount" };
+    }
+    try {
+      if (BigInt(amount) <= BigInt(0)) {
+        return { error: "Output amount must be a positive integer string" };
+      }
+    } catch {
+      return { error: "Output amount must be a positive integer string" };
+    }
+    normalized.push({ address, unit, amount });
+  }
+
+  return normalized;
+}
+
+async function resolveProxyUtxos(args: {
+  network: number;
+  proxyAddress: string;
+  proxyUtxoRefs?: UtxoRef[];
+}): Promise<{ utxos: UTxO[] } | { error: string; status: number }> {
+  if (Array.isArray(args.proxyUtxoRefs) && args.proxyUtxoRefs.length > 0) {
+    const utxos: UTxO[] = [];
+    for (const ref of args.proxyUtxoRefs) {
+      const resolved = await resolveSingleUtxoRefFromChain({
+        network: args.network,
+        ref,
+        expectedAddress: args.proxyAddress,
+      });
+      if ("error" in resolved) {
+        return resolved;
+      }
+      utxos.push(resolved.utxo);
+    }
+    return { utxos };
+  }
+
+  try {
+    return {
+      utxos: await getProvider(args.network).fetchAddressUTxOs(args.proxyAddress),
+ }; + } catch (error) { + return { + error: + error instanceof Error ? error.message : "Failed to fetch proxy UTxOs", + status: 400, + }; + } +} + +export default async function handler( + req: NextApiRequest, + res: NextApiResponse, +) { + addCorsCacheBustingHeaders(res); + + if (!applyRateLimit(req, res, { keySuffix: "v1/proxySpend" })) { + return; + } + + await cors(req, res); + if (req.method === "OPTIONS") { + return res.status(200).end(); + } + if (req.method !== "POST") { + return res.status(405).json({ error: "Method Not Allowed" }); + } + if (!enforceBodySize(req, res, 200 * 1024)) { + return; + } + + const authHeader = req.headers.authorization; + const token = authHeader?.startsWith("Bearer ") ? authHeader.slice(7) : null; + if (!token) { + return res.status(401).json({ error: "Unauthorized - Missing token" }); + } + + const payload = verifyJwt(token); + if (!payload) { + return res.status(401).json({ error: "Invalid or expired token" }); + } + if (isBotJwt(payload) && !applyBotRateLimit(req, res, payload.botId)) { + return; + } + + const body = req.body as { + walletId?: string; + address?: string; + proxyId?: string; + outputs?: unknown; + utxoRefs?: UtxoRef[]; + proxyUtxoRefs?: UtxoRef[]; + collateralRef?: UtxoRef; + description?: string; + }; + + const walletId = typeof body.walletId === "string" ? body.walletId : ""; + const address = typeof body.address === "string" ? body.address : ""; + const proxyId = typeof body.proxyId === "string" ? 
body.proxyId : ""; + if (!walletId || !address || !proxyId) { + return res.status(400).json({ error: "walletId, address, and proxyId are required" }); + } + + const outputs = validateOutputs(body.outputs); + if ("error" in outputs) { + return res.status(400).json({ error: outputs.error }); + } + + let walletRow; + try { + const authorized = await authorizeWalletSignerForV1Tx(payload, walletId, address); + walletRow = authorized.wallet; + } catch (error) { + const code = (error as { code?: string }).code; + if (code === "NOT_FOUND") { + return res.status(404).json({ error: "Wallet not found" }); + } + return res.status(403).json({ + error: error instanceof Error ? error.message : "Not authorized for this wallet", + }); + } + + let proxy; + try { + proxy = await loadActiveProxyForWallet({ db, walletId, proxyId }); + } catch (error) { + return res.status(404).json({ + error: error instanceof Error ? error.message : "Proxy not found", + }); + } + + const paramUtxo = parseParamUtxo(proxy.paramUtxo); + if (!paramUtxo) { + return res.status(500).json({ error: "Stored proxy paramUtxo is invalid" }); + } + + const network = address.includes("test") ? 0 : 1; + const scripts = deriveProxyScripts({ paramUtxo, network }); + if (scripts.authTokenId !== proxy.authTokenId || scripts.proxyAddress !== proxy.proxyAddress) { + return res.status(409).json({ error: "Stored proxy metadata does not match derived scripts" }); + } + + let walletAddress: string; + try { + walletAddress = resolveWalletScriptAddress(walletRow as DbWalletWithLegacy, address); + } catch (error) { + return res.status(500).json({ + error: + error instanceof Error ? error.message : "Wallet script address resolution failed", + }); + } + + const resolvedWalletUtxos = await resolveUtxoRefsFromChain({ + network, + utxoRefs: body.utxoRefs ?? 
[], + expectedSpendAddress: walletAddress, + }); + if ("error" in resolvedWalletUtxos) { + return res.status(resolvedWalletUtxos.status).json({ error: resolvedWalletUtxos.error }); + } + + const authTokenUtxo = requireAuthTokenUtxo( + resolvedWalletUtxos.utxos, + proxy.authTokenId, + ); + if ("error" in authTokenUtxo) { + return res.status(authTokenUtxo.status).json({ error: authTokenUtxo.error }); + } + + const resolvedCollateral = await resolveCollateralRefFromChain({ + network, + collateralRef: body.collateralRef, + expectedAddress: address, + }); + if ("error" in resolvedCollateral) { + return res.status(resolvedCollateral.status).json({ error: resolvedCollateral.error }); + } + + const proxyUtxosResult = await resolveProxyUtxos({ + network, + proxyAddress: proxy.proxyAddress, + proxyUtxoRefs: body.proxyUtxoRefs, + }); + if ("error" in proxyUtxosResult) { + return res.status(proxyUtxosResult.status).json({ error: proxyUtxosResult.error }); + } + + const proxyUtxos = Array.isArray(body.proxyUtxoRefs) && body.proxyUtxoRefs.length > 0 + ? proxyUtxosResult.utxos + : selectProxyUtxosForOutputs({ + proxyUtxos: proxyUtxosResult.utxos, + outputs, + }); + if ("error" in proxyUtxos) { + return res.status(proxyUtxos.status).json({ error: proxyUtxos.error }); + } + + const txBuilder = getTxBuilder(network) as MeshTxBuilderWithBody; + try { + buildProxySpendTx({ + txBuilder, + network, + proxyAddress: proxy.proxyAddress, + paramUtxo, + walletUtxos: resolvedWalletUtxos.utxos, + proxyUtxos, + authTokenUtxo, + collateral: resolvedCollateral.collateral, + outputs, + walletAddress, + multisigScriptCbor: walletRow.scriptCbor, + }); + } catch (error) { + return res.status(400).json({ + error: error instanceof Error ? error.message : "Failed to build proxy spend", + }); + } + + let txCbor: string; + try { + txCbor = await txBuilder.complete(); + } catch (error) { + console.error("proxySpend complete error:", error); + return res.status(500).json({ + error: error instanceof Error ? 
error.message : "Failed to build transaction", + }); + } + + const description = + typeof body.description === "string" && body.description.trim() + ? body.description.trim() + : "Proxy spend transaction"; + + try { + const transaction = await createPendingMultisigTransaction(db, { + walletId, + wallet: { + numRequiredSigners: walletRow.numRequiredSigners, + type: walletRow.type, + }, + proposerAddress: address, + txCbor, + txJson: txBuilder.meshTxBuilderBody, + description, + network, + initialSignedAddresses: [], + }); + return res.status(201).json(transaction); + } catch (error) { + console.error("proxySpend persist error:", error); + return res.status(500).json({ error: "Internal Server Error" }); + } +} diff --git a/src/pages/api/v1/proxyVote.ts b/src/pages/api/v1/proxyVote.ts new file mode 100644 index 00000000..684d491a --- /dev/null +++ b/src/pages/api/v1/proxyVote.ts @@ -0,0 +1,278 @@ +import type { NextApiRequest, NextApiResponse } from "next"; +import { db } from "@/server/db"; +import { verifyJwt, isBotJwt } from "@/lib/verifyJwt"; +import { cors, addCorsCacheBustingHeaders } from "@/lib/cors"; +import { + applyRateLimit, + applyBotRateLimit, + enforceBodySize, +} from "@/lib/security/requestGuards"; +import { authorizeWalletSignerForV1Tx } from "@/lib/server/v1WalletAuth"; +import { loadActiveProxyForWallet } from "@/lib/server/proxyAccess"; +import { resolveWalletScriptAddress } from "@/lib/server/walletScriptAddress"; +import { resolveUtxoRefsFromChain } from "@/lib/server/resolveUtxoRefsFromChain"; +import { + requireAuthTokenUtxo, + resolveCollateralRefFromChain, + type UtxoRef, +} from "@/lib/server/proxyUtxos"; +import { createPendingMultisigTransaction } from "@/lib/server/createPendingMultisigTransaction"; +import { getTxBuilder } from "@/utils/get-tx-builder"; +import { + buildProxyVoteTx, + deriveProxyScripts, + type ProxyVoteInput, +} from "@/lib/server/proxyTxBuilders"; +import { parseProposalId } from "@/lib/governance"; +import type { 
DbWalletWithLegacy } from "@/types/wallet";
+
+type MeshTxBuilderWithBody = ReturnType<typeof getTxBuilder> & {
+  meshTxBuilderBody: unknown;
+};
+
+function parseParamUtxo(value: string): UtxoRef | null {
+  try {
+    const parsed = JSON.parse(value) as Partial<UtxoRef>;
+    if (
+      typeof parsed.txHash === "string" &&
+      typeof parsed.outputIndex === "number" &&
+      Number.isInteger(parsed.outputIndex)
+    ) {
+      return { txHash: parsed.txHash, outputIndex: parsed.outputIndex };
+    }
+  } catch {
+    return null;
+  }
+  return null;
+}
+
+function validateVotes(votes: unknown): ProxyVoteInput[] | { error: string } {
+  if (!Array.isArray(votes) || votes.length === 0) {
+    return { error: "votes must be a non-empty array" };
+  }
+
+  const normalized: ProxyVoteInput[] = [];
+  for (const vote of votes) {
+    const candidate = vote as Partial<ProxyVoteInput>;
+    const proposalId =
+      typeof candidate.proposalId === "string" ? candidate.proposalId.trim() : "";
+    if (!proposalId) {
+      return { error: "Each vote requires proposalId" };
+    }
+    try {
+      parseProposalId(proposalId);
+    } catch (error) {
+      return {
+        error: error instanceof Error ?
error.message : "Invalid proposalId", + }; + } + if ( + candidate.voteKind !== "Yes" && + candidate.voteKind !== "No" && + candidate.voteKind !== "Abstain" + ) { + return { error: "voteKind must be Yes, No, or Abstain" }; + } + normalized.push({ + proposalId, + voteKind: candidate.voteKind, + metadata: candidate.metadata, + }); + } + + return normalized; +} + +export default async function handler( + req: NextApiRequest, + res: NextApiResponse, +) { + addCorsCacheBustingHeaders(res); + + if (!applyRateLimit(req, res, { keySuffix: "v1/proxyVote" })) { + return; + } + + await cors(req, res); + if (req.method === "OPTIONS") { + return res.status(200).end(); + } + if (req.method !== "POST") { + return res.status(405).json({ error: "Method Not Allowed" }); + } + if (!enforceBodySize(req, res, 200 * 1024)) { + return; + } + + const authHeader = req.headers.authorization; + const token = authHeader?.startsWith("Bearer ") ? authHeader.slice(7) : null; + if (!token) { + return res.status(401).json({ error: "Unauthorized - Missing token" }); + } + + const payload = verifyJwt(token); + if (!payload) { + return res.status(401).json({ error: "Invalid or expired token" }); + } + if (isBotJwt(payload) && !applyBotRateLimit(req, res, payload.botId)) { + return; + } + + const body = req.body as { + walletId?: string; + address?: string; + proxyId?: string; + votes?: unknown; + utxoRefs?: UtxoRef[]; + collateralRef?: UtxoRef; + description?: string; + }; + + const walletId = typeof body.walletId === "string" ? body.walletId : ""; + const address = typeof body.address === "string" ? body.address : ""; + const proxyId = typeof body.proxyId === "string" ? 
body.proxyId : ""; + if (!walletId || !address || !proxyId) { + return res.status(400).json({ error: "walletId, address, and proxyId are required" }); + } + + const votes = validateVotes(body.votes); + if ("error" in votes) { + return res.status(400).json({ error: votes.error }); + } + + let walletRow; + try { + const authorized = await authorizeWalletSignerForV1Tx(payload, walletId, address); + walletRow = authorized.wallet; + } catch (error) { + const code = (error as { code?: string }).code; + if (code === "NOT_FOUND") { + return res.status(404).json({ error: "Wallet not found" }); + } + return res.status(403).json({ + error: error instanceof Error ? error.message : "Not authorized for this wallet", + }); + } + + let proxy; + try { + proxy = await loadActiveProxyForWallet({ db, walletId, proxyId }); + } catch (error) { + return res.status(404).json({ + error: error instanceof Error ? error.message : "Proxy not found", + }); + } + + const paramUtxo = parseParamUtxo(proxy.paramUtxo); + if (!paramUtxo) { + return res.status(500).json({ error: "Stored proxy paramUtxo is invalid" }); + } + + const network = address.includes("test") ? 0 : 1; + const scripts = deriveProxyScripts({ paramUtxo, network }); + if (scripts.authTokenId !== proxy.authTokenId || scripts.proxyAddress !== proxy.proxyAddress) { + return res.status(409).json({ error: "Stored proxy metadata does not match derived scripts" }); + } + + let walletAddress: string; + try { + walletAddress = resolveWalletScriptAddress(walletRow as DbWalletWithLegacy, address); + } catch (error) { + return res.status(500).json({ + error: + error instanceof Error ? error.message : "Wallet script address resolution failed", + }); + } + + const resolvedWalletUtxos = await resolveUtxoRefsFromChain({ + network, + utxoRefs: body.utxoRefs ?? 
[], + expectedSpendAddress: walletAddress, + }); + if ("error" in resolvedWalletUtxos) { + return res.status(resolvedWalletUtxos.status).json({ error: resolvedWalletUtxos.error }); + } + + const authTokenUtxo = requireAuthTokenUtxo( + resolvedWalletUtxos.utxos, + proxy.authTokenId, + ); + if ("error" in authTokenUtxo) { + return res.status(authTokenUtxo.status).json({ error: authTokenUtxo.error }); + } + + const resolvedCollateral = await resolveCollateralRefFromChain({ + network, + collateralRef: body.collateralRef, + expectedAddress: address, + }); + if ("error" in resolvedCollateral) { + return res.status(resolvedCollateral.status).json({ error: resolvedCollateral.error }); + } + + const txBuilder = getTxBuilder(network) as MeshTxBuilderWithBody; + let details: { dRepId: string }; + try { + details = buildProxyVoteTx({ + txBuilder, + network, + paramUtxo, + walletUtxos: resolvedWalletUtxos.utxos, + authTokenUtxo, + collateral: resolvedCollateral.collateral, + walletAddress, + votes, + multisigScriptCbor: walletRow.scriptCbor, + }); + } catch (error) { + return res.status(400).json({ + error: error instanceof Error ? error.message : "Failed to build proxy vote", + }); + } + + let txCbor: string; + try { + txCbor = await txBuilder.complete(); + } catch (error) { + console.error("proxyVote complete error:", error); + return res.status(500).json({ + error: error instanceof Error ? error.message : "Failed to build transaction", + }); + } + + const description = + typeof body.description === "string" && body.description.trim() + ? body.description.trim() + : "Proxy governance vote"; + + try { + const transaction = await createPendingMultisigTransaction(db, { + walletId, + wallet: { + numRequiredSigners: walletRow.numRequiredSigners, + type: walletRow.type, + }, + proposerAddress: address, + txCbor, + txJson: { + ...(typeof txBuilder.meshTxBuilderBody === "object" && + txBuilder.meshTxBuilderBody !== null + ? 
(txBuilder.meshTxBuilderBody as Record) + : {}), + proxyBot: { + kind: "proxyVote", + proxyId, + dRepId: details.dRepId, + votes, + }, + }, + description, + network, + initialSignedAddresses: [], + }); + return res.status(201).json(transaction); + } catch (error) { + console.error("proxyVote persist error:", error); + return res.status(500).json({ error: "Internal Server Error" }); + } +} diff --git a/src/pages/api/v1/signTransaction.ts b/src/pages/api/v1/signTransaction.ts index 602149cf..869fe989 100644 --- a/src/pages/api/v1/signTransaction.ts +++ b/src/pages/api/v1/signTransaction.ts @@ -12,7 +12,7 @@ import { shouldSubmitMultisigTx, submitTxWithScriptRecovery, } from "@/utils/txSignUtils"; -import { resolvePaymentKeyHash } from "@meshsdk/core"; +import { resolvePaymentKeyHash, resolveStakeKeyHash } from "@meshsdk/core"; import { calculateTxHash } from "@meshsdk/core-csl"; import { applyRateLimit, applyBotRateLimit, enforceBodySize } from "@/lib/security/requestGuards"; import { getClientIP } from "@/lib/security/rateLimit"; @@ -97,6 +97,9 @@ export default async function handler( signature?: unknown; key?: unknown; broadcast?: unknown; + /** Optional stake-key witness for transactions that include a staking certificate. */ + stakeKey?: unknown; + stakeSignature?: unknown; }; const { @@ -106,6 +109,8 @@ export default async function handler( signature, key, broadcast: rawBroadcast, + stakeKey, + stakeSignature, } = (req.body ?? {}) as SignTransactionRequestBody; if (typeof walletId !== "string" || walletId.trim() === "") { @@ -250,6 +255,57 @@ export default async function handler( return res.status(401).json({ error: "Invalid signature for transaction" }); } + // ── Optional stake-key witness ────────────────────────────────────────── + // Submitted alongside the payment-key witness when the transaction contains + // a staking certificate whose script uses stake key hashes (role-2 keys). 
+  // The signer's stake key hash must belong to this wallet's signersStakeKeys.
+  let stakeWitnessToAdd: ReturnType<typeof createVkeyWitnessFromHex>["witness"] | null = null;
+
+  const rawStakeKey = typeof stakeKey === "string" ? stakeKey.trim() : "";
+  const rawStakeSignature = typeof stakeSignature === "string" ? stakeSignature.trim() : "";
+
+  if (rawStakeKey && rawStakeSignature) {
+    let stakeWitnessDetails: ReturnType<typeof createVkeyWitnessFromHex>;
+    try {
+      stakeWitnessDetails = createVkeyWitnessFromHex(
+        normalizeHex(rawStakeKey, "stakeKey"),
+        normalizeHex(rawStakeSignature, "stakeSignature"),
+      );
+    } catch (error: unknown) {
+      console.error("Invalid stake witness payload", toError(error));
+      return res.status(400).json({ error: "Invalid stake witness payload" });
+    }
+
+    const isStakeSigValid = stakeWitnessDetails.publicKey.verify(
+      txHashBytes,
+      stakeWitnessDetails.signature,
+    );
+    if (!isStakeSigValid) {
+      return res.status(401).json({ error: "Invalid stake signature for transaction" });
+    }
+
+    // Resolve all staking key hashes for this wallet and check membership.
+    const walletStakeRow = await db.wallet.findUnique({
+      where: { id: walletId },
+      select: { signersStakeKeys: true },
+    });
+    const validStakeKeyHashes = new Set<string>();
+    for (const stakeAddr of (walletStakeRow?.signersStakeKeys ?? [])) {
+      if (typeof stakeAddr === "string" && stakeAddr.trim()) {
+        try {
+          validStakeKeyHashes.add(resolveStakeKeyHash(stakeAddr).toLowerCase());
+        } catch {
+          // skip malformed stake address
+        }
+      }
+    }
+    if (!validStakeKeyHashes.has(stakeWitnessDetails.keyHashHex)) {
+      return res.status(403).json({ error: "Stake key is not a staking key for this wallet" });
+    }
+
+    stakeWitnessToAdd = stakeWitnessDetails.witness;
+  }
+
   let txHexForUpdate = storedTxHex;
   let vkeyWitnesses: ReturnType<typeof addUniqueVkeyWitnessToTx>["vkeyWitnesses"];
   try {
@@ -266,6 +322,18 @@
     return res.status(500).json({ error: "Invalid stored transaction data" });
   }
 
+  // Merge stake witness into the tx if one was provided and validated.
+ if (stakeWitnessToAdd) { + try { + const stakeMerge = addUniqueVkeyWitnessToTx(txHexForUpdate, stakeWitnessToAdd); + txHexForUpdate = stakeMerge.txHex; + vkeyWitnesses = stakeMerge.vkeyWitnesses; + } catch (error: unknown) { + console.error("Failed to merge stake witness into transaction", toError(error)); + return res.status(500).json({ error: "Failed to add stake witness to transaction" }); + } + } + const witnessSummaries: { keyHashHex: string; publicKeyBech32: string; diff --git a/src/pages/api/v1/stakeAccountInfo.ts b/src/pages/api/v1/stakeAccountInfo.ts new file mode 100644 index 00000000..924643a7 --- /dev/null +++ b/src/pages/api/v1/stakeAccountInfo.ts @@ -0,0 +1,44 @@ +import type { NextApiRequest, NextApiResponse } from "next"; +import { cors, addCorsCacheBustingHeaders } from "@/lib/cors"; +import { verifyJwt, isBotJwt } from "@/lib/verifyJwt"; +import { applyRateLimit, applyBotRateLimit } from "@/lib/security/requestGuards"; +import { getProvider } from "@/utils/get-provider"; + +export default async function handler(req: NextApiRequest, res: NextApiResponse) { + addCorsCacheBustingHeaders(res); + if (!applyRateLimit(req, res, { keySuffix: "v1/stakeAccountInfo" })) return; + await cors(req, res); + if (req.method === "OPTIONS") return res.status(200).end(); + if (req.method !== "GET") return res.status(405).json({ error: "Method Not Allowed" }); + + const authHeader = req.headers.authorization; + const token = authHeader?.startsWith("Bearer ") ? 
authHeader.slice(7) : null; + if (!token) return res.status(401).json({ error: "Unauthorized - Missing token" }); + + const payload = verifyJwt(token); + if (!payload) return res.status(401).json({ error: "Invalid or expired token" }); + if (isBotJwt(payload) && !applyBotRateLimit(req, res, payload.botId)) return; + + const { stakeAddress } = req.query; + if (typeof stakeAddress !== "string" || !stakeAddress.trim()) { + return res.status(400).json({ error: "Missing or invalid stakeAddress parameter" }); + } + + const network = stakeAddress.startsWith("stake_test") ? 0 : 1; + const provider = getProvider(network); + + try { + const info = await provider.fetchAccountInfo(stakeAddress.trim()); + return res.status(200).json({ active: info.active, poolId: info.poolId ?? null }); + } catch (e) { + // Blockfrost returns 404 for accounts that have never been registered — treat as inactive + const is404 = + (e as { status?: number })?.status === 404 || + (e instanceof Error && e.message.includes("404")); + if (is404) { + return res.status(200).json({ active: false, poolId: null }); + } + console.error("stakeAccountInfo error:", e); + return res.status(500).json({ error: "Failed to fetch stake account info" }); + } +} diff --git a/src/utils/common.ts b/src/utils/common.ts index 1be645a3..9bbfaca0 100644 --- a/src/utils/common.ts +++ b/src/utils/common.ts @@ -129,13 +129,17 @@ function resolveSummonScriptCbors(args: { */ export type WalletType = 'legacy' | 'sdk' | 'summon'; +function hasNonEmptyEntries(values?: string[] | null): boolean { + return !!values?.some((value) => value.trim().length > 0); +} + export function getWalletType(wallet: DbWalletWithLegacy): WalletType { if (wallet.rawImportBodies?.multisig) return 'summon'; // Legacy: only payment keys (no stake keys, no DRep keys) // External stake credential hash doesn't make it SDK - it's still legacy if only payment keys - const hasStakeKeys = wallet.signersStakeKeys && wallet.signersStakeKeys.length > 0; - const 
hasDRepKeys = wallet.signersDRepKeys && wallet.signersDRepKeys.length > 0; + const hasStakeKeys = hasNonEmptyEntries(wallet.signersStakeKeys); + const hasDRepKeys = hasNonEmptyEntries(wallet.signersDRepKeys); if (!hasStakeKeys && !hasDRepKeys) return 'legacy'; return 'sdk'; diff --git a/src/utils/stakingCertificates.ts b/src/utils/stakingCertificates.ts new file mode 100644 index 00000000..4acad168 --- /dev/null +++ b/src/utils/stakingCertificates.ts @@ -0,0 +1,119 @@ +import { getTxBuilder } from "@/utils/get-tx-builder"; + +export type StakingActionApi = + | "register" + | "deregister" + | "delegate" + | "register_and_delegate"; + +export type StakingActionUi = + | "register" + | "deregister" + | "delegate" + | "withdrawal" + | "registerAndDelegate"; + +type StakingActionConfig = { + execute: () => void; + description: string; +}; + +/** + * Mirrors StakingActions/stake.tsx certificate wiring (minus withdrawal, which needs reward balance). + */ +export function buildStakingCertificateActions({ + txBuilder, + rewardAddress, + stakingScript, + poolHex, +}: { + txBuilder: ReturnType; + rewardAddress: string; + stakingScript: string; + poolHex: string; +}): Record { + return { + register: { + execute: () => + txBuilder + .registerStakeCertificate(rewardAddress) + .certificateScript(stakingScript), + description: "Register stake.", + }, + deregister: { + execute: () => + txBuilder + .deregisterStakeCertificate(rewardAddress) + .certificateScript(stakingScript), + description: "Deregister stake.", + }, + delegate: { + execute: () => + txBuilder + .delegateStakeCertificate(rewardAddress, poolHex) + .certificateScript(stakingScript), + description: "Delegate stake.", + }, + register_and_delegate: { + execute: () => { + txBuilder + .registerStakeCertificate(rewardAddress) + .certificateScript(stakingScript); + txBuilder + .delegateStakeCertificate(rewardAddress, poolHex) + .certificateScript(stakingScript); + }, + description: "Register & delegate stake.", + }, + }; +} + 
+/** UI + withdrawal — same as stake.tsx StakingActionConfig map. */ +export function buildStakingActionConfigs({ + txBuilder, + rewardAddress, + stakingScript, + poolHex, + rewards, +}: { + txBuilder: ReturnType; + rewardAddress: string; + stakingScript: string; + poolHex: string; + rewards: string; +}): Record { + const base = buildStakingCertificateActions({ + txBuilder, + rewardAddress, + stakingScript, + poolHex, + }); + return { + register: { + ...base.register, + successTitle: "Stake Registered", + successMessage: "Your stake address has been registered.", + }, + deregister: { + ...base.deregister, + successTitle: "Stake Deregistered", + successMessage: "Your stake address has been deregistered.", + }, + delegate: { + ...base.delegate, + successTitle: "Stake Delegated", + successMessage: "Your stake has been delegated.", + }, + withdrawal: { + execute: () => txBuilder.withdrawal(rewardAddress, rewards), + description: "Withdraw rewards.", + successTitle: "Rewards Withdrawn", + successMessage: "Your staking rewards have been withdrawn.", + }, + registerAndDelegate: { + ...base.register_and_delegate, + successTitle: "Stake Registered & Delegated", + successMessage: "Your stake address has been registered and delegated.", + }, + }; +} diff --git a/src/utils/swagger.ts b/src/utils/swagger.ts index c07db6ce..ca62a04a 100644 --- a/src/utils/swagger.ts +++ b/src/utils/swagger.ts @@ -282,6 +282,511 @@ This API uses **Bearer Token** authentication (JWT). }, }, }, + "/api/v1/botStakeCertificate": { + post: { + tags: ["V1"], + summary: "Build stake certificate transaction (SDK multisig)", + description: + "Server builds register/delegate/deregister stake transactions using Mesh (same as UI). Requires wallet signer JWT; bots need cosigner access and multisig:sign scope. Body must include utxoRefs (txHash + outputIndex) resolved from chain; use GET /api/v1/freeUtxos to pick inputs. poolId is required for delegate and register_and_delegate (bech32 pool1... 
or 56-char hex).", + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + properties: { + walletId: { type: "string" }, + address: { type: "string", description: "Must match JWT address" }, + action: { + type: "string", + enum: ["register", "deregister", "delegate", "register_and_delegate"], + }, + poolId: { type: "string" }, + utxoRefs: { + type: "array", + items: { + type: "object", + properties: { + txHash: { type: "string" }, + outputIndex: { type: "integer" }, + }, + required: ["txHash", "outputIndex"], + }, + }, + description: { type: "string" }, + }, + required: ["walletId", "address", "action", "utxoRefs"], + }, + }, + }, + }, + responses: { + 201: { description: "Transaction created or submitted (same shape as addTransaction)" }, + 400: { description: "Invalid input, wallet type, or staking not enabled" }, + 401: { description: "Unauthorized" }, + 403: { description: "Forbidden or insufficient bot scope" }, + 405: { description: "Method not allowed" }, + 500: { description: "Internal server error" }, + }, + }, + }, + "/api/v1/botDRepCertificate": { + post: { + tags: ["V1"], + summary: "Build DRep registration or retirement transaction", + description: + "Server builds DRep register/retire (non-proxy). Bots need multisig:sign. For register, anchorUrl and anchorJson are required; the server does not fetch anchorUrl and computes hashDrepAnchor from the provided anchorJson object. 
utxoRefs must list UTxOs at the multisig spend address.", + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + properties: { + walletId: { type: "string" }, + address: { type: "string", description: "Must match JWT address" }, + action: { type: "string", enum: ["register", "retire"] }, + utxoRefs: { + type: "array", + items: { + type: "object", + properties: { + txHash: { type: "string" }, + outputIndex: { type: "integer" }, + }, + required: ["txHash", "outputIndex"], + }, + }, + description: { type: "string" }, + anchorUrl: { type: "string" }, + anchorJson: { type: "object" }, + }, + required: ["walletId", "address", "action", "utxoRefs"], + }, + }, + }, + }, + responses: { + 201: { description: "Transaction created or submitted" }, + 400: { description: "Invalid input or unsupported wallet" }, + 401: { description: "Unauthorized" }, + 403: { description: "Forbidden or insufficient bot scope" }, + 405: { description: "Method not allowed" }, + 500: { description: "Internal server error" }, + }, + }, + }, + "/api/v1/proxies": { + get: { + tags: ["V1", "Bot"], + summary: "List active confirmed proxies for a wallet", + description: + "Returns active Proxy rows for a wallet. Human callers must be wallet signers. 
Bot callers may use observer or cosigner wallet access.", + parameters: [ + { in: "query", name: "walletId", required: true, schema: { type: "string" } }, + { + in: "query", + name: "address", + required: true, + schema: { type: "string" }, + description: "Must match JWT address", + }, + ], + responses: { + 200: { + description: "Active proxy records", + content: { + "application/json": { + schema: { + type: "array", + items: { + type: "object", + properties: { + id: { type: "string" }, + walletId: { type: "string" }, + proxyAddress: { type: "string" }, + authTokenId: { type: "string" }, + paramUtxo: { type: "string" }, + description: { type: "string", nullable: true }, + isActive: { type: "boolean" }, + createdAt: { type: "string", format: "date-time" }, + updatedAt: { type: "string", format: "date-time" }, + }, + }, + }, + }, + }, + }, + 400: { description: "Invalid query parameters" }, + 401: { description: "Unauthorized" }, + 403: { description: "Forbidden" }, + 404: { description: "Wallet not found" }, + }, + }, + }, + "/api/v1/proxyDRepInfo": { + get: { + tags: ["V1", "Bot"], + summary: "Get proxy DRep registration status", + description: + "Returns the on-chain active status for the DRep credential derived from a confirmed proxy. Human callers must be wallet signers. 
Bot callers may use observer or cosigner wallet access.", + parameters: [ + { in: "query", name: "walletId", required: true, schema: { type: "string" } }, + { + in: "query", + name: "address", + required: true, + schema: { type: "string" }, + description: "Must match JWT address", + }, + { in: "query", name: "proxyId", required: true, schema: { type: "string" } }, + ], + responses: { + 200: { + description: "Proxy DRep status", + content: { + "application/json": { + schema: { + type: "object", + properties: { + active: { type: "boolean" }, + dRepId: { type: "string" }, + }, + required: ["active", "dRepId"], + }, + }, + }, + }, + 400: { description: "Invalid query parameters" }, + 401: { description: "Unauthorized" }, + 403: { description: "Forbidden" }, + 404: { description: "Wallet or proxy not found" }, + 409: { description: "Stored proxy metadata mismatch" }, + 500: { description: "Blockfrost or server error" }, + }, + }, + }, + "/api/v1/proxySetup": { + post: { + tags: ["V1", "Bot"], + summary: "Build a proxy setup transaction", + description: + "Builds a Plutus proxy setup transaction, persists it through the multisig pending transaction flow with no initial signed addresses, and returns derived setup metadata. Bots need multisig:sign and cosigner access. 
Proxy rows are not created until POST /api/v1/proxySetupFinalize validates confirmed chain state.", + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + properties: { + walletId: { type: "string" }, + address: { type: "string", description: "Must match JWT address" }, + utxoRefs: { + type: "array", + items: { + type: "object", + properties: { + txHash: { type: "string" }, + outputIndex: { type: "integer" }, + }, + required: ["txHash", "outputIndex"], + }, + }, + collateralRef: { + type: "object", + properties: { + txHash: { type: "string" }, + outputIndex: { type: "integer" }, + }, + required: ["txHash", "outputIndex"], + }, + initialProxyLovelace: { + type: "string", + description: + "Optional positive integer lovelace amount to place at the proxy address during setup. Defaults to 1000000 when omitted.", + example: "5000000", + }, + description: { type: "string" }, + }, + required: ["walletId", "address", "utxoRefs", "collateralRef"], + }, + }, + }, + }, + responses: { + 201: { description: "Pending/submitted transaction plus setup metadata" }, + 400: { description: "Invalid input or UTxO refs" }, + 401: { description: "Unauthorized" }, + 403: { description: "Forbidden or insufficient bot scope" }, + 500: { description: "Build or persistence failure" }, + }, + }, + }, + "/api/v1/proxySetupFinalize": { + post: { + tags: ["V1", "Bot"], + summary: "Finalize a confirmed proxy setup", + description: + "Creates the confirmed Proxy row after setup is on-chain. 
The server validates that txHash created a proxy-address output and returned the auth token to the multisig wallet, then validates current chain state before creating or reactivating the row.", + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + properties: { + walletId: { type: "string" }, + address: { type: "string", description: "Must match JWT address" }, + txHash: { + type: "string", + description: + "Confirmed setup transaction hash. The transaction outputs must include the proxy address and the auth token at the multisig wallet address.", + }, + proxyAddress: { type: "string" }, + authTokenId: { type: "string" }, + paramUtxo: { + type: "object", + properties: { + txHash: { type: "string" }, + outputIndex: { type: "integer" }, + }, + required: ["txHash", "outputIndex"], + }, + description: { type: "string" }, + }, + required: [ + "walletId", + "address", + "txHash", + "proxyAddress", + "authTokenId", + "paramUtxo", + ], + }, + }, + }, + }, + responses: { + 201: { description: "Confirmed Proxy row" }, + 400: { description: "Missing metadata or chain validation failed" }, + 401: { description: "Unauthorized" }, + 403: { description: "Forbidden or insufficient bot scope" }, + 404: { description: "Wallet not found" }, + }, + }, + }, + "/api/v1/proxySpend": { + post: { + tags: ["V1", "Bot"], + summary: "Build a proxy spend transaction", + description: + "Builds a proxy script spend transaction and persists it through the multisig pending transaction flow with no initial signed addresses. Requires an auth-token UTxO at the multisig wallet address. If proxyUtxoRefs is omitted, the server selects enough proxy-address UTxOs for the requested outputs plus fee buffer. 
Bots need multisig:sign and cosigner access.", + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + properties: { + walletId: { type: "string" }, + address: { type: "string" }, + proxyId: { type: "string" }, + outputs: { + type: "array", + items: { + type: "object", + properties: { + address: { type: "string" }, + unit: { type: "string" }, + amount: { type: "string" }, + }, + required: ["address", "unit", "amount"], + }, + }, + utxoRefs: { type: "array", items: { type: "object" } }, + proxyUtxoRefs: { type: "array", items: { type: "object" } }, + collateralRef: { type: "object" }, + description: { type: "string" }, + }, + required: ["walletId", "address", "proxyId", "outputs", "utxoRefs", "collateralRef"], + }, + }, + }, + }, + responses: { + 201: { description: "Transaction created or submitted" }, + 400: { description: "Invalid input, UTxO refs, collateral, or missing auth token" }, + 401: { description: "Unauthorized" }, + 403: { description: "Forbidden or insufficient bot scope" }, + 404: { description: "Proxy not found" }, + 409: { description: "Stored proxy metadata mismatch" }, + }, + }, + }, + "/api/v1/proxyDRepCertificate": { + post: { + tags: ["V1", "Bot"], + summary: "Build a proxy DRep certificate transaction", + description: + "Registers, updates, or deregisters the proxy script DRep through the pending multisig flow with no initial signed addresses. The server computes hashDrepAnchor(anchorJson) for register/update and requires an auth-token UTxO. 
Bots need multisig:sign and cosigner access.", + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + properties: { + walletId: { type: "string" }, + address: { type: "string" }, + proxyId: { type: "string" }, + action: { type: "string", enum: ["register", "update", "deregister"] }, + utxoRefs: { type: "array", items: { type: "object" } }, + collateralRef: { type: "object" }, + anchorUrl: { type: "string" }, + anchorJson: { type: "object" }, + description: { type: "string" }, + }, + required: ["walletId", "address", "proxyId", "action", "utxoRefs", "collateralRef"], + }, + }, + }, + }, + responses: { + 201: { description: "Transaction created or submitted" }, + 400: { description: "Invalid input, anchor payload, UTxO refs, or collateral" }, + 401: { description: "Unauthorized" }, + 403: { description: "Forbidden or insufficient bot scope" }, + 404: { description: "Proxy not found" }, + 409: { description: "Stored proxy metadata mismatch" }, + }, + }, + }, + "/api/v1/proxyVote": { + post: { + tags: ["V1", "Bot"], + summary: "Build a proxy DRep vote transaction", + description: + "Builds a governance vote as the proxy DRep through the pending multisig flow with no initial signed addresses. proposalId must use #. Requires an auth-token UTxO. 
Bots need multisig:sign and cosigner access.", + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + properties: { + walletId: { type: "string" }, + address: { type: "string" }, + proxyId: { type: "string" }, + votes: { + type: "array", + items: { + type: "object", + properties: { + proposalId: { type: "string" }, + voteKind: { type: "string", enum: ["Yes", "No", "Abstain"] }, + metadata: {}, + }, + required: ["proposalId", "voteKind"], + }, + }, + utxoRefs: { type: "array", items: { type: "object" } }, + collateralRef: { type: "object" }, + description: { type: "string" }, + }, + required: ["walletId", "address", "proxyId", "votes", "utxoRefs", "collateralRef"], + }, + }, + }, + }, + responses: { + 201: { description: "Transaction created or submitted" }, + 400: { description: "Invalid input, proposal id, UTxO refs, or collateral" }, + 401: { description: "Unauthorized" }, + 403: { description: "Forbidden or insufficient bot scope" }, + 404: { description: "Proxy not found" }, + 409: { description: "Stored proxy metadata mismatch" }, + }, + }, + }, + "/api/v1/proxyCleanup": { + post: { + tags: ["V1", "Bot"], + summary: "Build a proxy cleanup transaction", + description: + "Builds the next safe cleanup transaction through the multisig pending transaction flow with no initial signed addresses. If the proxy address still has UTxOs, the transaction sweeps them back to the multisig wallet while preserving an auth token. Once the proxy address is empty, the transaction burns all auth tokens. Bots need multisig:sign and cosigner access. 
The Proxy row is deactivated only after POST /api/v1/proxyCleanupFinalize validates the confirmed burn transaction hash and current chain state.", + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + properties: { + walletId: { type: "string" }, + address: { type: "string" }, + proxyId: { type: "string" }, + utxoRefs: { type: "array", items: { type: "object" } }, + proxyUtxoRefs: { + type: "array", + items: { type: "object" }, + description: + "Optional explicit proxy-address UTxOs to sweep. When provided, it must include every currently visible proxy UTxO.", + }, + collateralRef: { type: "object" }, + deactivateProxy: { type: "boolean", default: true }, + description: { type: "string" }, + }, + required: ["walletId", "address", "proxyId", "utxoRefs", "collateralRef"], + }, + }, + }, + }, + responses: { + 201: { description: "Pending/submitted cleanup transaction plus cleanup metadata" }, + 400: { description: "Invalid input, UTxO refs, collateral, or auth-token count" }, + 401: { description: "Unauthorized" }, + 403: { description: "Forbidden or insufficient bot scope" }, + 404: { description: "Proxy not found" }, + 409: { description: "Stored proxy metadata mismatch" }, + }, + }, + }, + "/api/v1/proxyCleanupFinalize": { + post: { + tags: ["V1", "Bot"], + summary: "Finalize a confirmed proxy cleanup", + description: + "Deactivates a Proxy row after cleanup is confirmed on-chain. The server validates that txHash spent the auth token without recreating it or a proxy-address output, then checks that auth tokens are no longer visible at the multisig wallet or proxy address and the proxy address has no remaining UTxOs.", + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + properties: { + walletId: { type: "string" }, + address: { type: "string" }, + proxyId: { type: "string" }, + txHash: { + type: "string", + description: + "Confirmed cleanup burn transaction hash. 
The transaction must spend the auth token without recreating auth-token or proxy-address outputs.", + }, + deactivateProxy: { type: "boolean", default: true }, + }, + required: ["walletId", "address", "proxyId", "txHash"], + }, + }, + }, + }, + responses: { + 201: { description: "Deactivated Proxy row" }, + 400: { description: "Missing metadata or chain validation failed" }, + 401: { description: "Unauthorized" }, + 403: { description: "Forbidden or insufficient bot scope" }, + 404: { description: "Proxy not found" }, + }, + }, + }, "/api/v1/pendingTransactions": { get: { tags: ["V1"], @@ -1003,6 +1508,34 @@ This API uses **Bearer Token** authentication (JWT). default: "atLeast", description: "Unknown values are treated as atLeast.", }, + paymentNativeScript: { + type: "object", + description: + "Optional explicit payment script tree. Supported nodes: sig/all/any/atLeast. Sig key hashes must match signersAddresses payment key hashes.", + example: { + type: "all", + scripts: [ + { + type: "atLeast", + required: 2, + scripts: [ + { + type: "sig", + keyHash: "b8b7d19e...7776dfde7", + }, + { + type: "sig", + keyHash: "f4755fe1...0c91faa1", + }, + { + type: "sig", + keyHash: "59d8f3f9...bd3360762", + }, + ], + }, + ], + }, + }, stakeCredentialHash: { type: "string" }, network: { type: "integer", enum: [0, 1], default: 1 }, }, diff --git a/src/utils/txScriptRecovery.ts b/src/utils/txScriptRecovery.ts index d94c3e44..78d1339e 100644 --- a/src/utils/txScriptRecovery.ts +++ b/src/utils/txScriptRecovery.ts @@ -389,6 +389,9 @@ export function shouldSubmitMultisigTx( const required = appWallet.numRequiredSigners ?? 1; return signedAddressesCount >= required; } + if (appWallet.type === "all" && typeof appWallet.numRequiredSigners === "number") { + return signedAddressesCount >= appWallet.numRequiredSigners; + } return signedAddressesCount >= appWallet.signersAddresses.length; }