diff --git a/aws/lambdas/exchange-rates/go.mod b/aws/lambdas/exchange-rates/go.mod new file mode 100644 index 0000000000..fa1167230f --- /dev/null +++ b/aws/lambdas/exchange-rates/go.mod @@ -0,0 +1,9 @@ +module exchange-rates-lambda + +go 1.21.0 + +require ( + github.com/aws/aws-lambda-go v1.42.0 // indirect + github.com/aws/aws-sdk-go v1.49.1 // indirect + github.com/jmespath/go-jmespath v0.4.0 // indirect +) diff --git a/aws/lambdas/exchange-rates/go.sum b/aws/lambdas/exchange-rates/go.sum new file mode 100644 index 0000000000..bd6d4c9469 --- /dev/null +++ b/aws/lambdas/exchange-rates/go.sum @@ -0,0 +1,12 @@ +github.com/aws/aws-lambda-go v1.42.0 h1:U4QKkxLp/il15RJGAANxiT9VumQzimsUER7gokqA0+c= +github.com/aws/aws-lambda-go v1.42.0/go.mod h1:dpMpZgvWx5vuQJfBt0zqBha60q7Dd7RfgJv23DymV8A= +github.com/aws/aws-sdk-go v1.49.1 h1:Dsamcd8d/nNb3A+bZ0ucfGl0vGZsW5wlRW0vhoYGoeQ= +github.com/aws/aws-sdk-go v1.49.1/go.mod h1:LF8svs817+Nz+DmiMQKTO3ubZ/6IaTpq3TjupRn3Eqk= +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/jmespath/go-jmespath v0.4.0 h1:BEgLn5cpjn8UN1mAw4NjwDrS35OdebyEtFe+9YPoQUg= +github.com/jmespath/go-jmespath v0.4.0/go.mod h1:T8mJZnbsbmF+m6zOOFylbeCJqk5+pHWvzYPziyZiYoo= +github.com/jmespath/go-jmespath/internal/testify v1.5.1/go.mod h1:L3OGu8Wl2/fWfCI6z80xFu9LTZmf1ZRjMHUOPmWr69U= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= diff --git a/aws/lambdas/exchange-rates/main.go b/aws/lambdas/exchange-rates/main.go new file mode 100644 index 0000000000..7d0d5bef90 --- /dev/null +++ b/aws/lambdas/exchange-rates/main.go @@ -0,0 +1,119 @@ +package main + +import ( + "bytes" + "encoding/json" + "errors" + 
"fmt"
+	"net/http"
+	"os"
+	"strconv"
+	"time"
+
+	"github.com/aws/aws-lambda-go/lambda"
+	"github.com/aws/aws-sdk-go/aws"
+	"github.com/aws/aws-sdk-go/aws/session"
+	"github.com/aws/aws-sdk-go/service/s3/s3manager"
+)
+
+type RatesResponse struct {
+	Base  string             `json:"base"`
+	Rates map[string]float64 `json:"rates"`
+}
+
+type MerchantRates struct {
+	Merchant map[string]map[string]string `json:"merchant"`
+}
+
+func getExchangeRates(apiURL string) (map[string]map[string]string, error) {
+	client := &http.Client{Timeout: 15 * time.Second}
+	response, err := client.Get(apiURL)
+	if err != nil {
+		return nil, fmt.Errorf("failed to get exchange rates from %s: %w", apiURL, err)
+	}
+	defer response.Body.Close()
+
+	if response.StatusCode != http.StatusOK {
+		return nil, fmt.Errorf("received non-200 response code: %d", response.StatusCode)
+	}
+
+	var ratesResponse MerchantRates
+	err = json.NewDecoder(response.Body).Decode(&ratesResponse)
+	if err != nil {
+		return nil, fmt.Errorf("failed to decode response body: %w", err)
+	}
+
+	return ratesResponse.Merchant, nil
+}
+
+func transformToBaseCurrency(baseCurrency string, merchantRates map[string]map[string]string) (*RatesResponse, error) {
+
+	usdRates := make(map[string]float64)
+
+	for currency, rateMap := range merchantRates {
+		rateStr, ok := rateMap[baseCurrency]
+		if !ok {
+			return nil, fmt.Errorf("failed to find %s rate for currency %s in merchant rates", baseCurrency, currency)
+		}
+
+		rate, err := strconv.ParseFloat(rateStr, 64)
+		if err != nil {
+			return nil, fmt.Errorf("failed to convert rate to float64 for currency %s: %v", currency, err)
+		}
+
+		usdRates[currency] = rate
+	}
+
+	return &RatesResponse{
+		Base:  baseCurrency,
+		Rates: usdRates,
+	}, nil
+}
+
+func Handler() (string, error) {
+	apiUrl := os.Getenv("API_URL")
+	bucketName := os.Getenv("BUCKET_NAME")
+	keyName := os.Getenv("KEY_NAME")
+	region := os.Getenv("REGION")
+	baseCurrency := os.Getenv("BASE_CURRENCY")
+
+	if apiUrl == "" || bucketName == "" || keyName == "" || region == "" || baseCurrency == "" {
+		return "", errors.New("API_URL, BUCKET_NAME, KEY_NAME, REGION, or BASE_CURRENCY environment variable is not set")
+	}
+
+	merchantRates, err := getExchangeRates(apiUrl)
+	if err != nil {
+		return "", err
+	}
+
+	data, err := transformToBaseCurrency(baseCurrency, merchantRates)
+	if err != nil {
+		return "", err
+	}
+
+	dataBytes, err := json.Marshal(data)
+	if err != nil {
+		return "", err
+	}
+
+	sess := session.Must(session.NewSession(&aws.Config{
+		Region: aws.String(region)},
+	))
+
+	uploader := s3manager.NewUploader(sess)
+
+	_, err = uploader.Upload(&s3manager.UploadInput{
+		Bucket: aws.String(bucketName),
+		Key:    aws.String(keyName),
+		Body:   bytes.NewReader(dataBytes),
+	})
+	if err != nil {
+		return "", err
+	}
+
+	return "Successfully uploaded data to " + bucketName + "/" + keyName, nil
+}
+
+func main() {
+	lambda.Start(Handler)
+}
diff --git a/localenv/.gitignore b/localenv/.gitignore new file mode 100644 index 0000000000..4c49bd78f1 --- /dev/null +++ b/localenv/.gitignore @@ -0,0 +1 @@ +.env diff --git a/localenv/cloud-nine-wallet/docker-compose.yml b/localenv/cloud-nine-wallet/docker-compose.yml index fc8b22641e..243fe66bca 100644 --- a/localenv/cloud-nine-wallet/docker-compose.yml +++ b/localenv/cloud-nine-wallet/docker-compose.yml @@ -40,6 +40,7 @@ services: - rafiki environment: NODE_ENV: ${NODE_ENV:-development} + INSTANCE_NAME: CLOUD-NINE TRUST_PROXY: ${TRUST_PROXY} LOG_LEVEL: debug ADMIN_PORT: 3001 @@ -59,6 +60,7 @@ services: REDIS_URL: redis://shared-redis:6379/0 WALLET_ADDRESS_URL: ${CLOUD_NINE_WALLET_ADDRESS_URL:-https://cloud-nine-wallet-backend/.well-known/pay} ILP_CONNECTOR_ADDRESS: ${CLOUD_NINE_CONNECTOR_URL} + ENABLE_TELEMETRY: false depends_on: - shared-database - shared-redis @@ -118,6 +120,7 @@ depends_on: - cloud-nine-backend + +volumes: database-data: # named volumes can be managed easier using docker-compose diff --git a/localenv/cloud-nine-wallet/seed.yml b/localenv/cloud-nine-wallet/seed.yml index 
bdf6e825ec..763398e96c 100644 --- a/localenv/cloud-nine-wallet/seed.yml +++ b/localenv/cloud-nine-wallet/seed.yml @@ -5,42 +5,42 @@ self: assets: - code: USD scale: 2 - liquidity: 1000000 - liquidityThreshold: 100000 + liquidity: 100000000 + liquidityThreshold: 10000000 - code: EUR scale: 2 - liquidity: 1000000 - liquidityThreshold: 100000 + liquidity: 100000000 + liquidityThreshold: 10000000 - code: MXN scale: 2 - liquidity: 1000000 - liquidityThreshold: 100000 + liquidity: 100000000 + liquidityThreshold: 10000000 - code: JPY scale: 0 - liquidity: 10000 - liquidityThreshold: 1000 + liquidity: 1000000 + liquidityThreshold: 100000 peeringAsset: 'USD' peers: - - initialLiquidity: '100000' + - initialLiquidity: '10000000' peerUrl: http://happy-life-bank-backend:3002 peerIlpAddress: test.happy-life-bank - liquidityThreshold: 10000 + liquidityThreshold: 1000000 accounts: - name: 'Grace Franklin' path: accounts/gfranklin id: 742ab7cd-1624-4d2e-af6e-e15a71638669 - initialBalance: 50000 + initialBalance: 40000000 postmanEnvVar: gfranklinWalletAddress assetCode: USD - name: 'Bert Hamchest' id: a9adbe1a-df31-4766-87c9-d2cb2e636a9b - initialBalance: 50000 + initialBalance: 40000000 path: accounts/bhamchest postmanEnvVar: bhamchestWalletAddress assetCode: USD - name: "World's Best Donut Co" id: 5726eefe-8737-459d-a36b-0acce152cb90 - initialBalance: 2000 + initialBalance: 20000000 path: accounts/wbdc postmanEnvVar: wbdcWalletAddress assetCode: USD diff --git a/localenv/happy-life-bank/docker-compose.yml b/localenv/happy-life-bank/docker-compose.yml index e4495d0e07..f5ebc53777 100644 --- a/localenv/happy-life-bank/docker-compose.yml +++ b/localenv/happy-life-bank/docker-compose.yml @@ -34,6 +34,7 @@ services: - rafiki environment: NODE_ENV: development + INSTANCE_NAME: HAPPY-LIFE LOG_LEVEL: debug ADMIN_PORT: 3001 CONNECTOR_PORT: 3002 @@ -51,6 +52,7 @@ services: EXCHANGE_RATES_URL: http://happy-life-bank/rates REDIS_URL: redis://shared-redis:6379/1 WALLET_ADDRESS_URL: 
${HAPPY_LIFE_BANK_WALLET_ADDRESS_URL:-https://happy-life-bank-backend/.well-known/pay} + ENABLE_TELEMETRY: false depends_on: - cloud-nine-backend happy-life-auth: diff --git a/localenv/happy-life-bank/seed.yml b/localenv/happy-life-bank/seed.yml index e86869588b..1289d7cac8 100644 --- a/localenv/happy-life-bank/seed.yml +++ b/localenv/happy-life-bank/seed.yml @@ -5,55 +5,55 @@ self: assets: - code: USD scale: 2 - liquidity: 1000000 - liquidityThreshold: 100000 + liquidity: 10000000000 + liquidityThreshold: 100000000 - code: EUR scale: 2 - liquidity: 1000000 - liquidityThreshold: 100000 + liquidity: 10000000000 + liquidityThreshold: 1000000 - code: MXN scale: 2 - liquidity: 1000000 - liquidityThreshold: 100000 + liquidity: 10000000000 + liquidityThreshold: 10000000 - code: JPY scale: 0 - liquidity: 10000 - liquidityThreshold: 1000 + liquidity: 1000000000 + liquidityThreshold: 1000000 peeringAsset: 'USD' peers: - initialLiquidity: '1000000000000' peerUrl: http://cloud-nine-wallet-backend:3002 peerIlpAddress: test.cloud-nine-wallet - liquidityThreshold: 100000 + liquidityThreshold: 1000000 accounts: - name: 'Philip Fry' path: accounts/pfry id: 97a3a431-8ee1-48fc-ac85-70e2f5eba8e5 - initialBalance: 50000 + initialBalance: 1 postmanEnvVar: pfryWalletAddress assetCode: USD - name: 'PlanEx Corp' id: a455cc54-b583-455b-836a-e5275c5c05b7 - initialBalance: 2000 + initialBalance: 2000000 path: accounts/planex postmanEnvVar: planexWalletAddress assetCode: USD - name: 'Alice Smith' path: accounts/asmith id: f47ac10b-58cc-4372-a567-0e02b2c3d479 - initialBalance: 500 + initialBalance: 5000000 postmanEnvVar: asmithWalletAddress skipWalletAddressCreation: true assetCode: USD - name: 'Lars' path: accounts/lars id: fd4ecbc9-205d-4ecd-a030-507d6ce2bde6 - initialBalance: 50000 + initialBalance: 50000000 assetCode: EUR - name: 'David' path: accounts/david id: 60257507-3191-4507-9d77-9071fd6b3c30 - initialBalance: 150000 + initialBalance: 1500000000 assetCode: MXN rates: EUR: diff --git 
a/packages/backend/package.json b/packages/backend/package.json index 42af781815..6c67965f8c 100644 --- a/packages/backend/package.json +++ b/packages/backend/package.json @@ -56,6 +56,10 @@ "@interledger/pay": "0.4.0-alpha.9", "@interledger/stream-receiver": "^0.3.3-alpha.3", "@koa/router": "^12.0.0", + "@opentelemetry/api": "^1.6.0", + "@opentelemetry/exporter-metrics-otlp-grpc": "^0.43.0", + "@opentelemetry/resources": "^1.17.0", + "@opentelemetry/sdk-metrics": "^1.17.0", "ajv": "^8.12.0", "axios": "1.6.7", "base64url": "^3.0.1", diff --git a/packages/backend/src/accounting/psql/service.ts b/packages/backend/src/accounting/psql/service.ts index b876048cbd..3a64820ea6 100644 --- a/packages/backend/src/accounting/psql/service.ts +++ b/packages/backend/src/accounting/psql/service.ts @@ -3,17 +3,18 @@ import { TransactionOrKnex } from 'objection' import { v4 as uuid } from 'uuid' import { Asset } from '../../asset/model' import { BaseService } from '../../shared/baseService' +import { TelemetryService } from '../../telemetry/service' import { isTransferError, TransferError } from '../errors' import { AccountingService, + createAccountToAccountTransfer, Deposit, LiquidityAccount, LiquidityAccountType, Transaction, TransferOptions, TransferToCreate, - Withdrawal, - createAccountToAccountTransfer + Withdrawal } from '../service' import { getAccountBalances } from './balance' import { @@ -35,6 +36,7 @@ import { import { LedgerTransfer, LedgerTransferType } from './ledger-transfer/model' export interface ServiceDependencies extends BaseService { + telemetry?: TelemetryService knex: TransactionOrKnex withdrawalThrottleDelay?: number } @@ -200,9 +202,8 @@ export async function createTransfer( deps: ServiceDependencies, args: TransferOptions ): Promise { - return createAccountToAccountTransfer({ + return createAccountToAccountTransfer(deps, { transferArgs: args, - withdrawalThrottleDelay: deps.withdrawalThrottleDelay, voidTransfers: async (transferRefs) => 
voidTransfers(deps, transferRefs), postTransfers: async (transferRefs) => postTransfers(deps, transferRefs), getAccountReceived: async (accountRef) => diff --git a/packages/backend/src/accounting/service.ts b/packages/backend/src/accounting/service.ts index 6ed5d482d7..a7c0e18400 100644 --- a/packages/backend/src/accounting/service.ts +++ b/packages/backend/src/accounting/service.ts @@ -1,5 +1,8 @@ import { TransactionOrKnex } from 'objection' -import { isTransferError, TransferError } from './errors' +import { BaseService } from '../shared/baseService' +import { TelemetryService } from '../telemetry/service' +import { collectTelemetryAmount } from '../telemetry/transaction-amount' +import { TransferError, isTransferError } from './errors' export enum LiquidityAccountType { ASSET = 'ASSET', @@ -9,13 +12,17 @@ export enum LiquidityAccountType { WEB_MONETIZATION = 'WEB_MONETIZATION' } +export interface LiquidityAccountAsset { + id: string + code?: string + scale?: number + ledger: number + onDebit?: (options: OnDebitOptions) => Promise +} + export interface LiquidityAccount { id: string - asset: { - id: string - ledger: number - onDebit?: (options: OnDebitOptions) => Promise - } + asset: LiquidityAccountAsset onCredit?: (options: OnCreditOptions) => Promise onDebit?: (options: OnDebitOptions) => Promise } @@ -85,6 +92,11 @@ export interface TransferToCreate { ledger: number } +export interface BaseAccountingServiceDependencies extends BaseService { + telemetry?: TelemetryService + withdrawalThrottleDelay?: number +} + interface CreateAccountToAccountTransferArgs { transferArgs: TransferOptions voidTransfers(transferIds: string[]): Promise @@ -94,10 +106,10 @@ interface CreateAccountToAccountTransferArgs { createPendingTransfers( transfers: TransferToCreate[] ): Promise - withdrawalThrottleDelay?: number } export async function createAccountToAccountTransfer( + deps: BaseAccountingServiceDependencies, args: CreateAccountToAccountTransferArgs ): Promise { const { @@ 
-106,10 +118,11 @@ export async function createAccountToAccountTransfer( createPendingTransfers, getAccountReceived, getAccountBalance, - withdrawalThrottleDelay, transferArgs } = args + const { withdrawalThrottleDelay, telemetry, logger } = deps + const { sourceAccount, destinationAccount, sourceAmount, destinationAmount } = transferArgs @@ -183,6 +196,21 @@ export async function createAccountToAccountTransfer( withdrawalThrottleDelay }) } + + if ( + destinationAccount.onDebit && + telemetry && + sourceAccount.asset.code && + sourceAccount.asset.scale + ) { + collectTelemetryAmount(telemetry, logger, { + amount: sourceAmount, + asset: { + code: sourceAccount.asset.code, + scale: sourceAccount.asset.scale + } + }) + } }, void: async (): Promise => { const error = await voidTransfers(pendingTransferIdsOrError) diff --git a/packages/backend/src/accounting/tigerbeetle/service.ts b/packages/backend/src/accounting/tigerbeetle/service.ts index 986a633c0b..d06ead22bb 100644 --- a/packages/backend/src/accounting/tigerbeetle/service.ts +++ b/packages/backend/src/accounting/tigerbeetle/service.ts @@ -1,16 +1,9 @@ import { Client } from 'tigerbeetle-node' import { v4 as uuid } from 'uuid' -import { calculateBalance, createAccounts, getAccounts } from './accounts' -import { - areAllAccountExistsErrors, - TigerbeetleCreateAccountError, - TigerbeetleUnknownAccountError -} from './errors' -import { NewTransferOptions, createTransfers } from './transfers' import { BaseService } from '../../shared/baseService' import { validateId } from '../../shared/utils' -import { toTigerbeetleId } from './utils' +import { TelemetryService } from '../../telemetry/service' import { AccountAlreadyExistsError, BalanceTransferError, @@ -18,14 +11,22 @@ import { } from '../errors' import { AccountingService, - createAccountToAccountTransfer, Deposit, LiquidityAccount, LiquidityAccountType, Transaction, TransferOptions, - Withdrawal + Withdrawal, + createAccountToAccountTransfer } from '../service' 
+import { calculateBalance, createAccounts, getAccounts } from './accounts' +import { + TigerbeetleCreateAccountError, + TigerbeetleUnknownAccountError, + areAllAccountExistsErrors +} from './errors' +import { NewTransferOptions, createTransfers } from './transfers' +import { toTigerbeetleId } from './utils' export enum TigerbeetleAccountCode { LIQUIDITY_WEB_MONETIZATION = 1, @@ -48,6 +49,7 @@ export const convertToTigerbeetleAccountCode: { } export interface ServiceDependencies extends BaseService { + telemetry?: TelemetryService tigerbeetle: Client withdrawalThrottleDelay?: number } @@ -215,9 +217,8 @@ export async function createTransfer( deps: ServiceDependencies, args: TransferOptions ): Promise { - return createAccountToAccountTransfer({ + return createAccountToAccountTransfer(deps, { transferArgs: args, - withdrawalThrottleDelay: deps.withdrawalThrottleDelay, voidTransfers: async (transferIds) => { const error = await createTransfers( deps, diff --git a/packages/backend/src/app.ts b/packages/backend/src/app.ts index ce25313d3c..ed646ec5e8 100644 --- a/packages/backend/src/app.ts +++ b/packages/backend/src/app.ts @@ -83,7 +83,7 @@ import { Rafiki as ConnectorApp } from './payment-method/ilp/connector/core' import { AxiosInstance } from 'axios' import { PaymentMethodHandlerService } from './payment-method/handler/service' import { IlpPaymentService } from './payment-method/ilp/service' - +import { TelemetryService } from './telemetry/service' export interface AppContextData { logger: Logger container: AppContainer @@ -204,6 +204,8 @@ const WALLET_ADDRESS_PATH = '/:walletAddressPath+' export interface AppServices { logger: Promise + telemetry?: Promise + internalRatesService?: Promise knex: Promise axios: Promise config: Promise diff --git a/packages/backend/src/config/app.ts b/packages/backend/src/config/app.ts index 101537a233..8005cea73f 100644 --- a/packages/backend/src/config/app.ts +++ b/packages/backend/src/config/app.ts @@ -9,6 +9,11 @@ function 
envString(name: string, value: string): string { return envValue == null ? value : envValue } +function envStringArray(name: string, value: string[]): string[] { + const envValue = process.env[name] + return envValue == null ? value : envValue.split(',').map((s) => s.trim()) +} + function envInt(name: string, value: number): number { const envValue = process.env[name] return envValue == null ? value : parseInt(envValue) @@ -32,6 +37,25 @@ dotenv.config({ export const Config = { logLevel: envString('LOG_LEVEL', 'info'), + enableTelemetry: envBool('ENABLE_TELEMETRY', true), + livenet: envBool('LIVENET', false), + openTelemetryCollectors: envStringArray( + 'OPEN_TELEMETRY_COLLECTOR_URLS', + process.env.LIVENET + ? [] + : [ + 'http://otel-collector-NLB-e3172ff9d2f4bc8a.elb.eu-west-2.amazonaws.com:4317' + ] + ), + openTelemetryExportInterval: envInt('OPEN_TELEMETRY_EXPORT_INTERVAL', 15000), + telemetryExchangeRatesUrl: envString( + 'TELEMETRY_EXCHANGE_RATES_URL', + 'https://telemetry-exchange-rates.s3.amazonaws.com/exchange-rates-usd.json' + ), + telemetryExchangeRatesLifetime: envInt( + 'TELEMETRY_EXCHANGE_RATES_LIFETIME', + 86_400_000 + ), adminPort: envInt('ADMIN_PORT', 3001), openPaymentsUrl: envString('OPEN_PAYMENTS_URL', 'http://127.0.0.1:3000'), openPaymentsPort: envInt('OPEN_PAYMENTS_PORT', 3003), diff --git a/packages/backend/src/index.ts b/packages/backend/src/index.ts index 75bc10f452..08a82a94d8 100644 --- a/packages/backend/src/index.ts +++ b/packages/backend/src/index.ts @@ -1,53 +1,54 @@ -import path from 'path' -import createLogger from 'pino' -import { knex } from 'knex' -import { Model } from 'objection' import { Ioc, IocContract } from '@adonisjs/fold' import { Redis } from 'ioredis' +import { knex } from 'knex' +import { Model } from 'objection' +import path from 'path' +import createLogger from 'pino' import { createClient } from 'tigerbeetle-node' import { createClient as createIntrospectionClient } from 'token-introspection' +import { 
createAuthenticatedClient as createOpenPaymentsClient } from '@interledger/open-payments' +import { createOpenAPI } from '@interledger/openapi' +import { StreamServer } from '@interledger/stream-receiver' +import axios from 'axios' +import { createAccountingService as createPsqlAccountingService } from './accounting/psql/service' +import { createAccountingService as createTigerbeetleAccountingService } from './accounting/tigerbeetle/service' import { App, AppServices } from './app' +import { createAssetService } from './asset/service' import { Config } from './config/app' -import { createRatesService } from './rates/service' -import { createQuoteRoutes } from './open_payments/quote/routes' -import { createQuoteService } from './open_payments/quote/service' +import { createFeeService } from './fee/service' +import { createAuthServerService } from './open_payments/authServer/service' +import { createGrantService } from './open_payments/grant/service' +import { createCombinedPaymentService } from './open_payments/payment/combined/service' +import { createIncomingPaymentRoutes } from './open_payments/payment/incoming/routes' +import { createIncomingPaymentService } from './open_payments/payment/incoming/service' +import { createRemoteIncomingPaymentService } from './open_payments/payment/incoming_remote/service' import { createOutgoingPaymentRoutes } from './open_payments/payment/outgoing/routes' import { createOutgoingPaymentService } from './open_payments/payment/outgoing/service' +import { createQuoteRoutes } from './open_payments/quote/routes' +import { createQuoteService } from './open_payments/quote/service' +import { createReceiverService } from './open_payments/receiver/service' +import { createWalletAddressKeyRoutes } from './open_payments/wallet_address/key/routes' +import { createWalletAddressKeyService } from './open_payments/wallet_address/key/service' +import { createWalletAddressRoutes } from './open_payments/wallet_address/routes' +import { 
createWalletAddressService } from './open_payments/wallet_address/service' +import { createPaymentMethodHandlerService } from './payment-method/handler/service' +import { createAutoPeeringRoutes } from './payment-method/ilp/auto-peering/routes' +import { createAutoPeeringService } from './payment-method/ilp/auto-peering/service' +import { createConnectorService } from './payment-method/ilp/connector' import { - createIlpPlugin, IlpPlugin, - IlpPluginOptions + IlpPluginOptions, + createIlpPlugin } from './payment-method/ilp/ilp_plugin' import { createHttpTokenService } from './payment-method/ilp/peer-http-token/service' -import { createAssetService } from './asset/service' -import { createAccountingService as createTigerbeetleAccountingService } from './accounting/tigerbeetle/service' -import { createAccountingService as createPsqlAccountingService } from './accounting/psql/service' import { createPeerService } from './payment-method/ilp/peer/service' -import { createAuthServerService } from './open_payments/authServer/service' -import { createGrantService } from './open_payments/grant/service' +import { createIlpPaymentService } from './payment-method/ilp/service' import { createSPSPRoutes } from './payment-method/ilp/spsp/routes' -import { createWalletAddressService } from './open_payments/wallet_address/service' -import { createWalletAddressKeyRoutes } from './open_payments/wallet_address/key/routes' -import { createWalletAddressRoutes } from './open_payments/wallet_address/routes' -import { createIncomingPaymentRoutes } from './open_payments/payment/incoming/routes' -import { createIncomingPaymentService } from './open_payments/payment/incoming/service' -import { StreamServer } from '@interledger/stream-receiver' -import { createWebhookService } from './webhook/service' -import { createConnectorService } from './payment-method/ilp/connector' -import { createOpenAPI } from '@interledger/openapi' -import { createAuthenticatedClient as createOpenPaymentsClient } 
from '@interledger/open-payments' import { createStreamCredentialsService } from './payment-method/ilp/stream-credentials/service' -import { createWalletAddressKeyService } from './open_payments/wallet_address/key/service' -import { createReceiverService } from './open_payments/receiver/service' -import { createRemoteIncomingPaymentService } from './open_payments/payment/incoming_remote/service' -import { createCombinedPaymentService } from './open_payments/payment/combined/service' -import { createFeeService } from './fee/service' -import { createAutoPeeringService } from './payment-method/ilp/auto-peering/service' -import { createAutoPeeringRoutes } from './payment-method/ilp/auto-peering/routes' -import axios from 'axios' -import { createIlpPaymentService } from './payment-method/ilp/service' -import { createPaymentMethodHandlerService } from './payment-method/handler/service' +import { createRatesService } from './rates/service' +import { TelemetryService, createTelemetryService } from './telemetry/service' +import { createWebhookService } from './webhook/service' BigInt.prototype.toJSON = function () { return this.toString() @@ -127,6 +128,40 @@ export function initIocContainer( replica_addresses: config.tigerbeetleReplicaAddresses }) }) + + container.singleton('ratesService', async (deps) => { + const config = await deps.use('config') + return createRatesService({ + logger: await deps.use('logger'), + exchangeRatesUrl: config.exchangeRatesUrl, + exchangeRatesLifetime: config.exchangeRatesLifetime + }) + }) + + if (config.enableTelemetry) { + container.singleton('internalRatesService', async (deps) => { + return createRatesService({ + logger: await deps.use('logger'), + exchangeRatesUrl: config.telemetryExchangeRatesUrl, + exchangeRatesLifetime: config.telemetryExchangeRatesLifetime + }) + }) + + container.singleton('telemetry', async (deps) => { + const config = await deps.use('config') + return createTelemetryService({ + logger: await deps.use('logger'), + 
aseRatesService: await deps.use('ratesService'), + internalRatesService: await deps.use('internalRatesService')!, + instanceName: config.instanceName, + collectorUrls: config.openTelemetryCollectors, + exportIntervalMillis: config.openTelemetryExportInterval, + baseAssetCode: 'USD', + baseScale: 4 + }) + }) + } + container.singleton('openApi', async () => { const resourceServerSpec = await createOpenAPI( path.resolve(__dirname, './openapi/resource-server.yaml') @@ -184,11 +219,17 @@ export function initIocContainer( const knex = await deps.use('knex') const config = await deps.use('config') + let telemetry: TelemetryService | undefined + if (config.enableTelemetry && config.openTelemetryCollectors.length > 0) { + telemetry = await deps.use('telemetry') + } + if (config.useTigerbeetle) { const tigerbeetle = await deps.use('tigerbeetle') return createTigerbeetleAccountingService({ logger, + telemetry, knex, tigerbeetle, withdrawalThrottleDelay: config.withdrawalThrottleDelay @@ -197,6 +238,7 @@ export function initIocContainer( return createPsqlAccountingService({ logger, + telemetry, knex, withdrawalThrottleDelay: config.withdrawalThrottleDelay }) @@ -314,15 +356,6 @@ export function initIocContainer( }) }) - container.singleton('ratesService', async (deps) => { - const config = await deps.use('config') - return createRatesService({ - logger: await deps.use('logger'), - exchangeRatesUrl: config.exchangeRatesUrl, - exchangeRatesLifetime: config.exchangeRatesLifetime - }) - }) - container.singleton('walletAddressKeyService', async (deps) => { return createWalletAddressKeyService({ logger: await deps.use('logger'), @@ -330,18 +363,6 @@ export function initIocContainer( }) }) - container.singleton('makeIlpPlugin', async (deps) => { - const connectorApp = await deps.use('connectorApp') - return ({ - sourceAccount, - unfulfillable = false - }: IlpPluginOptions): IlpPlugin => { - return createIlpPlugin((data: Buffer): Promise => { - return 
connectorApp.handleIlpData(sourceAccount, unfulfillable, data) - }) - } - }) - container.singleton('connectorApp', async (deps) => { const config = await deps.use('config') return await createConnectorService({ @@ -357,6 +378,19 @@ export function initIocContainer( }) }) + container.singleton('makeIlpPlugin', async (deps) => { + const connectorApp = await deps.use('connectorApp') + + return ({ + sourceAccount, + unfulfillable = false + }: IlpPluginOptions): IlpPlugin => { + return createIlpPlugin((data: Buffer): Promise => { + return connectorApp.handleIlpData(sourceAccount, unfulfillable, data) + }) + } + }) + container.singleton('combinedPaymentService', async (deps) => { return await createCombinedPaymentService({ logger: await deps.use('logger'), @@ -433,6 +467,7 @@ export function initIocContainer( }) container.singleton('outgoingPaymentService', async (deps) => { + const config = await deps.use('config') return await createOutgoingPaymentService({ logger: await deps.use('logger'), knex: await deps.use('knex'), @@ -442,9 +477,13 @@ export function initIocContainer( 'paymentMethodHandlerService' ), peerService: await deps.use('peerService'), - walletAddressService: await deps.use('walletAddressService') + walletAddressService: await deps.use('walletAddressService'), + telemetry: config.enableTelemetry + ? 
await deps.use('telemetry') + : undefined }) }) + container.singleton('outgoingPaymentRoutes', async (deps) => { return createOutgoingPaymentRoutes({ config: await deps.use('config'), @@ -469,6 +508,11 @@ export const gracefulShutdown = async ( tigerbeetle.destroy() const redis = await container.use('redis') redis.disconnect() + + const telemetry = await container.use('telemetry') + if (telemetry) { + await telemetry.shutdown() + } } export const start = async ( diff --git a/packages/backend/src/open_payments/payment/outgoing/lifecycle.ts b/packages/backend/src/open_payments/payment/outgoing/lifecycle.ts index b98aa815b7..2109731139 100644 --- a/packages/backend/src/open_payments/payment/outgoing/lifecycle.ts +++ b/packages/backend/src/open_payments/payment/outgoing/lifecycle.ts @@ -80,6 +80,13 @@ export async function handleSending( finalDebitAmount: maxDebitAmount, finalReceiveAmount: maxReceiveAmount }) + deps.telemetry + ?.getOrCreateMetric('transactions_total', { + description: 'Count of funded transactions' + }) + .add(1, { + source: deps.telemetry.getInstanceName() + }) await handleCompleted(deps, payment) } diff --git a/packages/backend/src/open_payments/payment/outgoing/service.ts b/packages/backend/src/open_payments/payment/outgoing/service.ts index 0826769f44..5449507e52 100644 --- a/packages/backend/src/open_payments/payment/outgoing/service.ts +++ b/packages/backend/src/open_payments/payment/outgoing/service.ts @@ -35,6 +35,7 @@ import { Interval } from 'luxon' import { knex } from 'knex' import { AccountAlreadyExistsError } from '../../../accounting/errors' import { PaymentMethodHandlerService } from '../../../payment-method/handler/service' +import { TelemetryService } from '../../../telemetry/service' export interface OutgoingPaymentService extends WalletAddressSubresourceService { @@ -54,6 +55,7 @@ export interface ServiceDependencies extends BaseService { peerService: PeerService paymentMethodHandlerService: PaymentMethodHandlerService 
walletAddressService: WalletAddressService + telemetry?: TelemetryService } export async function createOutgoingPaymentService( diff --git a/packages/backend/src/payment-method/ilp/connector/core/rafiki.ts b/packages/backend/src/payment-method/ilp/connector/core/rafiki.ts index 05081ee2b8..f5ed36ba9b 100644 --- a/packages/backend/src/payment-method/ilp/connector/core/rafiki.ts +++ b/packages/backend/src/payment-method/ilp/connector/core/rafiki.ts @@ -1,18 +1,11 @@ import * as http from 'http' /* eslint-disable @typescript-eslint/no-explicit-any */ -import Koa, { Middleware } from 'koa' +import { StreamServer } from '@interledger/stream-receiver' +import { Errors } from 'ilp-packet' import { Redis } from 'ioredis' +import Koa, { Middleware } from 'koa' import { Logger } from 'pino' -import { Errors } from 'ilp-packet' -import { StreamServer } from '@interledger/stream-receiver' //import { Router } from './services/router' -import { - createIlpPacketMiddleware, - ZeroCopyIlpPrepare, - IlpResponse -} from './middleware/ilp-packet' -import { createTokenAuthMiddleware } from './middleware' -import { RatesService } from '../../../../rates/service' import { CreateAccountError, TransferError @@ -23,9 +16,16 @@ import { Transaction } from '../../../../accounting/service' import { AssetOptions } from '../../../../asset/service' -import { WalletAddressService } from '../../../../open_payments/wallet_address/service' import { IncomingPaymentService } from '../../../../open_payments/payment/incoming/service' +import { WalletAddressService } from '../../../../open_payments/wallet_address/service' +import { RatesService } from '../../../../rates/service' import { PeerService } from '../../peer/service' +import { createTokenAuthMiddleware } from './middleware' +import { + IlpResponse, + ZeroCopyIlpPrepare, + createIlpPacketMiddleware +} from './middleware/ilp-packet' // Model classes that represent an Interledger sender, receiver, or // connector SHOULD implement this 
ConnectorAccount interface. @@ -159,6 +159,7 @@ export class Rafiki { get walletAddresses(): WalletAddressService { return config.walletAddresses }, + logger } @@ -173,6 +174,7 @@ export class Rafiki { ): Promise { const prepare = new ZeroCopyIlpPrepare(rawPrepare) const response = new IlpResponse() + await this.routes( { request: { prepare, rawPrepare }, diff --git a/packages/backend/src/payment-method/ilp/connector/index.ts b/packages/backend/src/payment-method/ilp/connector/index.ts index b7fdfc9229..c46f91552f 100644 --- a/packages/backend/src/payment-method/ilp/connector/index.ts +++ b/packages/backend/src/payment-method/ilp/connector/index.ts @@ -1,32 +1,32 @@ -import { Redis } from 'ioredis' import { StreamServer } from '@interledger/stream-receiver' +import { Redis } from 'ioredis' +import { AccountingService } from '../../../accounting/service' +import { IncomingPaymentService } from '../../../open_payments/payment/incoming/service' +import { WalletAddressService } from '../../../open_payments/wallet_address/service' +import { RatesService } from '../../../rates/service' +import { BaseService } from '../../../shared/baseService' +import { PeerService } from '../peer/service' import { - createApp, - Rafiki, ILPContext, ILPMiddleware, + Rafiki, + createAccountMiddleware, + createApp, createBalanceMiddleware, - createIncomingErrorHandlerMiddleware, - createIldcpMiddleware, - createStreamController, - createOutgoingExpireMiddleware, createClientController, + createIldcpMiddleware, + createIncomingErrorHandlerMiddleware, createIncomingMaxPacketAmountMiddleware, createIncomingRateLimitMiddleware, createIncomingThroughputMiddleware, + createOutgoingExpireMiddleware, createOutgoingReduceExpiryMiddleware, createOutgoingThroughputMiddleware, createOutgoingValidateFulfillmentMiddleware, - createAccountMiddleware, - createStreamAddressMiddleware + createStreamAddressMiddleware, + createStreamController } from './core' -import { AccountingService } from 
'../../../accounting/service' -import { WalletAddressService } from '../../../open_payments/wallet_address/service' -import { IncomingPaymentService } from '../../../open_payments/payment/incoming/service' -import { PeerService } from '../peer/service' -import { RatesService } from '../../../rates/service' -import { BaseService } from '../../../shared/baseService' interface ServiceDependencies extends BaseService { redis: Redis @@ -76,6 +76,7 @@ export async function createConnectorService({ // Local pay createBalanceMiddleware(), + // Outgoing Rules createStreamController(), createOutgoingThroughputMiddleware(), diff --git a/packages/backend/src/rates/service.ts b/packages/backend/src/rates/service.ts index 150b246376..5a6ed7322e 100644 --- a/packages/backend/src/rates/service.ts +++ b/packages/backend/src/rates/service.ts @@ -29,6 +29,10 @@ export enum ConvertError { InvalidDestinationPrice = 'InvalidDestinationPrice' } +// eslint-disable-next-line @typescript-eslint/no-explicit-any, @typescript-eslint/explicit-module-boundary-types +export const isConvertError = (o: any): o is ConvertError => + Object.values(ConvertError).includes(o) + export function createRatesService(deps: ServiceDependencies): RatesService { return new RatesServiceImpl(deps) } diff --git a/packages/backend/src/telemetry/privacy.test.ts b/packages/backend/src/telemetry/privacy.test.ts new file mode 100644 index 0000000000..cd1e531b60 --- /dev/null +++ b/packages/backend/src/telemetry/privacy.test.ts @@ -0,0 +1,105 @@ +import { privacy } from './privacy' +describe('Privacy functions', () => { + const clipParams = { + minBucketSize: 1000, + maxBucketSize: 10000 + } + + afterEach(() => { + jest.unmock('./privacy') + }) + + test('test laplace distributuin math', () => { + const scale = 0.5 + jest.spyOn(Math, 'random').mockReturnValueOnce(0.25) + const noise1 = privacy.generateLaplaceNoise(scale) + + jest.spyOn(Math, 'random').mockReturnValueOnce(0.75) + const noise2 = 
privacy.generateLaplaceNoise(scale) + + expect(noise1).toBe(-0.34657359027997264) + expect(noise2).toBe(0.34657359027997264) + }) + + test('computePrivacyParameter should return 0 when sensitivity is 0', () => { + const sensitivity = 0 + const privacyParameter = privacy.computePrivacyParameter(sensitivity) + expect(privacyParameter).toBe(0) + }) + + test('computePrivacyParameter should return a non-zero value when sensitivity is non-zero', () => { + const sensitivity = 1 + const privacyParameter = privacy.computePrivacyParameter(sensitivity) + expect(privacyParameter).toBe(0.1) + }) + + test('roundValue should return minBucketSize when rawValue is very small', () => { + const rawValue = 10 + const bucketSize = 1000 + const roundedValue = privacy.roundValue(rawValue, bucketSize, clipParams) + expect(roundedValue).toBe(clipParams.minBucketSize) + }) + + test('roundValue should return maxBucketSize when rawValue is very large', () => { + const rawValue = 1000000 + const bucketSize = 1000 + const roundedValue = privacy.roundValue(rawValue, bucketSize, clipParams) + expect(roundedValue).toBe(clipParams.maxBucketSize) + }) + + test('roundValue should return a number within the specified range', () => { + const rawValue = 5000 + const bucketSize = 1000 + const roundedValue = privacy.roundValue(rawValue, bucketSize, clipParams) + expect(roundedValue).toBeGreaterThanOrEqual(clipParams.minBucketSize) + expect(roundedValue).toBeLessThanOrEqual(clipParams.maxBucketSize) + }) + + test('getBucketSize should return maxBucketSize when rawValue is very large', () => { + const rawValue = 1000000 + const bucketSize = privacy.getBucketSize(rawValue, clipParams) + expect(bucketSize).toBe(clipParams.maxBucketSize) + }) + + test('getBucketSize should return minBucketSize when rawValue is very small', () => { + const rawValue = 10 + const bucketSize = privacy.getBucketSize(rawValue, clipParams) + expect(bucketSize).toBe(clipParams.minBucketSize) + }) + + test('getBucketSize should return 
a number within the specified range', () => { + const rawValue = 5000 + const bucketSize = privacy.getBucketSize(rawValue, clipParams) + expect(bucketSize).toBeGreaterThanOrEqual(clipParams.minBucketSize) + expect(bucketSize).toBeLessThanOrEqual(clipParams.maxBucketSize) + }) + + test('applyPrivacy should call all the necessary functions with the correct arguments', () => { + const rawValue = 5000 + + const mockPrivacy = { + ...privacy, + getBucketSize: jest.fn().mockReturnValue(1000), + generateLaplaceNoise: jest.fn().mockReturnValue(0), + computePrivacyParameter: jest.fn().mockReturnValue(0.1), + roundValue: jest.fn().mockReturnValue(500) + } + + const applyPrivacy = privacy.applyPrivacy.bind(mockPrivacy) + + const result = applyPrivacy(rawValue, clipParams) + + expect(mockPrivacy.getBucketSize).toHaveBeenCalledWith(rawValue, clipParams) + expect(mockPrivacy.computePrivacyParameter).toHaveBeenCalledWith( + Math.max(500 / 10, 1000) + ) + expect(mockPrivacy.generateLaplaceNoise).toHaveBeenCalledWith(0.1) + expect(mockPrivacy.roundValue).toHaveBeenCalledWith( + rawValue, + 1000, + clipParams + ) + + expect(result).toBe(500) + }) +}) diff --git a/packages/backend/src/telemetry/privacy.ts b/packages/backend/src/telemetry/privacy.ts new file mode 100644 index 0000000000..14ccafe942 --- /dev/null +++ b/packages/backend/src/telemetry/privacy.ts @@ -0,0 +1,74 @@ +export type ClipParams = { + minBucketSize: number + maxBucketSize: number +} + +export const privacy = { + getBucketSize: function (rawValue: number, clip: ClipParams): number { + const { minBucketSize, maxBucketSize } = clip + + // Base parameter is used in the logarithimc function for calculating the bucket size when the rawValue exceeds the threshold. + // Increasing the base would result in larger bucket sizes. + const base = 2 + // The scale parameter is used in both the linear and logarithmic functions for calculating the bucket size. + // Increasing the scale would result in larger bucket sizes. 
+ const scale = 5000 + // Used to determine when to switch from linear to logarithmic function + // Increasing the threshold would result in smaller bucket sizes. + const threshold = 20000 + + let bucketSize + if (rawValue < threshold) { + bucketSize = Math.round(rawValue / scale) * scale + } else { + bucketSize = + Math.pow(base, Math.ceil(Math.log(rawValue / scale) / Math.log(base))) * + scale + } + + return Math.max(minBucketSize, Math.min(bucketSize, maxBucketSize)) + }, + + generateLaplaceNoise: function (scale: number): number { + const u = Math.random() - 0.5 + return -scale * Math.sign(u) * Math.log(1 - 2 * Math.abs(u)) + }, + + computePrivacyParameter: function (sensitivity: number): number { + return sensitivity * 0.1 + }, + + roundValue: function ( + rawValue: number, + bucketSize: number, + clip: ClipParams + ): number { + rawValue = Math.min(rawValue, clip.maxBucketSize) + rawValue = Math.max(rawValue, clip.minBucketSize) + const lowerBound = Math.floor(rawValue / bucketSize) * bucketSize + const upperBound = Math.ceil(rawValue / bucketSize) * bucketSize + const median = (lowerBound + upperBound) / 2 + const roundedValue = rawValue <= median ? 
lowerBound : upperBound + return Math.max(roundedValue, bucketSize / 2) + }, + + applyPrivacy: function ( + rawValue: number, + clip: ClipParams = { + minBucketSize: 2500, + maxBucketSize: 10000000 + } + ): number { + const bucketSize = this.getBucketSize(rawValue, clip) + let roundedValue = this.roundValue(rawValue, bucketSize, clip) + const privacyParameter = this.computePrivacyParameter( + Math.max(roundedValue / 10, bucketSize) + ) + const laplaceNoise = this.generateLaplaceNoise(privacyParameter) + roundedValue += Math.round(laplaceNoise) + if (roundedValue === 0) { + roundedValue = bucketSize / 2 + } + return roundedValue + } +} diff --git a/packages/backend/src/telemetry/service.test.ts b/packages/backend/src/telemetry/service.test.ts new file mode 100644 index 0000000000..08616ee218 --- /dev/null +++ b/packages/backend/src/telemetry/service.test.ts @@ -0,0 +1,112 @@ +import { IocContract } from '@adonisjs/fold' +import { initIocContainer } from '..' +import { AppServices } from '../app' +import { Config } from '../config/app' +import { ConvertError, RatesService } from '../rates/service' +import { TestContainer, createTestApp } from '../tests/app' +import { mockCounter } from '../tests/telemetry' +import { TelemetryService } from './service' + +jest.mock('@opentelemetry/api', () => ({ + ...jest.requireActual('@opentelemetry/api'), + metrics: { + setGlobalMeterProvider: jest.fn(), + getMeter: jest.fn().mockReturnValue({ + createCounter: jest.fn().mockImplementation(() => mockCounter) + }) + } +})) + +jest.mock('@opentelemetry/resources', () => ({ Resource: jest.fn() })) + +jest.mock('@opentelemetry/sdk-metrics', () => ({ + MeterProvider: jest.fn().mockImplementation(() => ({ + shutdown: jest.fn(), + addMetricReader: jest.fn() + })) +})) + +describe('TelemetryServiceImpl', () => { + let deps: IocContract + let appContainer: TestContainer + let telemetryService: TelemetryService + let aseRatesService: RatesService + let internalRatesService: RatesService + + 
beforeAll(async (): Promise => { + deps = initIocContainer({ + ...Config, + enableTelemetry: true, + telemetryExchangeRatesUrl: 'http://example-rates.com', + telemetryExchangeRatesLifetime: 100, + openTelemetryCollectors: [] + }) + + appContainer = await createTestApp(deps) + telemetryService = await deps.use('telemetry')! + aseRatesService = await deps.use('ratesService')! + internalRatesService = await deps.use('internalRatesService')! + }) + + afterAll(async (): Promise => { + await appContainer.shutdown() + }) + + it('should create a counter when getOrCreate is called for a new metric', () => { + const counter = telemetryService.getOrCreateMetric('testMetric') + expect(counter).toBe(mockCounter) + }) + + it('should return an existing counter when getOrCreate is called for an existing metric', () => { + const existingCounter = telemetryService.getOrCreateMetric('existingMetric') + const retrievedCounter = + telemetryService.getOrCreateMetric('existingMetric') + expect(retrievedCounter).toBe(existingCounter) + }) + + it('should return the instance name when calling getInstanceName', () => { + const serviceName = telemetryService.getInstanceName() + + expect(serviceName).toBe('Rafiki') + }) + + describe('conversion', () => { + it('should try to convert using aseRatesService and fallback to internalRatesService', async () => { + const aseConvertSpy = jest + .spyOn(aseRatesService, 'convert') + .mockImplementation(() => + Promise.resolve(ConvertError.InvalidDestinationPrice) + ) + + const internalConvertSpy = jest + .spyOn(internalRatesService, 'convert') + .mockImplementation(() => Promise.resolve(10000n)) + + const converted = await telemetryService.convertAmount({ + sourceAmount: 100n, + sourceAsset: { code: 'USD', scale: 2 } + }) + + expect(aseConvertSpy).toHaveBeenCalled() + expect(internalConvertSpy).toHaveBeenCalled() + expect(converted).toBe(10000n) + }) + + it('should not call the fallback internalRatesService if aseRatesService call is successful', async 
() => { + const aseConvertSpy = jest + .spyOn(aseRatesService, 'convert') + .mockImplementation(() => Promise.resolve(500n)) + + const internalConvertSpy = jest.spyOn(internalRatesService, 'convert') + + const converted = await telemetryService.convertAmount({ + sourceAmount: 100n, + sourceAsset: { code: 'USD', scale: 2 } + }) + + expect(aseConvertSpy).toHaveBeenCalled() + expect(internalConvertSpy).not.toHaveBeenCalled() + expect(converted).toBe(500n) + }) + }) +}) diff --git a/packages/backend/src/telemetry/service.ts b/packages/backend/src/telemetry/service.ts new file mode 100644 index 0000000000..386b8628c8 --- /dev/null +++ b/packages/backend/src/telemetry/service.ts @@ -0,0 +1,145 @@ +import { Counter, MetricOptions, metrics } from '@opentelemetry/api' +import { OTLPMetricExporter } from '@opentelemetry/exporter-metrics-otlp-grpc' +import { Resource } from '@opentelemetry/resources' +import { + MeterProvider, + PeriodicExportingMetricReader +} from '@opentelemetry/sdk-metrics' + +import { ConvertError, RatesService, isConvertError } from '../rates/service' +import { ConvertOptions } from '../rates/util' +import { BaseService } from '../shared/baseService' + +export interface TelemetryService { + shutdown(): void + getOrCreateMetric(name: string, options?: MetricOptions): Counter + getInstanceName(): string | undefined + getBaseAssetCode(): string + getBaseScale(): number + convertAmount( + convertOptions: Omit + ): Promise +} + +interface TelemetryServiceDependencies extends BaseService { + instanceName: string + collectorUrls: string[] + exportIntervalMillis?: number + aseRatesService: RatesService + internalRatesService: RatesService + baseAssetCode: string + baseScale: number +} + +const METER_NAME = 'Rafiki' +const SERVICE_NAME = 'RAFIKI_NETWORK' + +export function createTelemetryService( + deps: TelemetryServiceDependencies +): TelemetryService { + return new TelemetryServiceImpl(deps) +} + +class TelemetryServiceImpl implements TelemetryService { + 
private instanceName: string + private meterProvider?: MeterProvider + private internalRatesService: RatesService + private aseRatesService: RatesService + + private counters = new Map() + constructor(private deps: TelemetryServiceDependencies) { + // debug logger: + // diag.setLogger(new DiagConsoleLogger(), DiagLogLevel.DEBUG) + this.instanceName = deps.instanceName + this.internalRatesService = deps.internalRatesService + this.aseRatesService = deps.aseRatesService + + if (deps.collectorUrls.length === 0) { + deps.logger.info( + 'No collector URLs specified, metrics will not be exported' + ) + return + } + + this.meterProvider = new MeterProvider({ + resource: new Resource({ 'service.name': SERVICE_NAME }) + }) + + deps.collectorUrls.forEach((url) => { + const metricExporter = new OTLPMetricExporter({ + url: url + }) + + const metricReader = new PeriodicExportingMetricReader({ + exporter: metricExporter, + exportIntervalMillis: deps.exportIntervalMillis ?? 15000 + }) + + this.meterProvider?.addMetricReader(metricReader) + }) + + metrics.setGlobalMeterProvider(this.meterProvider) + } + + public async shutdown(): Promise { + await this.meterProvider?.shutdown() + } + + private createCounter( + name: string, + options: MetricOptions | undefined + ): Counter { + const counter = metrics.getMeter(METER_NAME).createCounter(name, options) + this.counters.set(name, counter) + return counter + } + + public getOrCreateMetric(name: string, options?: MetricOptions): Counter { + const existing = this.counters.get(name) + if (existing) { + return existing + } + return this.createCounter(name, options) + } + + public async convertAmount( + convertOptions: Omit + ) { + const destinationAsset = { + code: this.deps.baseAssetCode, + scale: this.deps.baseScale + } + + let converted = await this.aseRatesService.convert({ + ...convertOptions, + destinationAsset + }) + if (isConvertError(converted)) { + this.deps.logger.error( + `Unable to convert amount from provided rates: 
${converted}` + ) + converted = await this.internalRatesService.convert({ + ...convertOptions, + destinationAsset + }) + if (isConvertError(converted)) { + this.deps.logger.error( + `Unable to convert amount from internal rates: ${converted}` + ) + } + } + return converted + } + + public getInstanceName(): string | undefined { + return this.instanceName + } + + getBaseAssetCode(): string { + return this.deps.baseAssetCode + } + + getBaseScale(): number { + return this.deps.baseScale + } +} diff --git a/packages/backend/src/telemetry/transaction-amount.test.ts b/packages/backend/src/telemetry/transaction-amount.test.ts new file mode 100644 index 0000000000..cb4d73e357 --- /dev/null +++ b/packages/backend/src/telemetry/transaction-amount.test.ts @@ -0,0 +1,99 @@ +import { ValueType } from '@opentelemetry/api' +import { ConvertError } from '../rates/service' +import { Asset } from '../rates/util' +import { MockTelemetryService } from '../tests/telemetry' +import { privacy } from './privacy' +import { collectTelemetryAmount } from './transaction-amount' +import { Logger } from 'pino' + +const telemetryService = new MockTelemetryService() +const mockLogger = { error: jest.fn() } as unknown as Logger + +const asset: Asset = { code: 'USD', scale: 2 } + +describe('Telemetry Amount Collection', function () { + it('should not collect telemetry when conversion returns InvalidDestinationPrice', async () => { + const convertSpy = jest + .spyOn(telemetryService, 'convertAmount') + .mockImplementation(() => + Promise.resolve(ConvertError.InvalidDestinationPrice) + ) + + const addSpy = jest.spyOn( + telemetryService.getOrCreateMetric('transactions_amount', { + description: 'Amount sent through the network', + valueType: ValueType.DOUBLE + }), + 'add' + ) + + await collectTelemetryAmount(telemetryService, mockLogger, { + amount: 100n, + asset + }) + + expect(convertSpy).toHaveBeenCalled() + expect(addSpy).not.toHaveBeenCalled() + }) + it('should handle invalid amount by not 
collecting telemetry', async () => { + const convertSpy = jest + .spyOn(telemetryService, 'convertAmount') + .mockImplementation(() => + Promise.resolve(ConvertError.InvalidDestinationPrice) + ) + + await collectTelemetryAmount(telemetryService, mockLogger, { + amount: 0n, + asset + }) + + expect(convertSpy).not.toHaveBeenCalled() + }) + + it('should collect telemetry when conversion is successful', async () => { + const convertSpy = jest + .spyOn(telemetryService, 'convertAmount') + .mockImplementation(() => Promise.resolve(10000n)) + const addSpy = jest.spyOn( + telemetryService.getOrCreateMetric('transactions_amount', { + description: 'Amount sent through the network', + valueType: ValueType.DOUBLE + }), + 'add' + ) + jest.spyOn(privacy, 'applyPrivacy').mockReturnValue(12000) + + await collectTelemetryAmount(telemetryService, mockLogger, { + amount: 100n, + asset + }) + + expect(convertSpy).toHaveBeenCalled() + expect(addSpy).toHaveBeenCalledWith(12000) + }) + + it('should apply privacy to the collected telemetry', async () => { + const convertSpy = jest + .spyOn(telemetryService, 'convertAmount') + .mockImplementation(() => Promise.resolve(10000n)) + const privacySpy = jest + .spyOn(privacy, 'applyPrivacy') + .mockReturnValue(12000) + const addSpy = jest.spyOn( + telemetryService.getOrCreateMetric('transactions_amount', { + description: 'Amount sent through the network', + valueType: ValueType.DOUBLE + }), + 'add' + ) + + await collectTelemetryAmount(telemetryService, mockLogger, { + amount: 100n, + asset + }) + + expect(convertSpy).toHaveBeenCalled() + expect(privacySpy).toHaveBeenCalledWith(Number(10000n)) + expect(addSpy).toHaveBeenCalledWith(12000) + }) +}) diff --git a/packages/backend/src/telemetry/transaction-amount.ts b/packages/backend/src/telemetry/transaction-amount.ts new file mode 100644 index 0000000000..c07700ca8f --- /dev/null +++ b/packages/backend/src/telemetry/transaction-amount.ts @@ -0,0 +1,40 @@ +import { ValueType } from 
'@opentelemetry/api' +import { ConvertError } from '../rates/service' +import { Asset, ConvertOptions } from '../rates/util' +import { privacy } from './privacy' +import { TelemetryService } from './service' +import { Logger } from 'pino' + +export async function collectTelemetryAmount( + telemetryService: TelemetryService, + logger: Logger, + { amount, asset }: { amount: bigint; asset: Asset } +) { + if (!amount) { + return + } + + const convertOptions: Omit< + ConvertOptions, + 'exchangeRate' | 'destinationAsset' + > = { + sourceAmount: amount, + sourceAsset: { code: asset.code, scale: asset.scale } + } + + try { + const converted = await telemetryService.convertAmount(convertOptions) + if (converted === ConvertError.InvalidDestinationPrice) { + return + } + + telemetryService + .getOrCreateMetric('transactions_amount', { + description: 'Amount sent through the network', + valueType: ValueType.DOUBLE + }) + .add(privacy.applyPrivacy(Number(converted))) + } catch (e) { + logger.error(e, `Unable to collect telemetry`) + } +} diff --git a/packages/backend/src/tests/app.ts b/packages/backend/src/tests/app.ts index 287aec89ab..11a4e38a1b 100644 --- a/packages/backend/src/tests/app.ts +++ b/packages/backend/src/tests/app.ts @@ -1,20 +1,20 @@ -import Axios from 'axios' -import createLogger from 'pino' -import { Knex } from 'knex' -import nock from 'nock' -import fetch from 'cross-fetch' import { IocContract } from '@adonisjs/fold' import { ApolloClient, + ApolloLink, InMemoryCache, NormalizedCacheObject, - createHttpLink, - ApolloLink + createHttpLink } from '@apollo/client' -import { onError } from '@apollo/client/link/error' import { setContext } from '@apollo/client/link/context' +import { onError } from '@apollo/client/link/error' +import Axios from 'axios' +import fetch from 'cross-fetch' +import { Knex } from 'knex' +import nock from 'nock' +import createLogger from 'pino' -import { start, gracefulShutdown } from '..' +import { gracefulShutdown, start } from '..' 
import { App, AppServices } from '../app' export const testAccessToken = 'test-app-access' diff --git a/packages/backend/src/tests/telemetry.ts b/packages/backend/src/tests/telemetry.ts new file mode 100644 index 0000000000..de7b204290 --- /dev/null +++ b/packages/backend/src/tests/telemetry.ts @@ -0,0 +1,61 @@ +import { Attributes, Counter, MetricOptions } from '@opentelemetry/api' +import { TelemetryService } from '../telemetry/service' +import { ConvertError, Rates, RatesService } from '../rates/service' +import { ConvertOptions } from '../rates/util' + +export const mockCounter = { add: jest.fn() } as Counter + +export class MockRatesService implements RatesService { + async convert(): Promise { + return BigInt(10000) + } + async rates(): Promise { + return { + base: 'USD', + rates: { + BGN: 0.55, + BNB: 249.39, + BTC: 40829.24, + ETH: 2162.15, + EUR: 1.08, + GBP: 1.25, + RON: 0.22, + USD: 1, + XRP: 0.5994 + } + } + } +} + +export class MockTelemetryService implements TelemetryService { + public aseRatesService = new MockRatesService() + public internalRatesService = new MockRatesService() + public getOrCreateMetric( + _name: string, + _options?: MetricOptions | undefined + ): Counter { + return mockCounter + } + public getInstanceName(): string | undefined { + return 'serviceName' + } + public shutdown(): void {} + + public async convertAmount( + _convertOptions: Omit + ): Promise { + let converted = await this.aseRatesService.convert() + if (typeof converted !== 'bigint' && converted in ConvertError) { + converted = await this.internalRatesService.convert() + } + return Promise.resolve(converted) + } + + public getBaseAssetCode(): string { + return 'USD' + } + + public getBaseScale(): number { + return 4 + } +} diff --git a/packages/documentation/astro.config.mjs b/packages/documentation/astro.config.mjs index df6772f3f0..164b7f9343 100644 --- a/packages/documentation/astro.config.mjs +++ b/packages/documentation/astro.config.mjs @@ -143,6 +143,18 @@ export 
default defineConfig({ } ] }, + { + label: 'Telemetry', + collapsed: true, + items: [ + { label: 'Overview', link: 'telemetry/overview' }, + { label: 'Privacy', link: 'telemetry/privacy' }, + { + label: 'Deploying Custom Telemetry', + link: 'telemetry/integrating' + } + ] + }, { label: 'Local Playground', collapsed: true, diff --git a/packages/documentation/public/img/telemetry-architecture.png b/packages/documentation/public/img/telemetry-architecture.png new file mode 100644 index 0000000000..57958d8865 Binary files /dev/null and b/packages/documentation/public/img/telemetry-architecture.png differ diff --git a/packages/documentation/src/content/docs/telemetry/integrating.md b/packages/documentation/src/content/docs/telemetry/integrating.md new file mode 100644 index 0000000000..8bbd89b38d --- /dev/null +++ b/packages/documentation/src/content/docs/telemetry/integrating.md @@ -0,0 +1,92 @@ +--- +title: Deploying Custom Telemetry Service +--- + +Rafiki allows for integrating [Account Servicing Entities](/reference/glossary#account-servicing-entity) (ASE) to build their own telemetry solution based on the [OpenTelemetry](https://opentelemetry.io/) standardized metrics format that Rafiki exposes. + +In order to do so, the integrating ASE must deploy its own OpenTelemetry collector that should act as a sidecar container to Rafiki. It needs to provide the OpenTelemetry collector's ingest endpoint so that Rafiki can start sending metrics to it. + +## Rafiki Telemetry Environment Variables + +- `ENABLE_TELEMETRY`: boolean, defaults to `true`. Enables the telemetry service on Rafiki. +- `LIVENET`: boolean. Should be set to `true` on production environments dealing with real money. 
If it is not set, it will default to `false`, and metrics will get sent to the testnet otel-collector +- `OPEN_TELEMETRY_COLLECTOR_URLS`: CSV of URLs for Open Telemetry collectors (e.g., `http://otel-collector-NLB-e3172ff9d2f4bc8a.elb.eu-west-2.amazonaws.com:4317,http://happy-life-otel-collector:4317`). +- `OPEN_TELEMETRY_EXPORT_INTERVAL`: number in milliseconds, defaults to `15000`. Defines how often the instrumented Rafiki instance should send metrics. +- `TELEMETRY_EXCHANGE_RATES_URL`: string URL, defaults to `https://telemetry-exchange-rates.s3.amazonaws.com/exchange-rates-usd.json`. It defines the endpoint that Rafiki will query for exchange rates, as a fallback when ASE does not [provide them](/integration/getting-started/#exchange-rates). If set, the response format of the external exchange rates API should be of type Rates, as the rates service expects. + The default endpoint set here points to a public S3 that has the previously mentioned required format, updated daily. + +## Example Docker OpenTelemetry Collector Image and Configuration + +Example of Docker OpenTelemetry Collector image and configuration that integrates with Rafiki and sends data to a Prometheus remote write endpoint: + +(it can be tested in our [Local Playground](/playground/overview) setup, by also providing the environment variables listed above to happy-life-backend in the [docker-compose](https://github.com/interledger/rafiki/blob/main/localenv/happy-life-bank/docker-compose.yml)) + +#### Docker-compose config: + +```yaml +#Serves as example for optional local collector configuration +happy-life-otel-collector: + image: otel/opentelemetry-collector-contrib:latest + command: ['--config=/etc/otel-collector-config.yaml', ''] + environment: + - AWS_ACCESS_KEY_ID=${AWS_ACCESS_KEY_ID-''} + - AWS_SECRET_ACCESS_KEY=${AWS_SECRET_ACCESS_KEY-''} + volumes: + - ../collector/otel-collector-config.yaml:/etc/otel-collector-config.yaml + networks: + - rafiki + expose: + - 4317 + ports: + - 
'13132:13133' # health_check extension +``` + +#### OpenTelemetry OTEL collector config: + +[OTEL Collector config docs](https://opentelemetry.io/docs/collector/configuration/) + +```yaml +# Serves as example for the configuration of a local OpenTelemetry Collector that sends metrics to an AWS Managed Prometheus Workspace +# Sigv4auth required for AWS Prometheus Remote Write access (USER with access keys needed) + +extensions: + sigv4auth: + assume_role: + arn: 'arn:aws:iam::YOUR-ROLE:role/PrometheusRemoteWrite' + sts_region: 'YOUR-REGION' + +receivers: + otlp: + protocols: + grpc: + http: + cors: + allowed*origins: + - http://* + - https://\_ + +processors: + batch: + +exporters: + logging: + verbosity: 'normal' + prometheusremotewrite: + endpoint: 'https://aps-workspaces.YOUR-REGION.amazonaws.com/workspaces/ws-YOUR-WORKSPACE-IDENTIFIER/api/v1/remote_write' + auth: + authenticator: sigv4auth + +service: + telemetry: + logs: + level: 'debug' + metrics: + level: 'detailed' + address: 0.0.0.0:8888 + extensions: [sigv4auth] + pipelines: + metrics: + receivers: [otlp] + processors: [batch] + exporters: [logging, prometheusremotewrite] +``` diff --git a/packages/documentation/src/content/docs/telemetry/overview.md b/packages/documentation/src/content/docs/telemetry/overview.md new file mode 100644 index 0000000000..d96cbdfe28 --- /dev/null +++ b/packages/documentation/src/content/docs/telemetry/overview.md @@ -0,0 +1,74 @@ +--- +title: Overview +--- + +## Purpose + +The objective of the telemetry feature is to gather metrics and establish an infrastructure for visualizing valuable network insights. The metrics we at the Interledger Foundation collect include: + +- The total amount of money transferred via packet data within a specified time frame (daily, weekly, monthly). +- The number of transactions from outgoing payments that have been at least partially successful. +- The average amount of money held within the network per transaction. 
+ +We aim to track the growth of the network in terms of transaction sizes and the number of transactions processed. Our goal is to use these data for our own insights and to enable [Account Servicing Entities](/reference/glossary#account-servicing-entity) (ASEs) to gain their own insights. + +## Privacy and Optionality + +Privacy is a paramount concern for us. Rafiki's telemetry feature is designed to provide valuable network insights without violating privacy or aiding malicious ASEs. For more information, please [read the privacy docs](../privacy). + +The telemetry feature is optional for ASEs. + +## Architecture + +The architecture of the telemetry feature is illustrated below: + +![Telemetry architecture](/img/telemetry-architecture.png) + +## OpenTelemetry + +We have adopted [OpenTelemetry](https://opentelemetry.io/) to ensure compliance with a standardized framework that is compatible with a variety of tool suites. This allows clients to use their preferred tools for data analysis, while Rafiki is instrumented and observable through a standardized metrics format. + +## Telemetry ECS Cluster + +The Telemetry Replica service is hosted on AWS ECS Fargate and is configured for availability and load balancing of custom ADOT (AWS Distro for Opentelemetry) Collector ECS tasks. + +When ASEs opt for telemetry, metrics are sent to our Telemetry Service. To enable ASEs to build their own telemetry solutions, instrumented Rafiki can send data to multiple endpoints. This allows the integration of a local [Otel collector](https://opentelemetry.io/docs/collector/) container that can support custom requirements. Metrics communication is facilitated through [gRPC](https://grpc.io/). + +## Otel SDK - Rafiki Instrumentation + +The Opentelemetry SDK is integrated into Rafiki to create, collect, and export metrics. The SDK integrates seamlessly with the OTEL Collector. 
+ +## Prometheus - AMP + +We use Amazon's managed service for Prometheus (AMP) to collect data from the Telemetry cluster. + +**Note**: AMP offers limited configuration options and cannot crawl data outside of AWS. This limitation led us to adopt a push model, using prometheusRemoteWrite, instead of a pull model. For future development, we may consider hosting our own Prometheus. + +## Grafana - Grafana Cloud + +Grafana Cloud is used for data visualization dashboards. It offers multiple tools that extend Prometheus Promql. + +**Note**: We initially used Amazon hosted Grafana, but it did not meet our needs for embedding dashboards. Grafana Cloud offers a feature called “Public dashboards”, which allows us to share dashboards. However, embedding may still pose a challenge. + +## Exchange Rates + +For telemetry purposes, all amounts collected by instrumented Rafiki should be converted to a base currency. + +**Privacy Reasoning**: If only two ASEs are peered over a non-USD currency and we collect data in that currency, it would be easy to determine the volumes moved between those two ASEs. To maintain privacy, we convert all amounts to a base currency. + +If an ASE does not provide the necessary exchange rate for a transaction, the telemetry solution will still convert the amount to the base currency using external exchange rates. A Lambda function on AWS retrieves and stores these external exchange rates. It is triggered by a daily CloudWatch event and stores the rates in a public S3 Bucket. The S3 Bucket does not have versioning, and the data is overwritten daily to further ensure privacy. + +## Instrumentation + +Rafiki currently has two counter metrics. All data points (counter increases) are exported to collection endpoints at a configurable interval (default recommended to 15s). 
+ +Currently collected metrics: + +- `transactions_total` - Counter metric + - Description: “Count of funded transactions” + - This counter metric increases by 1 for each successfully sent transaction. +- `transactions_amount` - Counter metric + - Description: “Amount sent through the network”. + - This amount metric increases by the amount sent in each ILP packet. + +**Note**: The current implementation only collects metrics on the SENDING side of a transaction. Metrics for external open-payments transactions RECEIVED by a Rafiki instance in the network are not collected. diff --git a/packages/documentation/src/content/docs/telemetry/privacy.md b/packages/documentation/src/content/docs/telemetry/privacy.md new file mode 100644 index 0000000000..403f55fc28 --- /dev/null +++ b/packages/documentation/src/content/docs/telemetry/privacy.md @@ -0,0 +1,57 @@ +--- +title: Privacy +--- + +Rafiki telemetry is designed with a strong emphasis on privacy. The system anonymizes user data and refrains from collecting identifiable information. Since transactions can originate from any user to a Rafiki instance, the privacy measures are implemented directly at the source (each Rafiki instance). This means that at the individual level, the data is already anonymous as single Rafiki instances service transactions for multiple users. + +## Differential Privacy and Local Differential Privacy + +Differential Privacy is a system for publicly sharing information about a dataset by describing the patterns of groups within the dataset while withholding information about individuals in the dataset. Local Differential Privacy (LDP) is a variant of differential privacy where noise is added to each individual's data point before it is sent to the server. This ensures that the server never sees the actual data, providing a strong privacy guarantee. 
+ +## Rounding Technique and Bucketing + +In our implementation, we use a rounding technique that essentially aggregates multiple transactions into the same value, making them indistinguishable. This is achieved by dividing the transaction values into buckets and rounding the values to the nearest bucket. + +The bucket size is calculated based on the raw transaction value. For lower value transactions, which are expected to occur more frequently, the bucket sizes are determined linearly for higher granularity. However, after a certain threshold, the bucket size calculation switches to a logarithmic function to ensure privacy for higher value transactions, which are less frequent but pose greater privacy concerns. + +To handle outliers, a "clipping" technique is implemented, capping the buckets. Any value that exceeds a given threshold is placed in a single bucket. Conversely, any value that falls below a certain minimum is also placed in a single bucket. This ensures that both high and low outliers do not disproportionately affect the overall data, providing further privacy guarantees for these transactions. + +## Laplacian Distribution + +The Laplacian distribution is often used in differential privacy due to its double exponential decay property. This property ensures that a small change in the data will not significantly affect the probability distribution of the output, providing a strong privacy guarantee. + +To achieve Local Differential Privacy (LDP), noise is selected from the Laplacian distribution and added to the rounded values. The noise is generated based on a privacy parameter, which is calculated using the sensitivity of the function. + +The sensitivity of a function in differential privacy is the maximum amount that any single observation can change the output of the function. In this case, the sensitivity is considered to be the maximum of the rounded value and the bucket size. + +The privacy parameter is computed as one-tenth of the sensitivity. 
This parameter controls the trade-off between privacy and utility: a smaller privacy parameter means more privacy but less utility, and a larger privacy parameter means less privacy but more utility. + +The noise, selected from the Laplacian distribution, is then generated using this privacy parameter and added to the rounded value. If the resulting value is zero, it is set to half the bucket size to ensure that the noise does not completely obscure the transaction value. + +## Currency Conversion + +Another factor that obscures sensitive data is currency conversion. In cross-currency transactions, exchange rates are provided by [ASEs](/reference/glossary#account-servicing-entity) internally. As such, they cannot be correlated to an individual transaction. If the necessary rates are not provided or not available from the ASE, an external API for exchange rates is used. The obtained exchange rates are overwritten frequently in this case, with no versioning or history access. This introduces an additional layer of noise and further protects the privacy of the transactions. + +## References + +Rafiki's telemetry solution is a combination of techniques described in various white papers on privacy-preserving data collection. For more information, you can refer to the following papers: + +- [Local Differential Privacy for Human-Centered Computing](https://jwcn-eurasipjournals.springeropen.com/articles/10.1186/s13638-020-01675-8) +- [Collecting Telemetry Data Privately](https://www.microsoft.com/en-us/research/blog/collecting-telemetry-data-privately/) +- [Collecting Telemetry Data Privately - NeurIPS Publication](https://proceedings.neurips.cc/paper_files/paper/2017/file/253614bbac999b38b5b60cae531c4969-Paper.pdf) by Bolin Ding, Janardhan Kulkarni, Sergey Yekhanin from Microsoft Research. 
+- [RAPPOR: Randomized Aggregatable Privacy-Preserving Ordinal Response](https://static.googleusercontent.com/media/research.google.com/en//pubs/archive/42852.pdf) + +# Experimental Transaction Values when using the Algorithm + +The following table shows the values in the algorithm when running transactions for different amounts. The raw value increases as you go down the rows of the table. +(all values are in scale 4) +| Raw Value | Bucket Size | Rounded Value | Privacy Parameter | Laplace Noise | Final Value | +|-----------|-------------|---------------|-------------------|---------------|-------------| +| 8300 | 10000 | 10000 | 1000 | 2037 | 12037 | +| 13200 | 15000 | 15000 | 1500 | 1397 | 16397 | +| 147700 | 160000 | 160000 | 16000 | -27128 | 132872 | +| 1426100 | 2560000 | 2560000 | 256000 | -381571 | 2178429 | +| 1788200 | 2560000 | 2560000 | 256000 | 463842 | 3023842 | +| 90422400 | 10000000 | 90000000 | 1000000 | 2210649 | 92210649 | +| 112400400 | 10000000 | 100000000 | 1000000 | 407847 | 100407847 | +| 222290500 | 10000000 | 100000000 | 1000000 | -686149 | 99313851 | diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index fda482c85b..76165ec981 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -286,6 +286,18 @@ importers: '@koa/router': specifier: ^12.0.0 version: 12.0.0 + '@opentelemetry/api': + specifier: ^1.6.0 + version: 1.7.0 + '@opentelemetry/exporter-metrics-otlp-grpc': + specifier: ^0.43.0 + version: 0.43.0(@opentelemetry/api@1.7.0) + '@opentelemetry/resources': + specifier: ^1.17.0 + version: 1.18.1(@opentelemetry/api@1.7.0) + '@opentelemetry/sdk-metrics': + specifier: ^1.17.0 + version: 1.18.1(@opentelemetry/api@1.7.0) ajv: specifier: ^8.12.0 version: 8.12.0 @@ -3146,6 +3158,25 @@ packages: dependencies: graphql: 16.8.1 + /@grpc/grpc-js@1.9.11: + resolution: {integrity: sha512-QDhMfbTROOXUhLHMroow8f3EHiCKUOh6UwxMP5S3EuXMnWMNSVIhatGZRwkpg9OUTYdZPsDUVH3cOAkWhGFUJw==} + engines: {node: ^8.13.0 || >=10.10.0} + dependencies: + '@grpc/proto-loader': 
0.7.10 + '@types/node': 18.18.5 + dev: false + + /@grpc/proto-loader@0.7.10: + resolution: {integrity: sha512-CAqDfoaQ8ykFd9zqBDn4k6iWT9loLAlc2ETmDFS9JCD70gDcnA4L3AFEo2iV7KyAtAAHFW9ftq1Fz+Vsgq80RQ==} + engines: {node: '>=6'} + hasBin: true + dependencies: + lodash.camelcase: 4.3.0 + long: 5.2.3 + protobufjs: 7.2.5 + yargs: 17.7.2 + dev: false + /@headlessui/react@1.7.18(react-dom@18.2.0)(react@18.2.0): resolution: {integrity: sha512-4i5DOrzwN4qSgNsL4Si61VMkUcWbcSKueUV7sFhpHzQcSShdlHENE5+QBntMSRvHt8NyoFO2AGG8si9lq+w4zQ==} engines: {node: '>=10'} @@ -3686,6 +3717,187 @@ packages: which: 3.0.1 dev: true + /@opentelemetry/api-logs@0.43.0: + resolution: {integrity: sha512-0CXMOYPXgAdLM2OzVkiUfAL6QQwWVhnMfUXCqLsITY42FZ9TxAhZIHkoc4mfVxvPuXsBnRYGR8UQZX86p87z4A==} + engines: {node: '>=14'} + dependencies: + '@opentelemetry/api': 1.7.0 + dev: false + + /@opentelemetry/api@1.7.0: + resolution: {integrity: sha512-AdY5wvN0P2vXBi3b29hxZgSFvdhdxPB9+f0B6s//P9Q8nibRWeA3cHm8UmLpio9ABigkVHJ5NMPk+Mz8VCCyrw==} + engines: {node: '>=8.0.0'} + dev: false + + /@opentelemetry/core@1.17.0(@opentelemetry/api@1.7.0): + resolution: {integrity: sha512-tfnl3h+UefCgx1aeN2xtrmr6BmdWGKXypk0pflQR0urFS40aE88trnkOMc2HTJZbMrqEEl4HsaBeFhwLVXsrJg==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': '>=1.0.0 <1.7.0' + dependencies: + '@opentelemetry/api': 1.7.0 + '@opentelemetry/semantic-conventions': 1.17.0 + dev: false + + /@opentelemetry/core@1.18.1(@opentelemetry/api@1.7.0): + resolution: {integrity: sha512-kvnUqezHMhsQvdsnhnqTNfAJs3ox/isB0SVrM1dhVFw7SsB7TstuVa6fgWnN2GdPyilIFLUvvbTZoVRmx6eiRg==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': '>=1.0.0 <1.8.0' + dependencies: + '@opentelemetry/api': 1.7.0 + '@opentelemetry/semantic-conventions': 1.18.1 + dev: false + + /@opentelemetry/exporter-metrics-otlp-grpc@0.43.0(@opentelemetry/api@1.7.0): + resolution: {integrity: 
sha512-m7HtZAvfFt1YDjjzVf/kLr2pyuFth3NU3pfqs41zfYB5o/n/RbxRhVLphRzr6qLDccqsL0mxn1e6tkUuIn/Hfg==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': ^1.3.0 + dependencies: + '@grpc/grpc-js': 1.9.11 + '@opentelemetry/api': 1.7.0 + '@opentelemetry/core': 1.17.0(@opentelemetry/api@1.7.0) + '@opentelemetry/exporter-metrics-otlp-http': 0.43.0(@opentelemetry/api@1.7.0) + '@opentelemetry/otlp-grpc-exporter-base': 0.43.0(@opentelemetry/api@1.7.0) + '@opentelemetry/otlp-transformer': 0.43.0(@opentelemetry/api@1.7.0) + '@opentelemetry/resources': 1.17.0(@opentelemetry/api@1.7.0) + '@opentelemetry/sdk-metrics': 1.17.0(@opentelemetry/api@1.7.0) + dev: false + + /@opentelemetry/exporter-metrics-otlp-http@0.43.0(@opentelemetry/api@1.7.0): + resolution: {integrity: sha512-k0KHKLS/xEWI4e5xrsnHpRk7Adj7JSFbFeKF4ti1d9soek3y85ZC2fTzDQC+ysUYo/lccoAXGR/gjcYgQOe7pg==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': ^1.3.0 + dependencies: + '@opentelemetry/api': 1.7.0 + '@opentelemetry/core': 1.17.0(@opentelemetry/api@1.7.0) + '@opentelemetry/otlp-exporter-base': 0.43.0(@opentelemetry/api@1.7.0) + '@opentelemetry/otlp-transformer': 0.43.0(@opentelemetry/api@1.7.0) + '@opentelemetry/resources': 1.17.0(@opentelemetry/api@1.7.0) + '@opentelemetry/sdk-metrics': 1.17.0(@opentelemetry/api@1.7.0) + dev: false + + /@opentelemetry/otlp-exporter-base@0.43.0(@opentelemetry/api@1.7.0): + resolution: {integrity: sha512-LXNtRFVuPRXB9q0qdvrLikQ3NtT9Jmv255Idryz3RJPhOh/Fa03sBASQoj3D55OH3xazmA90KFHfhJ/d8D8y4A==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': ^1.0.0 + dependencies: + '@opentelemetry/api': 1.7.0 + '@opentelemetry/core': 1.17.0(@opentelemetry/api@1.7.0) + dev: false + + /@opentelemetry/otlp-grpc-exporter-base@0.43.0(@opentelemetry/api@1.7.0): + resolution: {integrity: sha512-oOpqtDJo9BBa1+nD6ID1qZ55ZdTwEwSSn2idMobw8jmByJKaanVLdr9SJKsn5T9OBqo/c5QY2brMf0TNZkobJQ==} + engines: {node: '>=14'} + peerDependencies: + 
'@opentelemetry/api': ^1.0.0 + dependencies: + '@grpc/grpc-js': 1.9.11 + '@opentelemetry/api': 1.7.0 + '@opentelemetry/core': 1.17.0(@opentelemetry/api@1.7.0) + '@opentelemetry/otlp-exporter-base': 0.43.0(@opentelemetry/api@1.7.0) + protobufjs: 7.2.5 + dev: false + + /@opentelemetry/otlp-transformer@0.43.0(@opentelemetry/api@1.7.0): + resolution: {integrity: sha512-KXYmgzWdVBOD5NvPmGW1nEMJjyQ8gK3N8r6pi4HvmEhTp0v4T13qDSax4q0HfsqmbPJR355oqQSJUnu1dHNutw==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': '>=1.3.0 <1.7.0' + dependencies: + '@opentelemetry/api': 1.7.0 + '@opentelemetry/api-logs': 0.43.0 + '@opentelemetry/core': 1.17.0(@opentelemetry/api@1.7.0) + '@opentelemetry/resources': 1.17.0(@opentelemetry/api@1.7.0) + '@opentelemetry/sdk-logs': 0.43.0(@opentelemetry/api-logs@0.43.0)(@opentelemetry/api@1.7.0) + '@opentelemetry/sdk-metrics': 1.17.0(@opentelemetry/api@1.7.0) + '@opentelemetry/sdk-trace-base': 1.17.0(@opentelemetry/api@1.7.0) + dev: false + + /@opentelemetry/resources@1.17.0(@opentelemetry/api@1.7.0): + resolution: {integrity: sha512-+u0ciVnj8lhuL/qGRBPeVYvk7fL+H/vOddfvmOeJaA1KC+5/3UED1c9KoZQlRsNT5Kw1FaK8LkY2NVLYfOVZQw==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': '>=1.0.0 <1.7.0' + dependencies: + '@opentelemetry/api': 1.7.0 + '@opentelemetry/core': 1.17.0(@opentelemetry/api@1.7.0) + '@opentelemetry/semantic-conventions': 1.17.0 + dev: false + + /@opentelemetry/resources@1.18.1(@opentelemetry/api@1.7.0): + resolution: {integrity: sha512-JjbcQLYMttXcIabflLRuaw5oof5gToYV9fuXbcsoOeQ0BlbwUn6DAZi++PNsSz2jjPeASfDls10iaO/8BRIPRA==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': '>=1.0.0 <1.8.0' + dependencies: + '@opentelemetry/api': 1.7.0 + '@opentelemetry/core': 1.18.1(@opentelemetry/api@1.7.0) + '@opentelemetry/semantic-conventions': 1.18.1 + dev: false + + /@opentelemetry/sdk-logs@0.43.0(@opentelemetry/api-logs@0.43.0)(@opentelemetry/api@1.7.0): + resolution: {integrity: 
sha512-JyJ2BBRKm37Mc4cSEhFmsMl5ASQn1dkGhEWzAAMSlhPtLRTv5PfvJwhR+Mboaic/eDLAlciwsgijq8IFlf6IgQ==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': '>=1.4.0 <1.7.0' + '@opentelemetry/api-logs': '>=0.39.1' + dependencies: + '@opentelemetry/api': 1.7.0 + '@opentelemetry/api-logs': 0.43.0 + '@opentelemetry/core': 1.17.0(@opentelemetry/api@1.7.0) + '@opentelemetry/resources': 1.17.0(@opentelemetry/api@1.7.0) + dev: false + + /@opentelemetry/sdk-metrics@1.17.0(@opentelemetry/api@1.7.0): + resolution: {integrity: sha512-HlWM27yGmYuwCoVRe3yg2PqKnIsq0kEF0HQgvkeDWz2NYkq9fFaSspR6kvjxUTbghAlZrabiqbgyKoYpYaXS3w==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': '>=1.3.0 <1.7.0' + dependencies: + '@opentelemetry/api': 1.7.0 + '@opentelemetry/core': 1.17.0(@opentelemetry/api@1.7.0) + '@opentelemetry/resources': 1.17.0(@opentelemetry/api@1.7.0) + lodash.merge: 4.6.2 + dev: false + + /@opentelemetry/sdk-metrics@1.18.1(@opentelemetry/api@1.7.0): + resolution: {integrity: sha512-TEFgeNFhdULBYiCoHbz31Y4PDsfjjxRp8Wmdp6ybLQZPqMNEb+dRq+XN8Xw3ivIgTaf9gYsomgV5ensX99RuEQ==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': '>=1.3.0 <1.8.0' + dependencies: + '@opentelemetry/api': 1.7.0 + '@opentelemetry/core': 1.18.1(@opentelemetry/api@1.7.0) + '@opentelemetry/resources': 1.18.1(@opentelemetry/api@1.7.0) + lodash.merge: 4.6.2 + dev: false + + /@opentelemetry/sdk-trace-base@1.17.0(@opentelemetry/api@1.7.0): + resolution: {integrity: sha512-2T5HA1/1iE36Q9eg6D4zYlC4Y4GcycI1J6NsHPKZY9oWfAxWsoYnRlkPfUqyY5XVtocCo/xHpnJvGNHwzT70oQ==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': '>=1.0.0 <1.7.0' + dependencies: + '@opentelemetry/api': 1.7.0 + '@opentelemetry/core': 1.17.0(@opentelemetry/api@1.7.0) + '@opentelemetry/resources': 1.17.0(@opentelemetry/api@1.7.0) + '@opentelemetry/semantic-conventions': 1.17.0 + dev: false + + /@opentelemetry/semantic-conventions@1.17.0: + resolution: {integrity: 
sha512-+fguCd2d8d2qruk0H0DsCEy2CTK3t0Tugg7MhZ/UQMvmewbZLNnJ6heSYyzIZWG5IPfAXzoj4f4F/qpM7l4VBA==} + engines: {node: '>=14'} + dev: false + + /@opentelemetry/semantic-conventions@1.18.1: + resolution: {integrity: sha512-+NLGHr6VZwcgE/2lw8zDIufOCGnzsA5CbQIMleXZTrgkBd0TanCX+MiDYJ1TOS4KL/Tqk0nFRxawnaYr6pkZkA==} + engines: {node: '>=14'} + dev: false + /@pagefind/darwin-arm64@1.0.3: resolution: {integrity: sha512-vsHDtvao3W4iFCxVc4S0BVhpj3E2MAoIVM7RmuQfGp1Ng22nGLRaMP6FguLO8TMabRJdvp4SVr227hL4WGKOHA==} cpu: [arm64] @@ -5501,7 +5713,6 @@ packages: engines: {node: '>=8'} dependencies: color-convert: 2.0.1 - dev: true /ansi-styles@5.2.0: resolution: {integrity: sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==} @@ -6470,7 +6681,6 @@ packages: string-width: 4.2.3 strip-ansi: 6.0.1 wrap-ansi: 7.0.0 - dev: true /clone@1.0.4: resolution: {integrity: sha512-JQHZ2QMW6l3aH/j6xCqQThY/9OH4D/9ls34cgkUBiEeocRTU04tHfKPBsUK1PqZCUQM7GiA0IIXJSuXHI64Kbg==} @@ -8753,7 +8963,6 @@ packages: /get-caller-file@2.0.5: resolution: {integrity: sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==} engines: {node: 6.* || 8.* || >= 10.*} - dev: true /get-intrinsic@1.2.1: resolution: {integrity: sha512-2DcsyfABl+gVHEfCOaTrWgyt+tb6MSEGmKq+kI5HwLbIYgjgmMcV8KQ41uaKz1xxUcn9tJtgFbQUEVcEbd0FYw==} @@ -10968,7 +11177,6 @@ packages: /lodash.camelcase@4.3.0: resolution: {integrity: sha512-TwuEnCnxbc3rAvhf/LbG7tJUDzhqXyFnv3dtzLOPgCG/hODL7WFnsbwktkD7yUV0RrreP/l1PALq/YSg6VvjlA==} - dev: true /lodash.clonedeep@4.5.0: resolution: {integrity: sha512-H5ZhCF25riFd9uB5UCkVKo61m3S/xZk1x4wA6yp/L3RFP6Z/eHH1ymQcGLo7J3GMPfm0V/7m1tryHuGVxpqEBQ==} @@ -11049,6 +11257,10 @@ packages: /long@4.0.0: resolution: {integrity: sha512-XsP+KhQif4bjX1kbuSiySJFNAehNxgLb6hPRGJ9QsUr8ajHkuXGdrHmFUTUUXhDwVX2R5bY4JNZEwbUiMhV+MA==} + /long@5.2.3: + resolution: {integrity: 
sha512-lcHwpNoggQTObv5apGNCTdJrO69eHOZMi4BNC+rTLER8iHAqGrUVeLh/irVIM7zTw2bOXA8T6uNPeujwOLg/2Q==} + dev: false + /longest-streak@3.0.1: resolution: {integrity: sha512-cHlYSUpL2s7Fb3394mYxwTYj8niTaNHUCLr0qdiCXQfSjfuA7CKofpX2uSwEfFDQ0EB7JcnMnm+GjbqqoinYYg==} @@ -13514,6 +13726,25 @@ packages: /property-information@6.1.1: resolution: {integrity: sha512-hrzC564QIl0r0vy4l6MvRLhafmUowhO/O3KgVSoXIbbA2Sz4j8HGpJc6T2cubRVwMwpdiG/vKGfhT4IixmKN9w==} + /protobufjs@7.2.5: + resolution: {integrity: sha512-gGXRSXvxQ7UiPgfw8gevrfRWcTlSbOFg+p/N+JVJEK5VhueL2miT6qTymqAmjr1Q5WbOCyJbyrk6JfWKwlFn6A==} + engines: {node: '>=12.0.0'} + requiresBuild: true + dependencies: + '@protobufjs/aspromise': 1.1.2 + '@protobufjs/base64': 1.1.2 + '@protobufjs/codegen': 2.0.4 + '@protobufjs/eventemitter': 1.1.0 + '@protobufjs/fetch': 1.1.0 + '@protobufjs/float': 1.0.2 + '@protobufjs/inquire': 1.1.0 + '@protobufjs/path': 1.1.2 + '@protobufjs/pool': 1.1.0 + '@protobufjs/utf8': 1.1.0 + '@types/node': 18.18.5 + long: 5.2.3 + dev: false + /proxy-addr@2.0.7: resolution: {integrity: sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg==} engines: {node: '>= 0.10'} @@ -13991,7 +14222,6 @@ packages: /require-directory@2.1.1: resolution: {integrity: sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==} engines: {node: '>=0.10.0'} - dev: true /require-from-string@2.0.2: resolution: {integrity: sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==} @@ -16070,7 +16300,6 @@ packages: ansi-styles: 4.3.0 string-width: 4.2.3 strip-ansi: 6.0.1 - dev: true /wrap-ansi@8.1.0: resolution: {integrity: sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==} @@ -16162,7 +16391,6 @@ packages: /y18n@5.0.8: resolution: {integrity: sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==} engines: {node: '>=10'} - dev: true 
/yallist@3.1.1: resolution: {integrity: sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==} @@ -16220,6 +16448,19 @@ packages: yargs-parser: 21.1.1 dev: true + /yargs@17.7.2: + resolution: {integrity: sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==} + engines: {node: '>=12'} + dependencies: + cliui: 8.0.1 + escalade: 3.1.1 + get-caller-file: 2.0.5 + require-directory: 2.1.1 + string-width: 4.2.3 + y18n: 5.0.8 + yargs-parser: 21.1.1 + dev: false + /ylru@1.3.2: resolution: {integrity: sha512-RXRJzMiK6U2ye0BlGGZnmpwJDPgakn6aNQ0A7gHRbD4I0uvK4TW6UqkK1V0pp9jskjJBAXd3dRrbzWkqJ+6cxA==} engines: {node: '>= 4.0.0'}