diff --git a/Blockchain/Sources/Blockchain/AccumulateFunction.swift b/Blockchain/Sources/Blockchain/AccumulateFunction.swift new file mode 100644 index 00000000..d0d2d12f --- /dev/null +++ b/Blockchain/Sources/Blockchain/AccumulateFunction.swift @@ -0,0 +1,73 @@ +import Foundation +import Utils + +public struct AccumulateArguments { + public var result: WorkResult + public var paylaodHash: Data32 + public var packageHash: Data32 + public var authorizationOutput: Data + + public init(result: WorkResult, paylaodHash: Data32, packageHash: Data32, authorizationOutput: Data) { + self.result = result + self.paylaodHash = paylaodHash + self.packageHash = packageHash + self.authorizationOutput = authorizationOutput + } +} + +public struct DeferredTransfers { + // s + public var sender: ServiceIndex + // d + public var destination: ServiceIndex + // a + public var amount: Balance + // m + public var memo: Data64 + // g + public var gasLimit: Gas + + public init(sender: ServiceIndex, destination: ServiceIndex, amount: Balance, memo: Data64, gasLimit: Gas) { + self.sender = sender + self.destination = destination + self.amount = amount + self.memo = memo + self.gasLimit = gasLimit + } +} + +public struct AccumlateResultContext { + // s: updated current account + public var account: ServiceAccount? + // c + public var authorizationQueue: ConfigFixedSizeArray< + ConfigFixedSizeArray< + Data32, + ProtocolConfig.MaxAuthorizationsQueueItems + >, + ProtocolConfig.TotalNumberOfCores + > + // v + public var validatorQueue: ConfigFixedSizeArray< + ValidatorKey, ProtocolConfig.TotalNumberOfValidators + > + // i + public var serviceIndex: ServiceIndex + // t + public var transfers: [DeferredTransfers] + // n + public var newAccounts: [ServiceIndex: ServiceAccount] + // p + public var privilegedServices: PrivilegedServices +} + +public protocol AccumulateFunction { + func invoke( + config: ProtocolConfigRef, + service: ServiceIndex, + code: Data, + serviceAccounts: [ServiceIndex: ServiceAccount], + gas: Gas, + arguments: [AccumulateArguments] + ) throws -> (ctx: AccumlateResultContext, result: Data32?) 
+} diff --git a/Blockchain/Sources/Blockchain/Accumulation.swift b/Blockchain/Sources/Blockchain/Accumulation.swift new file mode 100644 index 00000000..e5111fce --- /dev/null +++ b/Blockchain/Sources/Blockchain/Accumulation.swift @@ -0,0 +1,161 @@ +import Utils + +public enum AccumulationError: Error { + case invalidServiceIndex + case duplicatedServiceIndex +} + +public struct AccumulationOutput { + public var commitments: [(ServiceIndex, Data32)] + public var privilegedServices: PrivilegedServices + public var validatorQueue: ConfigFixedSizeArray< + ValidatorKey, ProtocolConfig.TotalNumberOfValidators + > + public var authorizationQueue: ConfigFixedSizeArray< + ConfigFixedSizeArray< + Data32, + ProtocolConfig.MaxAuthorizationsQueueItems + >, + ProtocolConfig.TotalNumberOfCores + > + public var serviceAccounts: [ServiceIndex: ServiceAccount] +} + +public protocol Accumulation { + var privilegedServices: PrivilegedServices { get } + var serviceAccounts: [ServiceIndex: ServiceAccount] { get } + var accumlateFunction: AccumulateFunction { get } + var onTransferFunction: OnTransferFunction { get } +} + +extension Accumulation { + public func update(config: ProtocolConfigRef, workReports: [WorkReport]) throws -> AccumulationOutput { + var servicesGasRatio: [ServiceIndex: Gas] = [:] + var servicesGas: [ServiceIndex: Gas] = [:] + + // privileged gas + for (service, gas) in privilegedServices.basicGas { + servicesGas[service] = gas + } + + let totalGasRatio = workReports.flatMap(\.results).reduce(0) { $0 + $1.gasRatio } + let totalMinimalGas = try workReports.flatMap(\.results) + .reduce(0) { try $0 + serviceAccounts[$1.serviceIndex].unwrap(orError: AccumulationError.invalidServiceIndex).minAccumlateGas } + for report in workReports { + for result in report.results { + servicesGasRatio[result.serviceIndex, default: 0] += result.gasRatio + servicesGas[result.serviceIndex, default: 0] += try serviceAccounts[result.serviceIndex] + .unwrap(orError: AccumulationError.invalidServiceIndex).minAccumlateGas + } + } + let remainingGas = config.value.coreAccumulationGas - totalMinimalGas + + for (service, gas) in servicesGas { + servicesGas[service] = gas + servicesGasRatio[service, default: 0] * remainingGas / totalGasRatio + } + + var serviceArguments: [ServiceIndex: [AccumulateArguments]] = [:] + + // ensure privileged services will be called + for service in privilegedServices.basicGas.keys { + serviceArguments[service] = [] + } + + for report in workReports { + for result in report.results { + serviceArguments[result.serviceIndex, default: []].append(AccumulateArguments( + result: result, + paylaodHash: result.payloadHash, + packageHash: report.packageSpecification.workPackageHash, + authorizationOutput: report.authorizationOutput + )) + } + } + + var commitments = [(ServiceIndex, Data32)]() + var newPrivilegedServices: PrivilegedServices? + var newValidatorQueue: ConfigFixedSizeArray< + ValidatorKey, ProtocolConfig.TotalNumberOfValidators + >? + var newAuthorizationQueue: ConfigFixedSizeArray< + ConfigFixedSizeArray< + Data32, + ProtocolConfig.MaxAuthorizationsQueueItems + >, + ProtocolConfig.TotalNumberOfCores + >? 
+ + var newServiceAccounts = serviceAccounts + + var transferReceivers = [ServiceIndex: [DeferredTransfers]]() + + for (service, arguments) in serviceArguments { + guard let gas = servicesGas[service] else { + assertionFailure("unreachable: service not found") + throw AccumulationError.invalidServiceIndex + } + let acc = try serviceAccounts[service].unwrap(orError: AccumulationError.invalidServiceIndex) + guard let code = acc.preimages[acc.codeHash] else { + continue + } + let (ctx, commitment) = try accumlateFunction.invoke( + config: config, + service: service, + code: code, + serviceAccounts: serviceAccounts, + gas: gas, + arguments: arguments + ) + if let commitment { + commitments.append((service, commitment)) + } + + for (service, account) in ctx.newAccounts { + guard newServiceAccounts[service] == nil else { + throw AccumulationError.duplicatedServiceIndex + } + newServiceAccounts[service] = account + } + + newServiceAccounts[service] = ctx.account + + switch service { + case privilegedServices.empower: + newPrivilegedServices = ctx.privilegedServices + case privilegedServices.assign: + newAuthorizationQueue = ctx.authorizationQueue + case privilegedServices.designate: + newValidatorQueue = ctx.validatorQueue + default: + break + } + + for transfer in ctx.transfers { + transferReceivers[transfer.destination, default: []].append(transfer) + } + } + + for (service, transfers) in transferReceivers { + let acc = try serviceAccounts[service].unwrap(orError: AccumulationError.invalidServiceIndex) + guard let code = acc.preimages[acc.codeHash] else { + continue + } + newServiceAccounts[service] = try onTransferFunction.invoke( + config: config, + service: service, + code: code, + serviceAccounts: newServiceAccounts, + transfers: transfers + ) + } + + return .init( + commitments: commitments, + // those cannot be nil because privileged services are always called + privilegedServices: newPrivilegedServices!, + validatorQueue: newValidatorQueue!, + authorizationQueue: newAuthorizationQueue!, + serviceAccounts: newServiceAccounts + ) + } +} diff --git a/Blockchain/Sources/Blockchain/Config/ProtocolConfig.swift b/Blockchain/Sources/Blockchain/Config/ProtocolConfig.swift index 80180e7b..da4b1261 100644 --- a/Blockchain/Sources/Blockchain/Config/ProtocolConfig.swift +++ b/Blockchain/Sources/Blockchain/Config/ProtocolConfig.swift @@ -29,13 +29,13 @@ public struct ProtocolConfig: Sendable { public var auditBiasFactor: Int // GA: The total gas allocated to a core for Accumulation. - public var coreAccumulationGas: Int + public var coreAccumulationGas: Gas // GI: The gas allocated to invoke a work-package’s Is-Authorized logic. - public var workPackageAuthorizerGas: Int + public var workPackageAuthorizerGas: Gas // GR: The total gas allocated for a work-package’s Refine logic. - public var workPackageRefineGas: Int + public var workPackageRefineGas: Gas // H = 8: The size of recent history, in blocks.
public var recentHistorySize: Int @@ -116,9 +116,9 @@ public struct ProtocolConfig: Sendable { preimagePurgePeriod: Int, epochLength: Int, auditBiasFactor: Int, - coreAccumulationGas: Int, - workPackageAuthorizerGas: Int, - workPackageRefineGas: Int, + coreAccumulationGas: Gas, + workPackageAuthorizerGas: Gas, + workPackageRefineGas: Gas, recentHistorySize: Int, maxWorkItems: Int, maxTicketsPerExtrinsic: Int, @@ -248,23 +248,26 @@ extension ProtocolConfig { } } - public enum CoreAccumulationGas: ReadInt { + public enum CoreAccumulationGas: ReadUInt64 { public typealias TConfig = ProtocolConfigRef - public static func read(config: ProtocolConfigRef) -> Int { + public typealias TOutput = Gas + public static func read(config: ProtocolConfigRef) -> Gas { config.value.coreAccumulationGas } } - public enum WorkPackageAuthorizerGas: ReadInt { + public enum WorkPackageAuthorizerGas: ReadUInt64 { public typealias TConfig = ProtocolConfigRef - public static func read(config: ProtocolConfigRef) -> Int { + public typealias TOutput = Gas + public static func read(config: ProtocolConfigRef) -> Gas { config.value.workPackageAuthorizerGas } } - public enum WorkPackageRefineGas: ReadInt { + public enum WorkPackageRefineGas: ReadUInt64 { public typealias TConfig = ProtocolConfigRef - public static func read(config: ProtocolConfigRef) -> Int { + public typealias TOutput = Gas + public static func read(config: ProtocolConfigRef) -> Gas { config.value.workPackageRefineGas } } diff --git a/Blockchain/Sources/Blockchain/OnTransferFunction.swift b/Blockchain/Sources/Blockchain/OnTransferFunction.swift new file mode 100644 index 00000000..e002c409 --- /dev/null +++ b/Blockchain/Sources/Blockchain/OnTransferFunction.swift @@ -0,0 +1,11 @@ +import Foundation + +public protocol OnTransferFunction { + func invoke( + config: ProtocolConfigRef, + service: ServiceIndex, + code: Data, + serviceAccounts: [ServiceIndex: ServiceAccount], + transfers: [DeferredTransfers] + ) throws -> ServiceAccount +} diff --git a/Blockchain/Sources/Blockchain/Runtime.swift b/Blockchain/Sources/Blockchain/Runtime.swift index 9517ace9..044712a7 100644 --- a/Blockchain/Sources/Blockchain/Runtime.swift +++ b/Blockchain/Sources/Blockchain/Runtime.swift @@ -18,6 +18,9 @@ public final class Runtime { case invalidAssuranceParentHash case invalidAssuranceSignature case assuranceForEmptyCore + case preimagesNotSorted + case invalidPreimageServiceIndex + case duplicatedPreimage case other(any Swift.Error) case validateError(any Swift.Error) } @@ -102,7 +105,12 @@ public final class Runtime { try updateDisputes(block: block, state: &newState) // depends on Safrole and Disputes - try updateReports(block: block, state: &newState) + let availableReports = try updateReports(block: block, state: &newState) + let res = try newState.update(config: config, workReports: availableReports) + newState.privilegedServices = res.privilegedServices + newState.serviceAccounts = res.serviceAccounts + newState.authorizationQueue = res.authorizationQueue + newState.validatorQueue = res.validatorQueue newState.coreAuthorizationPool = try updateAuthorizationPool( block: block, state: prevState @@ -202,7 +210,8 @@ public final class Runtime { return pool } - public func updateReports(block: BlockRef, state newState: inout State) throws { + // returns available reports + public func updateReports(block: BlockRef, state newState: inout State) throws -> [WorkReport] { for assurance in block.extrinsic.availability.assurances { let hash = Blake2b256.hash(assurance.parentHash, 
assurance.assurance) let payload = SigningContext.available + hash.data @@ -220,16 +229,46 @@ public final class Runtime { } } + var availableReports = [WorkReport]() + for (idx, count) in availabilityCount.enumerated() where count > 0 { - guard newState.reports[idx] != nil else { + guard let report = newState.reports[idx] else { throw Error.assuranceForEmptyCore } if count >= ProtocolConfig.TwoThirdValidatorsPlusOne.read(config: config) { + availableReports.append(report.workReport) newState.reports[idx] = nil // remove available report from pending reports } } newState.reports = try newState.update(config: config, extrinsic: block.extrinsic.reports) + + return availableReports + } + + public func updatePreimages(block: BlockRef, state newState: inout State) throws { + let preimages = block.extrinsic.preimages.preimages + + guard preimages.isSortedAndUnique() else { + throw Error.preimagesNotSorted + } + + for preimage in preimages { + guard var acc = newState.serviceAccounts[preimage.serviceIndex] else { + throw Error.invalidPreimageServiceIndex + } + + let hash = preimage.data.blake2b256hash() + let hashAndLength = HashAndLength(hash: hash, length: UInt32(preimage.data.count)) + guard acc.preimages[hash] == nil, acc.preimageInfos[hashAndLength] == nil else { + throw Error.duplicatedPreimage + } + + acc.preimages[hash] = preimage.data + acc.preimageInfos[hashAndLength] = .init([newState.timeslot]) + + newState.serviceAccounts[preimage.serviceIndex] = acc + } } // TODO: add tests diff --git a/Blockchain/Sources/Blockchain/Types/ExtrinsicPreimages.swift b/Blockchain/Sources/Blockchain/Types/ExtrinsicPreimages.swift index 9587aa21..7071623b 100644 --- a/Blockchain/Sources/Blockchain/Types/ExtrinsicPreimages.swift +++ b/Blockchain/Sources/Blockchain/Types/ExtrinsicPreimages.swift @@ -27,3 +27,12 @@ extension ExtrinsicPreimages: Dummy { ExtrinsicPreimages(preimages: []) } } + +extension ExtrinsicPreimages.PreimageItem: Comparable { + public static func < (lhs: ExtrinsicPreimages.PreimageItem, rhs: ExtrinsicPreimages.PreimageItem) -> Bool { + if lhs.serviceIndex != rhs.serviceIndex { + return lhs.serviceIndex < rhs.serviceIndex + } + return lhs.data.lexicographicallyPrecedes(rhs.data) + } +} diff --git a/Blockchain/Sources/Blockchain/Types/PrivilegedServices.swift b/Blockchain/Sources/Blockchain/Types/PrivilegedServices.swift new file mode 100644 index 00000000..a7f0c8cd --- /dev/null +++ b/Blockchain/Sources/Blockchain/Types/PrivilegedServices.swift @@ -0,0 +1,17 @@ +public struct PrivilegedServices: Sendable, Equatable, Codable { + // m + public var empower: ServiceIndex + // a + public var assign: ServiceIndex + // v + public var designate: ServiceIndex + // g + public var basicGas: [ServiceIndex: Gas] + + public init(empower: ServiceIndex, assign: ServiceIndex, designate: ServiceIndex, basicGas: [ServiceIndex: Gas]) { + self.empower = empower + self.assign = assign + self.designate = designate + self.basicGas = basicGas + } +} diff --git a/Blockchain/Sources/Blockchain/Types/State+Merklization.swift b/Blockchain/Sources/Blockchain/Types/State+Merklization.swift index b5c8ee82..cdf42d2e 100644 --- a/Blockchain/Sources/Blockchain/Types/State+Merklization.swift +++ b/Blockchain/Sources/Blockchain/Types/State+Merklization.swift @@ -51,7 +51,7 @@ extension State { res[Self.constructKey(9)] = try JamEncoder.encode(previousValidators) res[Self.constructKey(10)] = try JamEncoder.encode(reports) res[Self.constructKey(11)] = try JamEncoder.encode(timeslot) - res[Self.constructKey(12)] = try 
JamEncoder.encode(privilegedServiceIndices) + res[Self.constructKey(12)] = try JamEncoder.encode(privilegedServices) res[Self.constructKey(13)] = try JamEncoder.encode(activityStatistics) for (idx, account) in serviceAccounts { diff --git a/Blockchain/Sources/Blockchain/Types/State.swift b/Blockchain/Sources/Blockchain/Types/State.swift index c6624e76..efc83d2b 100644 --- a/Blockchain/Sources/Blockchain/Types/State.swift +++ b/Blockchain/Sources/Blockchain/Types/State.swift @@ -1,19 +1,8 @@ import Codec +import Foundation import Utils public struct State: Sendable, Equatable, Codable { - public struct PrivilegedServiceIndices: Sendable, Equatable, Codable { - public var empower: ServiceIndex - public var assign: ServiceIndex - public var designate: ServiceIndex - - public init(empower: ServiceIndex, assign: ServiceIndex, designate: ServiceIndex) { - self.empower = empower - self.assign = assign - self.designate = designate - } - } - // α: The core αuthorizations pool. public var coreAuthorizationPool: ConfigFixedSizeArray< ConfigLimitedSizeArray< @@ -70,7 +59,7 @@ public struct State: Sendable, Equatable, Codable { > // χ: The privileged service indices. - public var privilegedServiceIndices: PrivilegedServiceIndices + public var privilegedServices: PrivilegedServices // ψ: past judgements public var judgements: JudgementsState @@ -112,7 +101,7 @@ public struct State: Sendable, Equatable, Codable { >, ProtocolConfig.TotalNumberOfCores >, - privilegedServiceIndices: PrivilegedServiceIndices, + privilegedServices: PrivilegedServices, judgements: JudgementsState, activityStatistics: ValidatorActivityStatistics ) { @@ -127,7 +116,7 @@ public struct State: Sendable, Equatable, Codable { self.reports = reports self.timeslot = timeslot self.authorizationQueue = authorizationQueue - self.privilegedServiceIndices = privilegedServiceIndices + self.privilegedServices = privilegedServices self.judgements = judgements self.activityStatistics = activityStatistics } @@ -157,10 +146,11 @@ extension State: Dummy { config: config, defaultValue: ConfigFixedSizeArray(config: config, defaultValue: Data32()) ), - privilegedServiceIndices: PrivilegedServiceIndices( + privilegedServices: PrivilegedServices( empower: ServiceIndex(), assign: ServiceIndex(), - designate: ServiceIndex() + designate: ServiceIndex(), + basicGas: [:] ), judgements: JudgementsState.dummy(config: config), activityStatistics: ValidatorActivityStatistics.dummy(config: config) @@ -217,3 +207,36 @@ extension State: Guaranteeing { judgements.punishSet } } + +struct DummyFunction: AccumulateFunction, OnTransferFunction { + func invoke( + config _: ProtocolConfigRef, + service _: ServiceIndex, + code _: Data, + serviceAccounts _: [ServiceIndex: ServiceAccount], + gas _: Gas, + arguments _: [AccumulateArguments] + ) throws -> (ctx: AccumlateResultContext, result: Data32?) 
{ + fatalError("not implemented") + } + + func invoke( + config _: ProtocolConfigRef, + service _: ServiceIndex, + code _: Data, + serviceAccounts _: [ServiceIndex: ServiceAccount], + transfers _: [DeferredTransfers] + ) throws -> ServiceAccount { + fatalError("not implemented") + } +} + +extension State: Accumulation { + public var accumlateFunction: AccumulateFunction { + DummyFunction() + } + + public var onTransferFunction: OnTransferFunction { + DummyFunction() + } +} diff --git a/Blockchain/Sources/Blockchain/Types/WorkReport.swift b/Blockchain/Sources/Blockchain/Types/WorkReport.swift index 0db20500..d7bbfd28 100644 --- a/Blockchain/Sources/Blockchain/Types/WorkReport.swift +++ b/Blockchain/Sources/Blockchain/Types/WorkReport.swift @@ -15,8 +15,8 @@ public struct WorkReport: Sendable, Equatable, Codable { // a: authorizer hash public var authorizerHash: Data32 - // o: output - public var output: Data + // o: authorization output + public var authorizationOutput: Data // r: the results of the evaluation of each of the items in the package public var results: ConfigLimitedSizeArray< @@ -28,14 +28,14 @@ public init( authorizerHash: Data32, coreIndex: CoreIndex, - output: Data, + authorizationOutput: Data, refinementContext: RefinementContext, packageSpecification: AvailabilitySpecifications, results: ConfigLimitedSizeArray<WorkResult, ProtocolConfig.MaxWorkItems> ) { self.authorizerHash = authorizerHash self.coreIndex = coreIndex - self.output = output + self.authorizationOutput = authorizationOutput self.refinementContext = refinementContext self.packageSpecification = packageSpecification self.results = results @@ -48,7 +48,7 @@ extension WorkReport: Dummy { WorkReport( authorizerHash: Data32(), coreIndex: 0, - output: Data(), + authorizationOutput: Data(), refinementContext: RefinementContext.dummy(config: config), packageSpecification: AvailabilitySpecifications.dummy(config: config), results: try!
ConfigLimitedSizeArray(config: config, defaultValue: WorkResult.dummy(config: config)) @@ -64,7 +64,8 @@ extension WorkReport { extension WorkReport: EncodedSize { public var encodedSize: Int { - authorizerHash.encodedSize + coreIndex.encodedSize + output.encodedSize + refinementContext.encodedSize + packageSpecification + authorizerHash.encodedSize + coreIndex.encodedSize + authorizationOutput.encodedSize + refinementContext + .encodedSize + packageSpecification .encodedSize + results.encodedSize } diff --git a/Blockchain/Sources/Blockchain/Types/WorkResult.swift b/Blockchain/Sources/Blockchain/Types/WorkResult.swift index c92773a8..a9822016 100644 --- a/Blockchain/Sources/Blockchain/Types/WorkResult.swift +++ b/Blockchain/Sources/Blockchain/Types/WorkResult.swift @@ -15,7 +15,7 @@ public struct WorkResult: Sendable, Equatable, Codable { // g: the gas prioritization ratio // used when determining how much gas should be allocated to execute of this item’s accumulate - public var gas: Gas + public var gasRatio: Gas // o: there is the output or error of the execution of the code o // which may be either an octet sequence in case it was successful, or a member of the set J, if not @@ -31,7 +31,7 @@ public struct WorkResult: Sendable, Equatable, Codable { self.serviceIndex = serviceIndex self.codeHash = codeHash self.payloadHash = payloadHash - self.gas = gas + gasRatio = gas self.output = output } } @@ -51,7 +51,7 @@ extension WorkResult: Dummy { extension WorkResult: EncodedSize { public var encodedSize: Int { - serviceIndex.encodedSize + codeHash.encodedSize + payloadHash.encodedSize + gas.encodedSize + output.encodedSize + serviceIndex.encodedSize + codeHash.encodedSize + payloadHash.encodedSize + gasRatio.encodedSize + output.encodedSize } public static var encodeedSizeHint: Int? { diff --git a/JAMTests/Tests/JAMTests/CodecTests.swift b/JAMTests/Tests/JAMTests/CodecTests.swift index 640906f6..acdbc44c 100644 --- a/JAMTests/Tests/JAMTests/CodecTests.swift +++ b/JAMTests/Tests/JAMTests/CodecTests.swift @@ -120,7 +120,7 @@ struct CodecTests { if value is WorkResult { return [ "code_hash": json["codeHash"]!, - "gas_ratio": json["gas"]!, + "gas_ratio": json["gasRatio"]!, "payload_hash": json["payloadHash"]!, "service": json["serviceIndex"]!, "result": json["output"]!["success"] == nil ? json["output"]! 
: [ @@ -167,7 +167,7 @@ struct CodecTests { "context": transform(json["refinementContext"]!, value: value.refinementContext), "core_index": json["coreIndex"]!, "authorizer_hash": json["authorizerHash"]!, - "auth_output": json["output"]!, + "auth_output": json["authorizationOutput"]!, "results": transform(json["results"]!, value: value.results), ].json } diff --git a/PolkaVM/Sources/PolkaVM/Instruction.swift b/PolkaVM/Sources/PolkaVM/Instruction.swift index 3761f88c..84f9bdab 100644 --- a/PolkaVM/Sources/PolkaVM/Instruction.swift +++ b/PolkaVM/Sources/PolkaVM/Instruction.swift @@ -39,6 +39,7 @@ extension Instruction { } catch let e as Memory.Error { // this passes test vector context.state.consumeGas(gasCost()) + logger.debug("memory error: \(e)") return .exit(.pageFault(e.address)) } catch let e { // other unknown errors diff --git a/PolkaVM/Sources/PolkaVM/Memory.swift b/PolkaVM/Sources/PolkaVM/Memory.swift index d9056e0d..38c9ac2d 100644 --- a/PolkaVM/Sources/PolkaVM/Memory.swift +++ b/PolkaVM/Sources/PolkaVM/Memory.swift @@ -1,5 +1,64 @@ import Foundation +public class MemorySection { + /// lowest address bound + public let startAddressBound: UInt32 + /// highest address bound + public let endAddressBound: UInt32 + /// is the section writable + public let isWritable: Bool + /// allocated data + fileprivate var data: Data + + /// current data end address, also the start address of empty space + public var currentEnd: UInt32 { + startAddressBound + UInt32(data.count) + } + + public init(startAddressBound: UInt32, endAddressBound: UInt32, data: Data, isWritable: Bool) { + self.startAddressBound = startAddressBound + self.endAddressBound = endAddressBound + self.data = data + self.isWritable = isWritable + } +} + +extension MemorySection { + public func read(address: UInt32, length: Int) throws(Memory.Error) -> Data { + guard startAddressBound <= address, address + UInt32(length) <= endAddressBound else { + throw Memory.Error.pageFault(address) + } + let start = address - startAddressBound + let end = start + UInt32(length) + + let validCount = min(end, UInt32(data.count)) + let dataToRead = data[start ..< validCount] + + let zeroCount = max(0, Int(end - validCount)) + let zeros = Data(repeating: 0, count: zeroCount) + + return dataToRead + zeros + } + + public func write(address: UInt32, values: some Sequence<UInt8>) throws(Memory.Error) { + let valuesData = Data(values) + guard isWritable else { + throw Memory.Error.notWritable(address) + } + guard startAddressBound <= address, address + UInt32(valuesData.count) <= endAddressBound else { + throw Memory.Error.notWritable(address) + } + + let start = address - startAddressBound + let end = start + UInt32(valuesData.count) + guard end <= data.count else { + throw Memory.Error.notWritable(address) + } + + data[start ..< end] = valuesData + } +} + public class Memory { public enum Error: Swift.Error { case pageFault(UInt32) @@ -18,18 +77,43 @@ public class Memory { } } - private let pageMap: [(address: UInt32, length: UInt32, writable: Bool)] - private var chunks: [(address: UInt32, data: Data)] - private let heapStart: UInt32 - private var heapEnd: UInt32 // start idx of unallocated heap - private let heapLimit: UInt32 + // standard program sections + private var readOnly: MemorySection? + private var heap: MemorySection? + private var stack: MemorySection? + private var argument: MemorySection?
+ + // general program sections + private var memorySections: [MemorySection] = [] + + /// General program init with a fixed page map and some initial data public init(pageMap: [(address: UInt32, length: UInt32, writable: Bool)], chunks: [(address: UInt32, data: Data)]) { - self.pageMap = pageMap - self.chunks = chunks - heapStart = pageMap.first(where: { $0.writable })?.address ?? 0 - heapLimit = UInt32.max - heapEnd = chunks.reduce(0) { max($0, $1.address + UInt32($1.data.count)) } + readOnly = nil + heap = nil + stack = nil + argument = nil + memorySections = [] + + let sortedPageMap = pageMap.sorted(by: { $0.address < $1.address }) + let sortedChunks = chunks.sorted(by: { $0.address < $1.address }) + + for (address, length, writable) in sortedPageMap { + var data = Data(repeating: 0, count: Int(length)) + if sortedChunks.count != 0 { + let chunkIndex = Memory.binarySearch(array: sortedChunks.map(\.address), value: address) + let chunk = sortedChunks[chunkIndex] + if address <= chunk.address, chunk.address + UInt32(chunk.data.count) <= address + length { + // place the chunk at its address offset within the zero-filled page data + let offset = Int(chunk.address - address) + data.replaceSubrange(offset ..< offset + chunk.data.count, with: chunk.data) + } + } + let section = MemorySection( + startAddressBound: address, + endAddressBound: address + length, + data: data, + isWritable: writable + ) + memorySections.append(section) + } } /// Standard Program init @@ -45,174 +129,107 @@ public class Memory { let argumentDataLen = UInt32(argumentData.count) let heapStart = 2 * ZQ + Q(readOnlyLen, config) - - pageMap = [ - (ZQ, readOnlyLen, false), - (ZQ + readOnlyLen, P(readOnlyLen, config) - readOnlyLen, false), - (heapStart, readWriteLen, true), // heap - (heapStart + readWriteLen, P(readWriteLen, config) + heapEmptyPagesSize - readWriteLen, true), // heap - (UInt32(config.pvmProgramInitStackBaseAddress) - P(stackSize, config), stackSize, true), // stack - (UInt32(config.pvmProgramInitInputStartAddress), argumentDataLen, false), // argument - (UInt32(config.pvmProgramInitInputStartAddress) + argumentDataLen, P(argumentDataLen, config) - argumentDataLen, false), - ] - - chunks = [ - (ZQ, readOnlyData), - (heapStart, readWriteData), - (UInt32(config.pvmProgramInitInputStartAddress), argumentData), - ] - - self.heapStart = heapStart - heapLimit = heapStart + P(readWriteLen, config) + heapEmptyPagesSize - heapEnd = heapStart + readWriteLen + let stackPageAlignedSize = P(stackSize, config) + + readOnly = MemorySection( + startAddressBound: ZQ, + endAddressBound: ZQ + P(readOnlyLen, config), + data: readOnlyData, + isWritable: false + ) + heap = MemorySection( + startAddressBound: heapStart, + endAddressBound: heapStart + P(readWriteLen, config) + heapEmptyPagesSize, + data: readWriteData, + isWritable: true + ) + stack = MemorySection( + startAddressBound: UInt32(config.pvmProgramInitStackBaseAddress) - stackPageAlignedSize, + endAddressBound: UInt32(config.pvmProgramInitStackBaseAddress), + // TODO: check whether this is necessary + data: Data(repeating: 0, count: Int(stackPageAlignedSize)), + isWritable: true + ) + argument = MemorySection( + startAddressBound: UInt32(config.pvmProgramInitInputStartAddress), + endAddressBound: UInt32(config.pvmProgramInitInputStartAddress) + P(argumentDataLen, config), + data: argumentData, + isWritable: false + ) } - public func isWritable(address: UInt32) -> Bool { - // check heap range - guard heapStart <= address, address < heapLimit else { - return false - } - - // TODO: optimize - for page in pageMap { - if page.address <= address, address < page.address + page.length { - return page.writable + /// If the value is not in the array, return the index of
the previous element, or 0 + static func binarySearch(array: [UInt32], value: UInt32) -> Int { + var low = 0 + var high = array.count - 1 + while low <= high { + let mid = (low + high) / 2 + if array[mid] < value { + low = mid + 1 + } else if array[mid] > value { + high = mid - 1 + } else { + return mid } } - - return false + return max(0, low - 1) } - public func read(address: UInt32) throws(Error) -> UInt8 { - // TODO: optimize this - // check for chunks - for chunk in chunks { - if chunk.address <= address, address < chunk.address + UInt32(chunk.data.count) { - return chunk.data[Int(address - chunk.address)] + private func getSection(forAddress address: UInt32) throws(Error) -> MemorySection { + if memorySections.count != 0 { + return memorySections[Memory.binarySearch(array: memorySections.map(\.startAddressBound), value: address)] + } + if let readOnly { + if address >= readOnly.startAddressBound, address < readOnly.endAddressBound { + return readOnly } - } - // check for page map - for page in pageMap { - if page.address <= address, address < page.address + page.length { - return 0 + } + if let heap { + if address >= heap.startAddressBound, address < heap.endAddressBound { + return heap + } + } + if let stack { + if address >= stack.startAddressBound, address < stack.endAddressBound { + return stack + } + } + if let argument { + if address >= argument.startAddressBound, address < argument.endAddressBound { + return argument } } throw Error.pageFault(address) } + public func read(address: UInt32) throws(Error) -> UInt8 { + try getSection(forAddress: address).read(address: address, length: 1).first ?? 0 + } + public func read(address: UInt32, length: Int) throws -> Data { - // TODO: optimize this - // check for chunks - for chunk in chunks { - if chunk.address <= address, address < chunk.address + UInt32(chunk.data.count) { - let startIndex = Int(address - chunk.address) - let endIndex = min(startIndex + length, chunk.data.endIndex) - let res = chunk.data[startIndex ..< endIndex] - let remaining = length - res.count - if remaining == 0 { - return res - } else { - let startAddress = chunk.address &+ UInt32(chunk.data.count) // wrapped add - let remainingData = try read(address: startAddress, length: remaining) - return res + remainingData - } - } - } - // check for page map - for page in pageMap { - if page.address <= address, address < page.address + page.length { - // TODO: handle reads that cross page boundaries - return Data(repeating: 0, count: length) - } - } - throw Error.pageFault(address) + try getSection(forAddress: address).read(address: address, length: length) } public func write(address: UInt32, value: UInt8) throws(Error) { - guard isWritable(address: address) else { - throw Error.notWritable(address) - } - - // TODO: optimize this - // check for chunks - for i in 0 ..< chunks.count { - var chunk = chunks[i] - if chunk.address <= address, address < chunk.address + UInt32(chunk.data.count) { - chunk.data[Int(address - chunk.address)] = value - chunks[i] = chunk - return - } - } - // check for page map - for page in pageMap { - if page.address <= address, address < page.address + page.length { - var newChunk = (address: address, data: Data(repeating: 0, count: Int(page.length))) - newChunk.data[Int(address - page.address)] = value - chunks.append(newChunk) - heapEnd = max(heapEnd, address + 1) - return - } - } - throw Error.notWritable(address) + try getSection(forAddress: address).write(address: address, values: Data([value])) } public func write(address: 
UInt32, values: some Sequence<UInt8>) throws(Error) { - guard isWritable(address: address) else { - throw Error.notWritable(address) - } - - // TODO: optimize this - // check for chunks - for i in 0 ..< chunks.count { - var chunk = chunks[i] - if chunk.address <= address, address < chunk.address + UInt32(chunk.data.count) { - var idx = Int(address - chunk.address) - for v in values { - if idx == chunk.data.endIndex { - chunk.data.append(v) - } else { - chunk.data[idx] = v - } - idx += 1 - } - chunks[i] = chunk - return - } - } - // check for page map - for page in pageMap { - if page.address <= address, address < page.address + page.length { - var newChunk = (address: address, data: Data(repeating: 0, count: Int(page.length))) - var idx = Int(address - page.address) - for v in values { - if idx == newChunk.data.endIndex { - throw Error.notWritable(address) - } else { - newChunk.data[idx] = v - } - idx += 1 - } - chunks.append(newChunk) - heapEnd = max(heapEnd, UInt32(idx)) - return - } - } - throw Error.notWritable(address) + try getSection(forAddress: address).write(address: address, values: values) } public func sbrk(_ increment: UInt32) throws -> UInt32 { - // TODO: optimize - for page in pageMap { - let pageEnd = page.address + page.length - if page.writable, heapEnd >= page.address, heapEnd + increment < pageEnd { - let newChunk = (address: heapEnd, data: Data(repeating: 0, count: Int(increment))) - chunks.append(newChunk) - heapEnd += increment - return heapEnd - } + let section: MemorySection + if let heap { + section = heap + } else if memorySections.count != 0 { + section = memorySections.last! + } else { + throw Error.pageFault(0) } - throw Error.outOfMemory(heapEnd) + let oldSectionEnd = section.currentEnd + guard section.isWritable, oldSectionEnd + increment <= section.endAddressBound else { + throw Error.outOfMemory(oldSectionEnd) + } + section.data.append(Data(repeating: 0, count: Int(increment))) + return oldSectionEnd } } diff --git a/Utils/Sources/Utils/Extensions/Optional+Utils.swift b/Utils/Sources/Utils/Extensions/Optional+Utils.swift new file mode 100644 index 00000000..da92d52d --- /dev/null +++ b/Utils/Sources/Utils/Extensions/Optional+Utils.swift @@ -0,0 +1,19 @@ +public enum OptionalError: Swift.Error { + case nilValue +} + +extension Optional { + public func unwrap() throws -> Wrapped { + guard let self else { + throw OptionalError.nilValue + } + return self + } + + public func unwrap<E: Swift.Error>(orError: @autoclosure () -> E) throws(E) -> Wrapped { + guard let self else { + throw orError() + } + return self + } +} diff --git a/Utils/Sources/Utils/Read.swift b/Utils/Sources/Utils/Read.swift new file mode 100644 index 00000000..86498689 --- /dev/null +++ b/Utils/Sources/Utils/Read.swift @@ -0,0 +1,9 @@ +public protocol ReadInt { + associatedtype TConfig + static func read(config: TConfig) -> Int +} + +public protocol ReadUInt64 { + associatedtype TConfig + static func read(config: TConfig) -> UInt64 +} diff --git a/Utils/Sources/Utils/ReadInt.swift b/Utils/Sources/Utils/ReadInt.swift deleted file mode 100644 index cdf66e9a..00000000 --- a/Utils/Sources/Utils/ReadInt.swift +++ /dev/null @@ -1,4 +0,0 @@ -public protocol ReadInt { - associatedtype TConfig - static func read(config: TConfig) -> Int -}
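Reviewer note (not part of the patch): a minimal usage sketch of the reworked general-program Memory API above, assuming it is compiled together with this patch's PolkaVM module; the function name memorySketch, the page address, length, and seed bytes are illustrative only.

import Foundation
import PolkaVM

func memorySketch() throws {
    // One writable 4096-byte page at 0x1000, pre-seeded with two bytes at the page start.
    let memory = Memory(
        pageMap: [(address: 0x1000, length: 4096, writable: true)],
        chunks: [(address: 0x1000, data: Data([0xAB, 0xCD]))]
    )

    // Reads stay within the section and come back zero-filled past the seeded chunk.
    let head = try memory.read(address: 0x1000, length: 4)
    assert(head == Data([0xAB, 0xCD, 0x00, 0x00]))

    // Writes must target a writable section and stay inside its bounds.
    try memory.write(address: 0x1002, values: [0x01, 0x02])

    // Accesses outside the page map surface as Memory.Error.pageFault.
    assert((try? memory.read(address: 0x3000, length: 1)) == nil)
}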