Skip to content

API Reference

Detailed, auto-generated reference for the Accumulate Python Client.


Client

client

AccumulateClient

Client for interacting with the Accumulate JSON-RPC API with a persistent auto-incrementing request ID.

Source code in accumulate\api\client.py
 42
 43
 44
 45
 46
 47
 48
 49
 50
 51
 52
 53
 54
 55
 56
 57
 58
 59
 60
 61
 62
 63
 64
 65
 66
 67
 68
 69
 70
 71
 72
 73
 74
 75
 76
 77
 78
 79
 80
 81
 82
 83
 84
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
class AccumulateClient:
    """Client for interacting with the Accumulate JSON-RPC API with a persistent auto-incrementing request ID."""

    def __init__(self, base_url: Optional[str] = None):
        """Initialize the client.

        Args:
            base_url (Optional[str]): Base URL of an Accumulate node. Falls back
                to ``get_accumulate_rpc_url()`` when omitted.
        """
        self.base_url = base_url or get_accumulate_rpc_url()
        self.transport = RoutedTransport(self.base_url)
        self._id_counter = load_counter()  # Resume the persisted request-ID sequence

    async def json_rpc_request(self, method: str, params: Optional[Dict[str, Any]] = None) -> Dict[str, Any]:
        """Send a JSON-RPC request with an auto-incrementing ID stored persistently.

        Args:
            method (str): JSON-RPC method name.
            params (Optional[Dict[str, Any]]): Parameters for the call; {} when omitted.

        Returns:
            Dict[str, Any]: The "result" member of the response ({} if absent).

        Raises:
            AccumulateError: If the response carries an "error" member or the
                transport request fails.
        """
        rpc_id = self._id_counter
        self._id_counter += 1
        save_counter(self._id_counter)  # Save new counter value

        json_rpc_payload = {
            "jsonrpc": "2.0",
            "method": method,
            "params": params or {},
            "id": rpc_id,
        }

        logger.info(f"RPC Request: {json_rpc_payload}")

        try:
            response = await self.transport.send_request(endpoint="v3", method="POST", data=json_rpc_payload)

            logger.info(f"RPC Response: {response}")

            if "error" in response:
                raise AccumulateError(f"JSON-RPC request failed ({method}): {response['error'].get('message', 'Unknown error')}")

            return response.get("result", {})

        except AccumulateError:
            # Already wrapped above with a descriptive message; re-raising as-is
            # avoids duplicating the "JSON-RPC request failed" prefix.
            raise
        except Exception as e:
            logger.error(f"JSON-RPC request failed ({method}): {e}")
            raise AccumulateError(f"JSON-RPC request failed ({method}): {e}") from e


    async def submit(self, envelope: Dict[str, Any], verify: bool = True, wait: bool = True) -> Dict[str, Any]:
        """
        Submit a transaction to the Accumulate network.

        Args:
            envelope (Dict[str, Any]): The transaction envelope containing transactions and signatures.
            verify (bool, optional): If True, verifies the envelope before submission. Defaults to True.
            wait (bool, optional): If True, blocks until submission is confirmed or rejected. Defaults to True.

        Returns:
            Dict[str, Any]: The API response containing transaction details.

        Raises:
            ValueError: If the envelope or any of its transactions are malformed.
            RuntimeError: If a signature entry is an unawaited coroutine.
        """
        if not isinstance(envelope, dict):
            raise ValueError("Envelope must be a dictionary.")

        #  Ensure the envelope has "signatures" (FIRST in order)
        if "signatures" not in envelope or not isinstance(envelope["signatures"], list) or not envelope["signatures"]:
            raise ValueError("Envelope must contain at least one signature in a list.")

        #  Ensure "transaction" is correctly structured (SECOND in order)
        if "transaction" not in envelope or not isinstance(envelope["transaction"], list) or not envelope["transaction"]:
            raise ValueError("Envelope must contain at least one transaction in a list.")

        #  Validate transaction structure
        for txn in envelope["transaction"]:
            if not isinstance(txn, dict):
                raise ValueError("Each transaction must be a dictionary.")

            if "header" not in txn or not isinstance(txn["header"], dict):
                raise ValueError("Each transaction must contain a 'header' as a dictionary.")

            if "body" not in txn or not isinstance(txn["body"], dict):
                raise ValueError("Each transaction must contain a 'body' as a dictionary.")

            #  Ensure transactionHash (if exists) is properly formatted
            if "transactionHash" in txn and not isinstance(txn["transactionHash"], str):
                raise ValueError("Transaction hash must be a raw hex string.")

        #  Order envelope fields correctly
        ordered_envelope = {
            "signatures": envelope["signatures"],  # Signatures First
            "transaction": envelope["transaction"],  # Transactions Second
        }

        #  Include messages if they exist (LAST in order)
        if "messages" in envelope and isinstance(envelope["messages"], list):
            ordered_envelope["messages"] = envelope["messages"]

        params = {
            "envelope": ordered_envelope,
            "verify": verify,
            "wait": wait,
        }

        #  Debugging Before JSON Serialization
        for sig in envelope["signatures"]:
            if inspect.iscoroutine(sig):
                raise RuntimeError(f" Signature contains an unawaited coroutine: {sig}")

        json_params = json.dumps(params)  # Convert Python dict to JSON string
        formatted_params = json.loads(json_params)  # Convert JSON string back to dict (ensures double quotes)

        #  Log the final envelope before submission
        logger.info(" Debug: Final Submission Payload")
        logger.info(json.dumps(formatted_params, indent=2))

        return await self.json_rpc_request("submit", formatted_params)


    async def validate(self, envelope: Dict[str, Any], full: bool = False) -> Dict[str, Any]:
        """Validate a transaction envelope against the Accumulate network using JSON-RPC"""
        params = {"envelope": envelope, "full": full}
        return await self.json_rpc_request("validate", params)


    async def query_block(self, block_type: str, index: Optional[int] = None, start: Optional[int] = None, count: Optional[int] = None) -> dict:
        """
        Query a minor or major block.

        Args:
            block_type (str): Either "minor" or "major"
            index (Optional[int]): Block index (if querying a specific block)
            start (Optional[int]): Start index for range queries
            count (Optional[int]): Number of blocks to retrieve in range queries

        Returns:
            dict: JSON response from the API

        Raises:
            ValueError: If block_type is not "minor" or "major".
            AccumulateError: If the API returns no response or an error member.
        """
        if block_type not in ["minor", "major"]:
            raise ValueError("Invalid block type. Must be 'minor' or 'major'.")

        params = {}

        if index is not None:
            #  Query a specific minor or major block
            url = f"{self.transport.base_url}/block/{block_type}/{index}"

            if block_type == "major":
                #  Fetch the first 3 minor blocks within the major block
                params["minor_start"] = 0
                params["minor_count"] = 3
                params["omit_empty"] = True  # Exclude empty minor blocks

        else:
            #  Query a block range
            url = f"{self.transport.base_url}/block/{block_type}"
            if start is not None:
                params["start"] = start
            if count is not None:
                params["count"] = count

        logger.info(f" HTTP Request: GET {url} with params {params}")

        response = await self.transport.send_request(endpoint=url, method="GET", params=params)

        if not response:
            logger.error(" API request returned no response.")
            raise AccumulateError("Block query failed: No response received.")

        if response.get("error"):
            logger.error(f" API request failed: {response['error']}")
            raise AccumulateError(f"Block query failed: {response['error']['message']}")

        return response


    async def query(self, scope: str, query: Query) -> Record:
        """Submit a query to the Accumulate network using JSON-RPC.

        Args:
            scope (str): Scope/URL the query applies to.
            query (Query): Query object; must pass its own is_valid() check.

        Returns:
            Record: A Record subclass instance chosen from the response's
                record_type (plain Record when the type is unknown).

        Raises:
            ValueError: If query.is_valid() is False.
            AccumulateError: If the response is invalid JSON, not a dict, or
                lacks a record_type.
        """
        if not query.is_valid():
            raise ValueError("Invalid query.")

        # Convert query to dictionary
        query_dict = query.to_dict()
        query_dict["queryType"] = query.query_type.to_rpc_format()

        params = {"scope": scope, "query": query_dict}

        #  DEBUG: Log raw API request
        logger.debug(f" Sending Query Request: {params}")

        response = await self.json_rpc_request("query", params)

        #  DEBUG: Log raw API response
        logger.debug(f" Raw API Response: {response} (Type: {type(response)})")

        #  If response is a string, try parsing it as JSON
        if isinstance(response, str):
            try:
                response = json.loads(response)
                logger.debug(f" Decoded String Response: {response}")
            except json.JSONDecodeError:
                raise AccumulateError(f"API returned an invalid JSON string: {response}")

        # Ensure response is a dictionary
        if not isinstance(response, dict):
            raise AccumulateError(f"Unexpected API response format: {response} (type: {type(response)})")

        #  Convert API response keys from camelCase to snake_case
        response = {camel_to_snake(k): v for k, v in response.items()}

        if "record_type" not in response:
            raise AccumulateError(f"Unexpected response format: {response}")

        record_type = response["record_type"]

        #  Ensure consistency with RecordType enum
        record_mapping = {
            RecordType.ACCOUNT.name.lower(): AccountRecord,
            RecordType.CHAIN.name.lower(): ChainRecord,
            RecordType.MESSAGE.name.lower(): MessageRecord,
            RecordType.CHAIN_ENTRY.name.lower(): ChainEntryRecord,
            RecordType.KEY.name.lower(): KeyRecord,
            RecordType.SIGNATURE_SET.name.lower(): SignatureSetRecord,
            RecordType.URL.name.lower(): UrlRecord,
            RecordType.TX_ID.name.lower(): TxIDRecord,
            RecordType.RANGE.name.lower(): RecordRange,
            "directory": RecordRange,  # Some APIs might return this as 'directory'
        }

        record_cls = record_mapping.get(record_type, Record)  # Default to Record if unknown

        #  Ensure return type is correct
        record_obj = record_cls.from_dict(response)
        logger.debug(f" Processed Query Response: {record_obj} (Type: {type(record_obj)})")

        return record_obj  # This should return an object with is_valid()


    async def search(self, account_id: str, search_type: str, value: str, extra_params: Optional[Dict[str, Any]] = None) -> dict:
        """
        Search an account for an anchor, public key, or delegate using JSON-RPC.

        Args:
            account_id (str): The account ID to search within.
            search_type (str): The type of search. Must be 'anchor', 'publicKey', or 'delegate'.
            value (str): The value to search for (anchor hash, public key, or delegate URL).
            extra_params (Optional[Dict[str, Any]]): Additional query parameters.

        Returns:
            dict: JSON response from the API.

        Raises:
            ValueError: If search_type is not one of the allowed values.
            AccumulateError: If the API returns no response.
        """
        if search_type not in ["anchor", "publicKey", "delegate"]:
            raise ValueError("Invalid search type. Must be 'anchor', 'publicKey', or 'delegate'.")

        #  Ensure correct JSON-RPC format
        params = {
            "scope": account_id,
            "query": {
                "queryType": search_type,
                "value": value
            }
        }

        #  Merge optional parameters if provided
        if extra_params:
            params["query"].update(extra_params)

        logger.info(f" RPC Request: {params}")

        response = await self.json_rpc_request("search", params)

        if not response:
            logger.error(" API request returned no response.")
            raise AccumulateError("Search query failed: No response received.")

        return response


    async def network_status(self) -> Dict[str, Any]:
        """
        Fetch the current network status from the Accumulate blockchain.

        Returns:
            Dict[str, Any]: JSON response containing network status details.

        Raises:
            AccumulateError: If the query fails or returns no response.
        """
        try:
            response = await self.json_rpc_request("network-status")

            if not response:
                logger.error("Network status query returned no response.")
                raise AccumulateError("Network status query failed: No response received.")

            logger.info(f"Network Status Response: {response}")
            return response

        except AccumulateError:
            # Already carries a descriptive message (raised above or by
            # json_rpc_request); re-wrapping would duplicate the prefix.
            raise
        except Exception as e:
            logger.error(f"Failed to fetch network status: {e}")
            raise AccumulateError(f"Network status query failed: {e}") from e


    async def faucet(self, account: str, token_url: Optional[str] = None) -> Submission:
        """Request tokens from the Accumulate faucet using JSON-RPC"""
        if not account:
            raise ValueError("Account URL must be provided")

        params = {"account": account}
        if token_url:
            params["token"] = token_url

        response = await self.json_rpc_request("faucet", params)
        return Submission(**response)

    async def find_service(self, options: FindServiceOptions) -> List[FindServiceResult]:
        """Find available services on the Accumulate network using JSON-RPC"""
        params = options.to_dict()  # Converts to correct JSON-RPC format
        response = await self.json_rpc_request("find-service", params)

        return [FindServiceResult(
            peer_id=res.get("peer_id", ""),
            status=res.get("status", ""),
            addresses=res.get("addresses", [])
        ) for res in response]

    async def metrics(self) -> Dict[str, Any]:
        """Retrieve network metrics such as transactions per second using JSON-RPC"""
        return await self.json_rpc_request("metrics")

    async def list_snapshots(self) -> List[Dict[str, Any]]:
        """List available blockchain snapshots using JSON-RPC"""
        return await self.json_rpc_request("list-snapshots")

    async def close(self):
        """Close the transport connection"""
        logger.debug("🔌 Closing AccumulateClient transport connection")
        await self.transport.close()

close() async

Close the transport connection

Source code in accumulate\api\client.py
361
362
363
364
async def close(self):
    """Close the underlying transport connection held by this client."""
    logger.debug("🔌 Closing AccumulateClient transport connection")
    await self.transport.close()

faucet(account, token_url=None) async

Request tokens from the Accumulate faucet using JSON-RPC

Source code in accumulate\api\client.py
330
331
332
333
334
335
336
337
338
339
340
async def faucet(self, account: str, token_url: Optional[str] = None) -> Submission:
    """Request tokens from the Accumulate faucet using JSON-RPC.

    Args:
        account (str): URL of the account to fund; must be non-empty.
        token_url (Optional[str]): If given, sent as the "token" parameter.

    Returns:
        Submission: Constructed from the RPC response's keyword fields.

    Raises:
        ValueError: If account is empty.
    """
    if not account:
        raise ValueError("Account URL must be provided")

    params = {"account": account}
    if token_url:
        params["token"] = token_url

    response = await self.json_rpc_request("faucet", params)
    return Submission(**response)

find_service(options) async

Find available services on the Accumulate network using JSON-RPC

Source code in accumulate\api\client.py
342
343
344
345
346
347
348
349
350
351
async def find_service(self, options: FindServiceOptions) -> List[FindServiceResult]:
    """Find available services on the Accumulate network using JSON-RPC.

    Args:
        options (FindServiceOptions): Search options, serialized via to_dict().

    Returns:
        List[FindServiceResult]: One result per entry in the RPC response.
    """
    params = options.to_dict()  # Converts to correct JSON-RPC format
    response = await self.json_rpc_request("find-service", params)

    # NOTE(review): assumes the RPC result is a list of dicts with snake_case
    # keys ("peer_id", "status", "addresses") — confirm against the API.
    return [FindServiceResult(
        peer_id=res.get("peer_id", ""),
        status=res.get("status", ""),
        addresses=res.get("addresses", [])
    ) for res in response]

json_rpc_request(method, params=None) async

Send a JSON-RPC request with an auto-incrementing ID stored persistently.

Source code in accumulate\api\client.py
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
async def json_rpc_request(self, method: str, params: Optional[Dict[str, Any]] = None) -> Dict[str, Any]:
    """Send a JSON-RPC request with an auto-incrementing ID stored persistently.

    Args:
        method (str): JSON-RPC method name.
        params (Optional[Dict[str, Any]]): Parameters for the call; {} when omitted.

    Returns:
        Dict[str, Any]: The "result" member of the response ({} if absent).

    Raises:
        AccumulateError: If the response carries an "error" member or the
            transport request fails.
    """
    rpc_id = self._id_counter
    self._id_counter += 1
    save_counter(self._id_counter)  # Save new counter value

    json_rpc_payload = {
        "jsonrpc": "2.0",
        "method": method,
        "params": params or {},
        "id": rpc_id,
    }

    logger.info(f"RPC Request: {json_rpc_payload}")

    try:
        response = await self.transport.send_request(endpoint="v3", method="POST", data=json_rpc_payload)

        logger.info(f"RPC Response: {response}")

        if "error" in response:
            raise AccumulateError(f"JSON-RPC request failed ({method}): {response['error'].get('message', 'Unknown error')}")

        return response.get("result", {})

    except Exception as e:
        # NOTE(review): an AccumulateError raised above is caught again here
        # and re-wrapped, duplicating the "JSON-RPC request failed" prefix.
        logger.error(f"JSON-RPC request failed ({method}): {e}")
        raise AccumulateError(f"JSON-RPC request failed ({method}): {e}")

list_snapshots() async

List available blockchain snapshots using JSON-RPC

Source code in accumulate\api\client.py
357
358
359
async def list_snapshots(self) -> List[Dict[str, Any]]:
    """List available blockchain snapshots using JSON-RPC.

    Returns:
        List[Dict[str, Any]]: The "list-snapshots" RPC result.
    """
    return await self.json_rpc_request("list-snapshots")

metrics() async

Retrieve network metrics such as transactions per second using JSON-RPC

Source code in accumulate\api\client.py
353
354
355
async def metrics(self) -> Dict[str, Any]:
    """Retrieve network metrics such as transactions per second using JSON-RPC.

    Returns:
        Dict[str, Any]: The "metrics" RPC result.
    """
    return await self.json_rpc_request("metrics")

network_status() async

Fetch the current network status from the Accumulate blockchain.

Returns:

Type Description
Dict[str, Any]

Dict[str, Any]: JSON response containing network status details.

Source code in accumulate\api\client.py
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
async def network_status(self) -> Dict[str, Any]:
    """
    Fetch the current network status from the Accumulate blockchain.

    Returns:
        Dict[str, Any]: JSON response containing network status details.

    Raises:
        AccumulateError: If the query fails or returns no response.
    """
    try:
        response = await self.json_rpc_request("network-status")

        if not response:
            logger.error("Network status query returned no response.")
            raise AccumulateError("Network status query failed: No response received.")

        logger.info(f"Network Status Response: {response}")
        return response

    except Exception as e:
        # NOTE(review): the AccumulateError raised above is caught here and
        # re-wrapped, duplicating the "Network status query failed" prefix.
        logger.error(f"Failed to fetch network status: {e}")
        raise AccumulateError(f"Network status query failed: {e}")

query(scope, query) async

Submit a query to the Accumulate network using JSON-RPC.

Source code in accumulate\api\client.py
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
async def query(self, scope: str, query: Query) -> Record:
    """Submit a query to the Accumulate network using JSON-RPC.

    Args:
        scope (str): Scope/URL the query applies to.
        query (Query): Query object; must pass its own is_valid() check.

    Returns:
        Record: A Record subclass instance chosen from the response's
            record_type (plain Record when the type is unknown).

    Raises:
        ValueError: If query.is_valid() is False.
        AccumulateError: If the response is invalid JSON, not a dict, or
            lacks a record_type.
    """
    if not query.is_valid():
        raise ValueError("Invalid query.")

    # Convert query to dictionary
    query_dict = query.to_dict()
    query_dict["queryType"] = query.query_type.to_rpc_format()

    params = {"scope": scope, "query": query_dict}

    #  DEBUG: Log raw API request
    logger.debug(f" Sending Query Request: {params}")

    response = await self.json_rpc_request("query", params)

    #  DEBUG: Log raw API response
    logger.debug(f" Raw API Response: {response} (Type: {type(response)})")

    #  If response is a string, try parsing it as JSON
    if isinstance(response, str):
        try:
            response = json.loads(response)
            logger.debug(f" Decoded String Response: {response}")
        except json.JSONDecodeError:
            raise AccumulateError(f"API returned an invalid JSON string: {response}")

    # Ensure response is a dictionary
    if not isinstance(response, dict):
        raise AccumulateError(f"Unexpected API response format: {response} (type: {type(response)})")

    #  Convert API response keys from camelCase to snake_case
    response = {camel_to_snake(k): v for k, v in response.items()}

    if "record_type" not in response:
        raise AccumulateError(f"Unexpected response format: {response}")

    record_type = response["record_type"]

    #  Ensure consistency with RecordType enum
    # NOTE(review): keys use Enum.name.lower(), so multi-word names keep
    # underscores (e.g. "chain_entry") — confirm the API returns that form.
    record_mapping = {
        RecordType.ACCOUNT.name.lower(): AccountRecord,
        RecordType.CHAIN.name.lower(): ChainRecord,
        RecordType.MESSAGE.name.lower(): MessageRecord,
        RecordType.CHAIN_ENTRY.name.lower(): ChainEntryRecord,
        RecordType.KEY.name.lower(): KeyRecord,
        RecordType.SIGNATURE_SET.name.lower(): SignatureSetRecord,
        RecordType.URL.name.lower(): UrlRecord,
        RecordType.TX_ID.name.lower(): TxIDRecord,
        RecordType.RANGE.name.lower(): RecordRange,
        "directory": RecordRange,  # Some APIs might return this as 'directory'
    }

    record_cls = record_mapping.get(record_type, Record)  # Default to Record if unknown

    #  Ensure return type is correct
    record_obj = record_cls.from_dict(response)
    logger.debug(f" Processed Query Response: {record_obj} (Type: {type(record_obj)})")

    return record_obj  # This should return an object with is_valid()

query_block(block_type, index=None, start=None, count=None) async

Query a minor or major block.

Parameters:

Name Type Description Default
block_type str

Either "minor" or "major"

required
index Optional[int]

Block index (if querying a specific block)

None
start Optional[int]

Start index for range queries

None
count Optional[int]

Number of blocks to retrieve in range queries

None

Returns:

Name Type Description
dict dict

JSON response from the API

Source code in accumulate\api\client.py
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
async def query_block(self, block_type: str, index: Optional[int] = None, start: Optional[int] = None, count: Optional[int] = None) -> dict:
    """
    Query a minor or major block.

    Args:
        block_type (str): Either "minor" or "major"
        index (Optional[int]): Block index (if querying a specific block)
        start (Optional[int]): Start index for range queries
        count (Optional[int]): Number of blocks to retrieve in range queries

    Returns:
        dict: JSON response from the API

    Raises:
        ValueError: If block_type is not "minor" or "major".
        AccumulateError: If the API returns no response or an error member.
    """
    if block_type not in ["minor", "major"]:
        raise ValueError("Invalid block type. Must be 'minor' or 'major'.")

    params = {}

    if index is not None:
        #  Query a specific minor or major block
        url = f"{self.transport.base_url}/block/{block_type}/{index}"

        if block_type == "major":
            #  Fetch the first 3 minor blocks within the major block
            params["minor_start"] = 0
            params["minor_count"] = 3
            params["omit_empty"] = True  # Exclude empty minor blocks

    else:
        #  Query a block range
        url = f"{self.transport.base_url}/block/{block_type}"
        if start is not None:
            params["start"] = start
        if count is not None:
            params["count"] = count

    logger.info(f" HTTP Request: GET {url} with params {params}")

    # NOTE(review): endpoint here is a full URL, unlike the "v3" endpoint used
    # by json_rpc_request — confirm the transport handles both forms.
    response = await self.transport.send_request(endpoint=url, method="GET", params=params)

    if not response:
        logger.error(f" API request returned no response.")
        raise AccumulateError(f"Block query failed: No response received.")

    if response.get("error"):
        logger.error(f" API request failed: {response['error']}")
        raise AccumulateError(f"Block query failed: {response['error']['message']}")

    return response

search(account_id, search_type, value, extra_params=None) async

Search an account for an anchor, public key, or delegate using JSON-RPC.

Parameters:

Name Type Description Default
account_id str

The account ID to search within.

required
search_type str

The type of search. Must be 'anchor', 'publicKey', or 'delegate'.

required
value str

The value to search for (anchor hash, public key, or delegate URL).

required
extra_params Optional[Dict[str, Any]]

Additional query parameters.

None

Returns:

Name Type Description
dict dict

JSON response from the API.

Source code in accumulate\api\client.py
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
async def search(self, account_id: str, search_type: str, value: str, extra_params: Optional[Dict[str, Any]] = None) -> dict:
    """
    Search an account for an anchor, public key, or delegate using JSON-RPC.

    Args:
        account_id (str): The account ID to search within.
        search_type (str): The type of search. Must be 'anchor', 'publicKey', or 'delegate'.
        value (str): The value to search for (anchor hash, public key, or delegate URL).
        extra_params (Optional[Dict[str, Any]]): Additional query parameters.

    Returns:
        dict: JSON response from the API.

    Raises:
        ValueError: If search_type is not one of the allowed values.
        AccumulateError: If the API returns no response.
    """
    if search_type not in ["anchor", "publicKey", "delegate"]:
        raise ValueError("Invalid search type. Must be 'anchor', 'publicKey', or 'delegate'.")

    #  Ensure correct JSON-RPC format
    params = {
        "scope": account_id,
        "query": {
            "queryType": search_type,
            "value": value
        }
    }

    #  Merge optional parameters if provided
    # Extra keys are merged into the inner "query" object, not the top level.
    if extra_params:
        params["query"].update(extra_params)

    logger.info(f" RPC Request: {params}")

    response = await self.json_rpc_request("search", params)

    if not response:
        logger.error(f" API request returned no response.")
        raise AccumulateError(f"Search query failed: No response received.")

    return response

submit(envelope, verify=True, wait=True) async

Submit a transaction to the Accumulate network.

Parameters:

Name Type Description Default
envelope Dict[str, Any]

The transaction envelope containing transactions and signatures.

required
verify bool

If True, verifies the envelope before submission. Defaults to True.

True
wait bool

If True, blocks until submission is confirmed or rejected. Defaults to True.

True

Returns:

Type Description
Dict[str, Any]

Dict[str, Any]: The API response containing transaction details.

Source code in accumulate\api\client.py
 80
 81
 82
 83
 84
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
async def submit(self, envelope: Dict[str, Any], verify: bool = True, wait: bool = True) -> Dict[str, Any]:
    """
    Submit a transaction to the Accumulate network.

    Args:
        envelope (Dict[str, Any]): The transaction envelope containing transactions and signatures.
        verify (bool, optional): If True, verifies the envelope before submission. Defaults to True.
        wait (bool, optional): If True, blocks until submission is confirmed or rejected. Defaults to True.

    Returns:
        Dict[str, Any]: The API response containing transaction details.

    Raises:
        ValueError: If the envelope or any of its transactions are malformed.
        RuntimeError: If a signature entry is an unawaited coroutine.
    """
    if not isinstance(envelope, dict):
        raise ValueError("Envelope must be a dictionary.")

    #  Ensure the envelope has "signatures" (FIRST in order)
    if "signatures" not in envelope or not isinstance(envelope["signatures"], list) or not envelope["signatures"]:
        raise ValueError("Envelope must contain at least one signature in a list.")

    #  Ensure "transaction" is correctly structured (SECOND in order)
    if "transaction" not in envelope or not isinstance(envelope["transaction"], list) or not envelope["transaction"]:
        raise ValueError("Envelope must contain at least one transaction in a list.")

    #  Validate transaction structure
    for txn in envelope["transaction"]:
        if not isinstance(txn, dict):
            raise ValueError("Each transaction must be a dictionary.")

        if "header" not in txn or not isinstance(txn["header"], dict):
            raise ValueError("Each transaction must contain a 'header' as a dictionary.")

        if "body" not in txn or not isinstance(txn["body"], dict):
            raise ValueError("Each transaction must contain a 'body' as a dictionary.")

        #  Ensure transactionHash (if exists) is properly formatted
        if "transactionHash" in txn and not isinstance(txn["transactionHash"], str):
            raise ValueError("Transaction hash must be a raw hex string.")

    #  Order envelope fields correctly
    ordered_envelope = {
        "signatures": envelope["signatures"],  # Signatures First
        "transaction": envelope["transaction"],  # Transactions Second
    }

    #  Include messages if they exist (LAST in order)
    if "messages" in envelope and isinstance(envelope["messages"], list):
        ordered_envelope["messages"] = envelope["messages"]

    params = {
        "envelope": ordered_envelope,
        "verify": verify,
        "wait": wait,
    }

    #  Debugging Before JSON Serialization
    # A coroutine here means an async signing call was not awaited upstream.
    for sig in envelope["signatures"]:
        if inspect.iscoroutine(sig):
            raise RuntimeError(f" Signature contains an unawaited coroutine: {sig}")

    json_params = json.dumps(params)  # Convert Python dict to JSON string
    formatted_params = json.loads(json_params)  # Convert JSON string back to dict (ensures double quotes)

    #  Log the final envelope before submission
    logger.info(" Debug: Final Submission Payload")
    logger.info(json.dumps(formatted_params, indent=2))

    return await self.json_rpc_request("submit", formatted_params)

validate(envelope, full=False) async

Validate a transaction envelope against the Accumulate network using JSON-RPC

Source code in accumulate\api\client.py
149
150
151
152
async def validate(self, envelope: Dict[str, Any], full: bool = False) -> Dict[str, Any]:
    """Validate a transaction envelope against the Accumulate network using JSON-RPC.

    Args:
        envelope (Dict[str, Any]): The transaction envelope to validate.
        full (bool): Forwarded as the "full" parameter; semantics defined by the API.

    Returns:
        Dict[str, Any]: The "validate" RPC result.
    """
    params = {"envelope": envelope, "full": full}
    return await self.json_rpc_request("validate", params)

load_counter()

Load the last used request ID from file or start at 1 if missing.

Source code in accumulate\api\client.py
30
31
32
33
34
35
def load_counter() -> int:
    """Load the last used request ID from file or start at 1 if missing.

    Returns:
        int: The persisted counter value, or 1 when the file is missing,
        unreadable, or contains invalid JSON.
    """
    try:
        with open(ID_COUNTER_FILE, "r") as f:
            return json.load(f).get("id", 1)
    except (OSError, ValueError):
        # Missing, unreadable, or corrupt counter file -- restart the sequence.
        # json.JSONDecodeError is a subclass of ValueError, so it is covered;
        # this also avoids the exists-then-open race of the previous version.
        return 1

save_counter(counter)

Save the current counter value to the file.

Source code in accumulate\api\client.py
37
38
39
40
def save_counter(counter: int):
    """Persist the most recent request ID so the sequence survives restarts."""
    payload = json.dumps({"id": counter})
    with open(ID_COUNTER_FILE, "w") as f:
        f.write(payload)

API Core (Context, Transport, Querier, Exceptions)

context

RequestContext

Represents the context for a request, including metadata and optional cancellation tokens.

Source code in accumulate\api\context.py
 6
 7
 8
 9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
class RequestContext:
    """
    Carries per-request state: arbitrary metadata and, optionally, cancellation tokens.
    """

    def __init__(self, metadata: Dict[str, Any] = None):
        """
        Create a context, optionally seeded with metadata.
        :param metadata: A dictionary of request-specific metadata.
        """
        # Keep the caller's dict by reference; substitute a fresh dict only
        # when nothing (or an empty/falsy value) was supplied.
        self.metadata = metadata if metadata else {}

    def get_metadata(self, key: str) -> Any:
        """
        Look up a metadata entry.
        :param key: Metadata key.
        :return: The stored value, or None when the key is absent.
        """
        return self.metadata.get(key)

    def set_metadata(self, key: str, value: Any):
        """
        Store (or overwrite) a metadata entry.
        :param key: Metadata key.
        :param value: Metadata value.
        """
        self.metadata[key] = value

__init__(metadata=None)

Initialize the RequestContext with optional metadata. :param metadata: A dictionary containing request-specific metadata.

Source code in accumulate\api\context.py
11
12
13
14
15
16
def __init__(self, metadata: Dict[str, Any] = None):
    """
    Initialize the RequestContext with optional metadata.
    :param metadata: A dictionary containing request-specific metadata.
    """
    self.metadata = metadata or {}

get_metadata(key)

Retrieve a value from the context metadata. :param key: Metadata key. :return: Metadata value or None if the key does not exist.

Source code in accumulate\api\context.py
18
19
20
21
22
23
24
def get_metadata(self, key: str) -> Any:
    """
    Retrieve a value from the context metadata.
    :param key: Metadata key.
    :return: Metadata value or None if the key does not exist.
    """
    return self.metadata.get(key)

set_metadata(key, value)

Set a value in the context metadata. :param key: Metadata key. :param value: Metadata value.

Source code in accumulate\api\context.py
26
27
28
29
30
31
32
def set_metadata(self, key: str, value: Any):
    """
    Set a value in the context metadata.
    :param key: Metadata key.
    :param value: Metadata value.
    """
    self.metadata[key] = value

transport

RoutedTransport

Handles HTTP transport for the Accumulate RPC API.

Source code in accumulate\api\transport.py
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
class RoutedTransport:
    """Handles HTTP transport for the Accumulate RPC API."""

    def __init__(self, base_url: str, timeout: int = 15):
        """
        Initialize the transport layer.

        Args:
            base_url (str): The base URL of the Accumulate network (e.g., mainnet or testnet).
            timeout (int): Request timeout in seconds.
        """
        self.base_url = base_url
        self.client = httpx.AsyncClient(base_url=base_url, timeout=timeout)

    async def send_request(
        self, endpoint: str, method: str = "GET", params: Dict[str, Any] = None, data: Dict[str, Any] = None, debug: bool = False
    ) -> Dict[str, Any]:
        """
        Send an HTTP request to the Accumulate API; in debug mode, log the
        request and return it without sending.

        Args:
            endpoint (str): The API endpoint (e.g., "query/{scope}").
            method (str): The HTTP method (e.g., "GET", "POST").
            params (Dict[str, Any], optional): Query parameters for the request.
            data (Dict[str, Any], optional): JSON body for the request.
            debug (bool): If True, log the JSON request instead of sending it.

        Returns:
            Dict[str, Any]: The decoded JSON response, or (in debug mode) a
            description of the request that would have been sent.

        Raises:
            Exception: If the request fails, the server returns an error
            status, or the response body is not valid JSON.
        """
        # Describe the request exactly as it would be sent; this doubles as
        # the debug-mode return value.
        rpc_request = {
            "method": method,
            "url": f"{self.base_url}/{endpoint}",
            "params": params,
            "json": data
        }

        # Dry run: log the payload and hand it back instead of sending.
        if debug:
            formatted_json = json.dumps(rpc_request, indent=4)  # Pretty-print JSON
            logger.info(f" RPC Request (Not Sent):\n{formatted_json}")
            return rpc_request  # Return the request object instead of sending

        try:
            response = await self.client.request(
                method=method,
                url=endpoint,  # resolved against the client's base_url
                params=params,
                json=data,
            )
            response.raise_for_status()
            return response.json()

        except httpx.RequestError as e:
            # Network-level failure: DNS, connection, timeout, etc.
            logger.error(f" Request failed: {e}")
            raise Exception(f"Request failed: {e}") from e

        except httpx.HTTPStatusError as e:
            # Server answered with a 4xx/5xx status (from raise_for_status).
            logger.error(f" HTTP error {e.response.status_code}: {e.response.text}")
            raise Exception(f"HTTP error: {e.response.status_code} - {e.response.text}") from e

        except ValueError as e:
            # Body was not valid JSON (json.JSONDecodeError is a ValueError).
            logger.error(f" Invalid JSON response: {e}")
            raise Exception(f"Invalid JSON response: {e}") from e

    async def close(self):
        """Close the underlying HTTP client and release its connections."""
        await self.client.aclose()

__init__(base_url, timeout=15)

Initialize the transport layer.

Parameters:

Name Type Description Default
base_url str

The base URL of the Accumulate network (e.g., mainnet or testnet).

required
timeout int

Request timeout in seconds.

15
Source code in accumulate\api\transport.py
15
16
17
18
19
20
21
22
23
24
def __init__(self, base_url: str, timeout: int = 15):
    """
    Initialize the transport layer.

    Args:
        base_url (str): The base URL of the Accumulate network (e.g., mainnet or testnet).
        timeout (int): Request timeout in seconds.
    """
    self.base_url = base_url
    self.client = httpx.AsyncClient(base_url=base_url, timeout=timeout)

close() async

Close the transport client.

Source code in accumulate\api\transport.py
82
83
84
async def close(self):
    """Close the transport client."""
    await self.client.aclose()

send_request(endpoint, method='GET', params=None, data=None, debug=False) async

Send an HTTP request to the Accumulate API; if debug mode is enabled, log the JSON-RPC request without sending it.

Parameters:

Name Type Description Default
endpoint str

The API endpoint (e.g., "query/{scope}").

required
method str

The HTTP method (e.g., "GET", "POST").

'GET'
params Dict[str, Any]

Query parameters for the request.

None
data Dict[str, Any]

JSON body for the request.

None
debug bool

If True, print the JSON request instead of sending it.

False

Returns:

Type Description
Dict[str, Any]

Dict[str, Any]: The printed request data as a dictionary.

Raises:

Type Description
Exception

If the request fails or the response contains an error.

Source code in accumulate\api\transport.py
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
async def send_request(
    self, endpoint: str, method: str = "GET", params: Dict[str, Any] = None, data: Dict[str, Any] = None, debug: bool = False
) -> Dict[str, Any]:
    """
    Print the exact JSON-RPC request without sending it if debug mode is enabled.

    Args:
        endpoint (str): The API endpoint (e.g., "query/{scope}").
        method (str): The HTTP method (e.g., "GET", "POST").
        params (Dict[str, Any], optional): Query parameters for the request.
        data (Dict[str, Any], optional): JSON body for the request.
        debug (bool): If True, print the JSON request instead of sending it.

    Returns:
        Dict[str, Any]: The printed request data as a dictionary.

    Raises:
        Exception: If the request fails or the response contains an error.
    """

    # Construct the exact JSON-RPC request
    rpc_request = {
        "method": method,
        "url": f"{self.base_url}/{endpoint}",
        "params": params,
        "json": data
    }

    # If debug mode is enabled, print the JSON and return without sending
    if debug:
        formatted_json = json.dumps(rpc_request, indent=4)  # Pretty-print JSON
        logger.info(f" RPC Request (Not Sent):\n{formatted_json}")
        return rpc_request  # Return the request object instead of sending

    try:
        response = await self.client.request(
            method=method,
            url=endpoint,
            params=params,
            json=data,
        )
        response.raise_for_status()
        return response.json()

    except httpx.RequestError as e:
        logger.error(f" Request failed: {e}")
        raise Exception(f"Request failed: {e}")

    except httpx.HTTPStatusError as e:
        logger.error(f" HTTP error {e.response.status_code}: {e.response.text}")
        raise Exception(f"HTTP error: {e.response.status_code} - {e.response.text}")

    except ValueError as e:
        logger.error(f" Invalid JSON response: {e}")
        raise Exception(f"Invalid JSON response: {e}")

querier

Querier

Handles queries related to accounts, transactions, records, and events.

Source code in accumulate\api\querier.py
 28
 29
 30
 31
 32
 33
 34
 35
 36
 37
 38
 39
 40
 41
 42
 43
 44
 45
 46
 47
 48
 49
 50
 51
 52
 53
 54
 55
 56
 57
 58
 59
 60
 61
 62
 63
 64
 65
 66
 67
 68
 69
 70
 71
 72
 73
 74
 75
 76
 77
 78
 79
 80
 81
 82
 83
 84
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
class Querier:
    """Handles queries related to accounts, transactions, records, and events."""

    def __init__(self, transport):
        """
        :param transport: Transport object; must expose an async
            ``send_message(ctx, message)`` method (see ``query``).
        """
        self.transport = transport
        self.logger = logging.getLogger(__name__)
        # NOTE(review): basicConfig() mutates global logging state every time
        # a Querier is constructed — confirm this side effect is intended.
        logging.basicConfig(level=logging.DEBUG)

    async def query(self, ctx: RequestContext, scope: str, query: Query, result_type: Type[T]) -> T:
        """Submit a generic query to the Accumulate network.

        Validates the query, wraps it in the transport message format, sends
        it, and deserializes the response into ``result_type``. Any failure is
        re-raised as AccumulateError with the original exception as __cause__.
        """
        if not query.is_valid():
            self.logger.error("Invalid query: %s", query)
            raise ValueError("Invalid query.")

        # Message envelope expected by the transport layer.
        message = {
            "action": "Query",
            "params": {"scope": scope, "type": query.query_type.name, "params": query.to_dict()},
        }

        try:
            self.logger.debug("Sending query: %s", message)
            # NOTE(review): this calls transport.send_message(); the
            # RoutedTransport elsewhere in this package exposes send_request()
            # instead — confirm which transport type is actually passed here.
            response = await self.transport.send_message(ctx, message)
            self.logger.debug("Query response: %s", response)
            return self._deserialize_response(response, result_type)
        except Exception as e:
            error_message = f"Query failed: {e}"
            self.logger.error(error_message)
            raise AccumulateError(error_message) from e

    async def query_record(self, ctx: RequestContext, scope: URL, query: Query, result_type: Type[T]) -> T:
        """Submit a query for a specific record type."""
        try:
            self.logger.debug("Querying record for scope: %s, query: %s, result_type: %s", scope, query, result_type)
            response = await self.query(ctx, str(scope), query, result_type)  # Pass result_type here
            self.logger.debug("Record query response: %s", response)
            # NOTE(review): self.query() has already deserialized the
            # response, so this is a second pass — redundant for exact-type
            # results; for RecordRange it re-runs range_of. Confirm intended.
            return self._deserialize_response(response, result_type)
        except Exception as e:
            self.logger.error("Error in query_record: %s", e)
            raise

    async def query_events(self, ctx: RequestContext, scope: URL, query: Query) -> List[Record]:
        """Query for events.

        Fetches a RecordRange and maps each record to its event class via
        EVENT_TYPE_MAPPING; unknown event types are logged and skipped.
        """
        try:
            self.logger.debug("Querying events for scope: %s, query: %s", scope, query)
            response = await self.query(ctx, str(scope), query, RecordRange)
            self.logger.debug("Event query response: %s", response)

            events = []
            for record in response.records:
                if not isinstance(record, Record):
                    raise AccumulateError(f"Unexpected record type in events: {type(record)}")

                event_type = record.record_type
                event_class_path = EVENT_TYPE_MAPPING.get(event_type)
                if not event_class_path:
                    # Unknown event types are non-fatal: log and skip.
                    self.logger.warning(f"Skipping unknown event type: {event_type}")
                    continue

                # Dynamically resolve the event class from its dotted path.
                module_name, class_name = event_class_path.rsplit(".", 1)
                module = importlib.import_module(module_name)
                event_class = getattr(module, class_name)

                # Round-trip through dict to build the concrete event object.
                events.append(event_class.from_dict(record.to_dict()))

            return events
        except Exception as e:
            error_message = f"Error in query_events: {e}"
            self.logger.error(error_message)
            raise AccumulateError(error_message) from e


    def _deserialize_response(self, data: Any, result_type: Type[T]) -> T:
        """Deserialize a response into the expected result type.

        RecordRange results are validated item-by-item via range_of; any other
        type is checked with isinstance and returned as-is.
        """
        try:
            self.logger.debug("Deserializing response: %s into type: %s", data, result_type)

            if issubclass(result_type, RecordRange):
                if not isinstance(data, RecordRange):
                    raise AccumulateError(f"Expected RecordRange, got {type(data)}")

                # Use range_of for validation of the contained item type.
                return range_of(data, getattr(data, "item_type", Record))

            if not isinstance(data, result_type):
                raise AccumulateError(f"Expected {result_type}, got {type(data)}")
            return data
        except Exception as e:
            self.logger.error("Deserialization failed: %s", e)
            raise AccumulateError(f"Deserialization failed: {e}")

    async def query_generic(self, ctx: RequestContext, scope: URL, query: Query, result_type: Type[T]) -> T:
        """Generic query handler."""
        response = await self.query(ctx, str(scope), query, result_type)  # Pass result_type here
        return self._deserialize_response(response, result_type)

    async def query_account(self, ctx: RequestContext, account: URL, query: Query) -> AccountRecord:
        """Query account details."""
        result = await self.query_record(ctx, account, query, AccountRecord)
        if not isinstance(result, AccountRecord):
            raise AccumulateError(f"Unexpected response type: {type(result)} (expected AccountRecord)")
        return result

    async def query_chain(self, ctx: RequestContext, scope: URL, query: Query) -> ChainRecord:
        """Query chain details."""
        result = await self.query_record(ctx, scope, query, ChainRecord)
        if not isinstance(result, ChainRecord):
            raise AccumulateError(f"Unexpected response type: {type(result)} (expected ChainRecord)")
        return result

    async def query_chain_entries(self, ctx: RequestContext, scope: URL, query: Query) -> RecordRange[ChainEntryRecord]:
        """Query chain entries; validates both the range and its nested entry types."""
        result = await self.query_record(ctx, scope, query, RecordRange)
        if not isinstance(result, RecordRange) or not all(isinstance(r, ChainEntryRecord) for r in result.records):
            raise AccumulateError(f"Unexpected response type: {type(result)} or invalid nested types")
        return result

    async def query_transaction(self, ctx: RequestContext, txid: URL, query: Query) -> MessageRecord:
        """Query transaction details."""
        self.logger.debug("Querying transaction for: %s with query: %s", txid, query)
        result = await self.query_record(ctx, txid, query, MessageRecord)
        if not isinstance(result, MessageRecord):
            raise AccumulateError(f"Unexpected response type: {type(result)} (expected MessageRecord)")
        return result


    async def query_block(self, ctx: RequestContext, scope: URL, query: Query) -> RecordRange:
        """Query block details."""
        self.logger.debug("Querying block for: %s with query: %s", scope, query)
        result = await self.query_record(ctx, scope, query, RecordRange)
        if not isinstance(result, RecordRange):
            raise AccumulateError(f"Unexpected response type: {type(result)} (expected RecordRange)")
        return result

_deserialize_response(data, result_type)

Deserialize a response into the expected result type.

Source code in accumulate\api\querier.py
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
def _deserialize_response(self, data: Any, result_type: Type[T]) -> T:
    """Deserialize a response into the expected result type."""
    try:
        self.logger.debug("Deserializing response: %s into type: %s", data, result_type)

        if issubclass(result_type, RecordRange):
            if not isinstance(data, RecordRange):
                raise AccumulateError(f"Expected RecordRange, got {type(data)}") #

            # Use range_of for validation
            return range_of(data, getattr(data, "item_type", Record))

        if not isinstance(data, result_type):
            raise AccumulateError(f"Expected {result_type}, got {type(data)}")
        return data
    except Exception as e:
        self.logger.error("Deserialization failed: %s", e)
        raise AccumulateError(f"Deserialization failed: {e}")

query(ctx, scope, query, result_type) async

Submit a generic query to the Accumulate network.

Source code in accumulate\api\querier.py
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
async def query(self, ctx: RequestContext, scope: str, query: Query, result_type: Type[T]) -> T:
    """Submit a generic query to the Accumulate network."""
    if not query.is_valid():
        self.logger.error("Invalid query: %s", query)
        raise ValueError("Invalid query.")

    message = {
        "action": "Query",
        "params": {"scope": scope, "type": query.query_type.name, "params": query.to_dict()},
    }

    try:
        self.logger.debug("Sending query: %s", message)
        response = await self.transport.send_message(ctx, message)
        self.logger.debug("Query response: %s", response)
        return self._deserialize_response(response, result_type)
    except Exception as e:
        error_message = f"Query failed: {e}"
        self.logger.error(error_message)
        raise AccumulateError(error_message) from e

query_account(ctx, account, query) async

Query account details.

Source code in accumulate\api\querier.py
125
126
127
128
129
130
async def query_account(self, ctx: RequestContext, account: URL, query: Query) -> AccountRecord:
    """Query account details."""
    result = await self.query_record(ctx, account, query, AccountRecord) #
    if not isinstance(result, AccountRecord): #
        raise AccumulateError(f"Unexpected response type: {type(result)} (expected AccountRecord)") #
    return result #

query_block(ctx, scope, query) async

Query block details.

Source code in accumulate\api\querier.py
155
156
157
158
159
160
161
async def query_block(self, ctx: RequestContext, scope: URL, query: Query) -> RecordRange:
    """Query block details."""
    self.logger.debug("Querying block for: %s with query: %s", scope, query) #
    result = await self.query_record(ctx, scope, query, RecordRange) #
    if not isinstance(result, RecordRange): #
        raise AccumulateError(f"Unexpected response type: {type(result)} (expected RecordRange)") #
    return result

query_chain(ctx, scope, query) async

Query chain details.

Source code in accumulate\api\querier.py
132
133
134
135
136
137
async def query_chain(self, ctx: RequestContext, scope: URL, query: Query) -> ChainRecord:
    """Query chain details."""
    result = await self.query_record(ctx, scope, query, ChainRecord)
    if not isinstance(result, ChainRecord):
        raise AccumulateError(f"Unexpected response type: {type(result)} (expected ChainRecord)") #
    return result

query_chain_entries(ctx, scope, query) async

Query chain entries.

Source code in accumulate\api\querier.py
139
140
141
142
143
144
async def query_chain_entries(self, ctx: RequestContext, scope: URL, query: Query) -> RecordRange[ChainEntryRecord]:
    """Query chain entries."""
    result = await self.query_record(ctx, scope, query, RecordRange)
    if not isinstance(result, RecordRange) or not all(isinstance(r, ChainEntryRecord) for r in result.records):
        raise AccumulateError(f"Unexpected response type: {type(result)} or invalid nested types") #
    return result

query_events(ctx, scope, query) async

Query for events.

Source code in accumulate\api\querier.py
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
async def query_events(self, ctx: RequestContext, scope: URL, query: Query) -> List[Record]:
    """Query for events."""
    try:
        self.logger.debug("Querying events for scope: %s, query: %s", scope, query)
        response = await self.query(ctx, str(scope), query, RecordRange)
        self.logger.debug("Event query response: %s", response)

        events = []
        for record in response.records:
            if not isinstance(record, Record):
                raise AccumulateError(f"Unexpected record type in events: {type(record)}")

            event_type = record.record_type
            event_class_path = EVENT_TYPE_MAPPING.get(event_type)
            if not event_class_path:
                # Log and skip unknown event types
                self.logger.warning(f"Skipping unknown event type: {event_type}")
                continue

            # Dynamically resolve the class
            module_name, class_name = event_class_path.rsplit(".", 1)
            module = importlib.import_module(module_name)
            event_class = getattr(module, class_name)

            events.append(event_class.from_dict(record.to_dict()))

        return events
    except Exception as e:
        error_message = f"Error in query_events: {e}"
        self.logger.error(error_message)
        raise AccumulateError(error_message) from e

query_generic(ctx, scope, query, result_type) async

Generic query handler.

Source code in accumulate\api\querier.py
120
121
122
123
async def query_generic(self, ctx: RequestContext, scope: URL, query: Query, result_type: Type[T]) -> T:
    """Generic query handler."""
    response = await self.query(ctx, str(scope), query, result_type)  # Pass result_type here
    return self._deserialize_response(response, result_type)

query_record(ctx, scope, query, result_type) async

Submit a query for a specific record type.

Source code in accumulate\api\querier.py
57
58
59
60
61
62
63
64
65
66
async def query_record(self, ctx: RequestContext, scope: URL, query: Query, result_type: Type[T]) -> T:
    """Submit a query for a specific record type."""
    try:
        self.logger.debug("Querying record for scope: %s, query: %s, result_type: %s", scope, query, result_type)
        response = await self.query(ctx, str(scope), query, result_type)  # Pass result_type here
        self.logger.debug("Record query response: %s", response)
        return self._deserialize_response(response, result_type)
    except Exception as e:
        self.logger.error("Error in query_record: %s", e)
        raise

query_transaction(ctx, txid, query) async

Query transaction details.

Source code in accumulate\api\querier.py
146
147
148
149
150
151
152
async def query_transaction(self, ctx: RequestContext, txid: URL, query: Query) -> MessageRecord:
    """Query transaction details."""
    self.logger.debug("Querying transaction for: %s with query: %s", txid, query) #
    result = await self.query_record(ctx, txid, query, MessageRecord) #
    if not isinstance(result, MessageRecord): #
        raise AccumulateError(f"Unexpected response type: {type(result)} (expected MessageRecord)") #
    return result #

exceptions

AccumulateError

Bases: Exception

Base class for all custom exceptions in the Accumulate client.

Source code in accumulate\api\exceptions.py
3
4
5
class AccumulateError(Exception):
    """Root of the Accumulate client's custom exception hierarchy."""

FaucetError

Bases: AccumulateError

Raised when faucet token requests fail.

Source code in accumulate\api\exceptions.py
23
24
25
class FaucetError(AccumulateError):
    """Signals a failed faucet token request."""

QueryError

Bases: AccumulateError

Raised when a query to the RPC API fails.

Source code in accumulate\api\exceptions.py
 8
 9
10
class QueryError(AccumulateError):
    """Signals a failed query against the RPC API."""

SubmissionError

Bases: AccumulateError

Raised when a transaction submission fails.

Source code in accumulate\api\exceptions.py
13
14
15
class SubmissionError(AccumulateError):
    """Signals a failed transaction submission."""

ValidationError

Bases: AccumulateError

Raised when validation fails.

Source code in accumulate\api\exceptions.py
18
19
20
class ValidationError(AccumulateError):
    """Signals that validation failed."""

Transactions (Models)

transactions

AddCredits

Bases: TransactionBodyBase

Represents an AddCredits transaction.

Source code in accumulate\models\transactions.py
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
class AddCredits(TransactionBodyBase):
    """
    Represents an AddCredits transaction.

    Credit amounts are held internally in micro-units: the ``amount`` given to
    the constructor is multiplied by 2,000,000 (and divided back out in
    ``unmarshal``).
    """

    def __init__(self, client, recipient: Union[str, "URL"], amount: int):
        """
        :param client: Instance of AccumulateClient (can be None when unmarshaling)
        :param recipient: The URL of the account receiving the credits.
        :param amount: The amount of credits to add, in whole credits; stored
            internally multiplied by 2,000,000.
        """
        self.client = client
        self.oracle = None  # Oracle price will be fetched asynchronously
        self.recipient = self._normalize_recipient(str(recipient))
        self.amount = amount * 2_000_000  # Store amount in micro-units

    async def initialize_oracle(self):
        """Fetch and set the oracle price asynchronously from network status.

        Falls back to 5000 when the client is unavailable or the status call
        fails, so construction never blocks on the network.
        """
        try:
            status = await self.client.network_status()
            self.oracle = int(status.get("oracle", {}).get("price", 5000))
        except Exception as e:
            logger.error(f"Failed to fetch oracle price: {e}")
            self.oracle = 5000

    def type(self) -> TransactionType:
        # Transaction-type discriminant used by the envelope machinery.
        return TransactionType.ADD_CREDITS

    def fields_to_encode(self):
        """Returns the fields to be marshaled, as (field_id, value, encoder) triples."""
        return [
            (1, b'\x0E', lambda x: x),  # Type marker for AddCredits
            (2, self.recipient.encode("utf-8"), lambda x: encode_uvarint(len(x)) + x),  # Length-prefixed recipient
            (3, self.amount, encode_compact_int),  # Amount (micro-units)
            (4, self.oracle if self.oracle is not None else 0, encode_uvarint),  # Oracle price (0 if unset)
        ]

    def to_dict(self) -> dict:
        """Convert AddCredits transaction to a dictionary (amount as string, oracle defaulting to 0)."""
        return {
            **super().to_dict(),
            "recipient": self.recipient,
            "amount": str(self.amount),
            "oracle": self.oracle if self.oracle is not None else 0
        }

    @classmethod
    def unmarshal(cls, data: bytes) -> "AddCredits":
        """Deserialize an AddCredits transaction from bytes.

        Reads the four fields written by ``fields_to_encode`` in order and
        reconstructs an instance with ``client=None``.
        """
        reader = io.BytesIO(data)

        # Field 1: Type marker
        field_id = reader.read(1)
        if field_id != b'\x01':
            raise ValueError("Expected field id 1 for type marker")
        type_marker = reader.read(1)
        if type_marker != b'\x0E':
            raise ValueError("Invalid type marker for AddCredits")

        # Field 2: Recipient (uvarint length prefix + UTF-8 bytes)
        field_id = reader.read(1)
        if field_id != b'\x02':
            raise ValueError("Expected field id 2 for recipient")
        rec_length = read_uvarint(reader)
        recipient_bytes = reader.read(rec_length)
        recipient = recipient_bytes.decode("utf-8")

        # Field 3: Amount (compact int: 1-byte length, then big-endian bytes)
        field_id = reader.read(1)
        if field_id != b'\x03':
            raise ValueError("Expected field id 3 for amount")
        num_bytes_raw = reader.read(1)
        if not num_bytes_raw:
            raise ValueError("Missing compact int length for amount")
        num_bytes = num_bytes_raw[0]
        amount_bytes = reader.read(num_bytes)
        amount = int.from_bytes(amount_bytes, byteorder='big')

        # Field 4: Oracle
        field_id = reader.read(1)
        if field_id != b'\x04':
            raise ValueError("Expected field id 4 for oracle")
        oracle_adjusted = read_uvarint(reader)
        # NOTE(review): fields_to_encode writes the oracle value as-is, but
        # decoding divides by 100 — confirm the wire format actually carries
        # oracle*100, otherwise encode/decode are asymmetric.
        oracle = oracle_adjusted // 100

        # Undo the * 2_000_000 applied by __init__ (amount is in micro-units).
        obj = cls(None, recipient, amount // 2_000_000)
        obj.oracle = oracle
        return obj

    @staticmethod
    def _normalize_recipient(recipient: str) -> str:
        """
        Canonicalize the recipient URL: lowercase, trim surrounding slashes,
        drop any existing "acc://" prefix, append "/acme" if missing, and
        re-attach the "acc://" scheme.
        """
        recipient = recipient.lower().strip("/")
        if recipient.startswith("acc://"):
            recipient = recipient[6:]
        if not recipient.endswith("/acme"):
            recipient += "/acme"
        return f"acc://{recipient}"

__init__(client, recipient, amount)

:param client: Instance of AccumulateClient (can be None when unmarshaling) :param recipient: The URL of the account receiving the credits. :param amount: The amount of credits to add (in whole credits; this will be multiplied by 2,000,000).

Source code in accumulate\models\transactions.py
464
465
466
467
468
469
470
471
472
473
def __init__(self, client, recipient: Union[str, "URL"], amount: int):
    """
    :param client: Instance of AccumulateClient (can be None when unmarshaling)
    :param recipient: The URL of the account receiving the credits.
    :param amount: The amount of credits to add (in whole credits; this will be multiplied by 1e6).
    """
    self.client = client
    self.oracle = None  # Oracle price will be fetched asynchronously
    self.recipient = self._normalize_recipient(str(recipient))
    self.amount = amount * 2_000_000  # Store amount in microcredits

_normalize_recipient(recipient) staticmethod

Ensure the recipient is formatted correctly. It must start with "acc://", not include ".MAIN", and end with "/acme" (all lowercase).

Source code in accumulate\models\transactions.py
549
550
551
552
553
554
555
556
557
558
559
560
@staticmethod
def _normalize_recipient(recipient: str) -> str:
    """
    Ensure the recipient is formatted correctly.
    It must start with "acc://", not include ".MAIN", and end with "/acme" (all lowercase).
    """
    recipient = recipient.lower().strip("/")
    if recipient.startswith("acc://"):
        recipient = recipient[6:]
    if not recipient.endswith("/acme"):
        recipient += "/acme"
    return f"acc://{recipient}"

fields_to_encode()

Returns the fields to be marshaled.

Source code in accumulate\models\transactions.py
487
488
489
490
491
492
493
494
def fields_to_encode(self):
    """Returns the fields to be marshaled.

    Each entry is ``(field_number, value, encoder)``; the encoder is applied
    to the value at serialization time.
    """
    return [
        (1, b'\x0E', lambda x: x),  # Type marker (AddCredits body type byte, pre-encoded)
        (2, self.recipient.encode("utf-8"), lambda x: encode_uvarint(len(x)) + x),  # Recipient (length-prefixed UTF-8)
        (3, self.amount, encode_compact_int),  # Amount (microcredits)
        (4, self.oracle if self.oracle is not None else 0, encode_uvarint),  # Oracle price
        # NOTE(review): the oracle is encoded as-is here, but unmarshal()
        # divides the decoded value by 100 — round trips do not preserve it;
        # confirm which side matches the wire format.
    ]

initialize_oracle() async

Fetch and set the oracle price asynchronously from network status.

Source code in accumulate\models\transactions.py
475
476
477
478
479
480
481
482
async def initialize_oracle(self):
    """Fetch and set the oracle price asynchronously from network status.

    Falls back to a default price of 5000 when the network query (or the
    parsing of its response) fails.
    """
    try:
        status = await self.client.network_status()
        price = status.get("oracle", {}).get("price", 5000)
        self.oracle = int(price)
    except Exception as e:
        logger.error(f"Failed to fetch oracle price: {e}")
        self.oracle = 5000

to_dict()

Convert AddCredits transaction to a dictionary.

Source code in accumulate\models\transactions.py
496
497
498
499
500
501
502
503
def to_dict(self) -> dict:
    """Convert AddCredits transaction to a dictionary."""
    payload = dict(super().to_dict())
    payload["recipient"] = self.recipient
    payload["amount"] = str(self.amount)
    payload["oracle"] = 0 if self.oracle is None else self.oracle
    return payload

unmarshal(data) classmethod

Deserialize an AddCredits transaction from bytes.

Source code in accumulate\models\transactions.py
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
@classmethod
def unmarshal(cls, data: bytes) -> "AddCredits":
    """Deserialize an AddCredits transaction from bytes.

    :param data: The marshaled transaction body.
    :return: An AddCredits instance with client set to None.
    :raises ValueError: If a field id or the type marker is unexpected.
    """
    reader = io.BytesIO(data)

    # Field 1: Type marker
    field_id = reader.read(1)
    if field_id != b'\x01':
        raise ValueError("Expected field id 1 for type marker")
    type_marker = reader.read(1)
    if type_marker != b'\x0E':
        raise ValueError("Invalid type marker for AddCredits")

    # Field 2: Recipient (uvarint length followed by UTF-8 bytes)
    field_id = reader.read(1)
    if field_id != b'\x02':
        raise ValueError("Expected field id 2 for recipient")
    rec_length = read_uvarint(reader)
    recipient_bytes = reader.read(rec_length)
    recipient = recipient_bytes.decode("utf-8")

    # Field 3: Amount (compact int: one length byte, then big-endian bytes)
    field_id = reader.read(1)
    if field_id != b'\x03':
        raise ValueError("Expected field id 3 for amount")
    num_bytes_raw = reader.read(1)
    if not num_bytes_raw:
        raise ValueError("Missing compact int length for amount")
    num_bytes = num_bytes_raw[0]
    amount_bytes = reader.read(num_bytes)
    amount = int.from_bytes(amount_bytes, byteorder='big')

    # Field 4: Oracle
    field_id = reader.read(1)
    if field_id != b'\x04':
        raise ValueError("Expected field id 4 for oracle")
    oracle_adjusted = read_uvarint(reader)
    # NOTE(review): fields_to_encode() writes the oracle value as-is, yet
    # this divides by 100, so a marshal/unmarshal round trip does not
    # preserve the oracle — confirm which side matches the wire format.
    oracle = oracle_adjusted // 100

    # Create instance; note that original amount was multiplied by 2e6
    # in __init__, so divide it back out here.
    obj = cls(None, recipient, amount // 2_000_000)
    obj.oracle = oracle
    return obj

BurnTokens

Bases: TransactionBodyBase

Represents a Burn Tokens transaction. This class accepts a human-readable burn amount and then dynamically queries the blockchain to obtain the token's precision. It then calculates the final on-chain amount to burn.

The token account URL is provided, and from it the token issuer URL is obtained.

Source code in accumulate\models\transactions.py
1606
1607
1608
1609
1610
1611
1612
1613
1614
1615
1616
1617
1618
1619
1620
1621
1622
1623
1624
1625
1626
1627
1628
1629
1630
1631
1632
1633
1634
1635
1636
1637
1638
1639
1640
1641
1642
1643
1644
1645
1646
1647
1648
1649
1650
1651
1652
1653
1654
1655
1656
1657
1658
1659
1660
1661
1662
1663
1664
1665
1666
1667
1668
1669
1670
1671
1672
1673
1674
1675
1676
1677
1678
1679
1680
1681
1682
1683
1684
1685
1686
1687
1688
1689
1690
1691
1692
1693
1694
1695
1696
1697
1698
1699
1700
1701
1702
1703
1704
1705
1706
1707
1708
1709
1710
class BurnTokens(TransactionBodyBase):
    """
    Represents a Burn Tokens transaction.

    Accepts a human-readable burn amount and dynamically queries the
    blockchain (via ``initialize``) for the token's precision, from which the
    final on-chain amount to burn is derived.

    The token account URL is provided, and from it the token issuer URL is
    obtained.
    """

    def __init__(self, token_account_url: URL, provided_amount: int):
        """
        :param token_account_url: The URL of the token account (e.g., acc://.../CTACUST).
        :param provided_amount: The human-readable number of tokens to burn.
        :raises TypeError: If token_account_url is not a URL instance.
        :raises ValueError: If provided_amount is not a positive integer.
        """
        if not isinstance(token_account_url, URL):
            raise TypeError("token_account_url must be an instance of URL.")
        if not isinstance(provided_amount, int) or provided_amount <= 0:
            raise ValueError("provided_amount must be a positive integer.")
        self.token_account_url = token_account_url
        self.provided_amount = provided_amount
        # Both are populated by initialize(); self.amount is the on-chain
        # value: provided_amount * (10 ** precision).
        self.precision = None
        self.amount = None

    def type(self) -> TransactionType:
        """Return the transaction type (BURN_TOKENS)."""
        return TransactionType.BURN_TOKENS

    async def initialize(self, client):
        """
        Dynamically query the token account and token issuer to obtain the
        token's precision, then calculate the final on-chain burn amount.

        :param client: An AccumulateClient used for the two queries.
        :raises ValueError: If the token account lacks a tokenUrl or the
            issuer lacks a precision value.
        """
        from accumulate.models.queries import Query
        from accumulate.models.enums import QueryType
        query = Query(query_type=QueryType.DEFAULT)

        # The token account records which issuer (tokenUrl) it belongs to.
        token_account_response = await client.query(str(self.token_account_url), query)
        token_issuer_url_str = token_account_response.account.get("tokenUrl")
        if not token_issuer_url_str:
            raise ValueError("Token account did not return a tokenUrl")
        token_issuer_url = URL.parse(token_issuer_url_str)

        # The issuer record carries the precision (number of decimal places).
        token_issuer_response = await client.query(str(token_issuer_url), query)
        precision = token_issuer_response.account.get("precision")
        if precision is None:
            raise ValueError("Token issuer did not return a precision value")
        self.precision = int(precision)

        # Final on-chain amount.
        self.amount = self.provided_amount * (10 ** self.precision)

    def _encode_amount(self) -> bytes:
        """
        Encode the final amount as a raw big-endian number via
        big_number_marshal_binary.

        :raises ValueError: If initialize(client) has not been called yet.
        """
        if self.amount is None:
            raise ValueError("BurnTokens instance is not initialized. Call initialize(client) first.")
        return big_number_marshal_binary(self.amount)

    def fields_to_encode(self):
        """
        Fields for BurnTokens:
          - Field 1: Transaction type (encoded as varint)
          - Field 2: Amount (encoded as a length-delimited big-endian number)

        NOTE: The token URL is NOT included in the encoded body.
        """
        return [
            (1, encode_uvarint(self.type().value), lambda x: x),
            (2, self._encode_amount(), lambda x: x),
        ]

    @classmethod
    def unmarshal(cls, data: bytes) -> "BurnTokens":
        """
        Deserialize BurnTokens transaction from bytes.
        (Since the token URL is not encoded, the returned instance has
        token_account_url set to None and precision set to 0.)
        """
        # Decode the leading varint directly from the byte string; the
        # previous implementation called decode_uvarint(reader.read()),
        # which consumed the ENTIRE stream and left nothing for the
        # amount field below.
        transaction_type, consumed = decode_uvarint(data)
        if transaction_type != TransactionType.BURN_TOKENS.value:
            raise ValueError("Unexpected transaction type")
        reader = io.BytesIO(data[consumed:])
        amount_bytes = unmarshal_bytes(reader)
        final_amount = int.from_bytes(amount_bytes, byteorder='big')
        # Bypass __init__: the token URL is unknown here and would fail its
        # isinstance(URL) check (and a zero amount would fail validation).
        instance = cls.__new__(cls)
        instance.token_account_url = None
        instance.provided_amount = final_amount  # fallback; precision unknown
        instance.precision = 0  # unknown precision
        instance.amount = final_amount
        return instance

    def to_dict(self) -> dict:
        """
        Convert the BurnTokens transaction to a JSON-serializable dictionary.
        (Note: the token URL is not included in the output JSON.)
        """
        return {
            "type": self._format_transaction_type(self.type().name),
            "amount": str(self.amount) if self.amount is not None else None,
        }

__init__(token_account_url, provided_amount)

:param token_account_url: The URL of the token account (e.g., acc://.../CTACUST). :param provided_amount: The human-readable number of tokens to burn.

Source code in accumulate\models\transactions.py
1616
1617
1618
1619
1620
1621
1622
1623
1624
1625
1626
1627
1628
1629
def __init__(self, token_account_url: URL, provided_amount: int):
    """
    :param token_account_url: The URL of the token account (e.g., acc://.../CTACUST).
    :param provided_amount: The human-readable number of tokens to burn.
    :raises TypeError: If token_account_url is not a URL instance.
    :raises ValueError: If provided_amount is not a positive integer.
    """
    if not isinstance(token_account_url, URL):
        raise TypeError("token_account_url must be an instance of URL.")
    if not isinstance(provided_amount, int) or provided_amount <= 0:
        raise ValueError("provided_amount must be a positive integer.")
    self.token_account_url = token_account_url
    self.provided_amount = provided_amount
    # These will be set dynamically via initialize()
    self.precision = None
    self.amount = None  # Final on-chain amount = provided_amount * (10 ** precision)

_encode_amount()

Encodes the final amount as a raw big-endian number. For example, if the final amount is 110000, then: big_number_marshal_binary(110000) should yield its minimal big-endian representation.

Source code in accumulate\models\transactions.py
1661
1662
1663
1664
1665
1666
1667
1668
1669
def _encode_amount(self) -> bytes:
    """
    Encodes the final amount as a raw big-endian number.
    For example, if the final amount is 110000, then:
      big_number_marshal_binary(110000) should yield its minimal big-endian representation.

    :raises ValueError: If initialize(client) has not been called yet.
    """
    # self.amount is only set by initialize(); guard against use before then.
    if self.amount is None:
        raise ValueError("BurnTokens instance is not initialized. Call initialize(client) first.")
    return big_number_marshal_binary(self.amount)

fields_to_encode()

Fields for BurnTokens
  • Field 1: Transaction type (encoded as varint)
  • Field 2: Amount (encoded as a length-delimited big-endian number)

NOTE: The token URL is NOT included in the encoded body.

Source code in accumulate\models\transactions.py
1671
1672
1673
1674
1675
1676
1677
1678
1679
1680
1681
1682
def fields_to_encode(self):
    """
    Fields for BurnTokens:
      - Field 1: Transaction type (encoded as varint)
      - Field 2: Amount (encoded as a length-delimited big-endian number)

     NOTE: The token URL is NOT included in the encoded body.
    """
    # The identity lambdas mark both values as pre-encoded.
    return [
        (1, encode_uvarint(self.type().value), lambda x: x),
        (2, self._encode_amount(), lambda x: x),
    ]

initialize(client) async

Dynamically query the token account and token issuer to obtain the token's precision, then calculate the final on-chain burn amount.

Source code in accumulate\models\transactions.py
1634
1635
1636
1637
1638
1639
1640
1641
1642
1643
1644
1645
1646
1647
1648
1649
1650
1651
1652
1653
1654
1655
1656
1657
1658
1659
async def initialize(self, client):
    """
    Dynamically query the token account and token issuer to obtain the token's precision,
    then calculate the final on-chain burn amount.

    :param client: An AccumulateClient used for the two queries.
    :raises ValueError: If the token account lacks a tokenUrl or the issuer
        lacks a precision value.
    """
    # Imported locally to avoid a module-level import cycle.
    from accumulate.models.queries import Query
    from accumulate.models.enums import QueryType
    query = Query(query_type=QueryType.DEFAULT)

    # Query the token account to get the token issuer URL.
    token_account_response = await client.query(str(self.token_account_url), query)
    token_issuer_url_str = token_account_response.account.get("tokenUrl")
    if not token_issuer_url_str:
        raise ValueError("Token account did not return a tokenUrl")
    token_issuer_url = URL.parse(token_issuer_url_str)

    # Query the token issuer to obtain the token's precision.
    token_issuer_response = await client.query(str(token_issuer_url), query)
    precision = token_issuer_response.account.get("precision")
    if precision is None:
        raise ValueError("Token issuer did not return a precision value")
    self.precision = int(precision)

    # Calculate the final on-chain amount.
    self.amount = self.provided_amount * (10 ** self.precision)

to_dict()

Convert the BurnTokens transaction to a JSON-serializable dictionary. (Note: the token URL is not included in the output JSON.)

Source code in accumulate\models\transactions.py
1702
1703
1704
1705
1706
1707
1708
1709
1710
def to_dict(self) -> dict:
    """
    Convert the BurnTokens transaction to a JSON-serializable dictionary.
    (Note: the token URL is not included in the output JSON.)
    """
    return {
        "type": self._format_transaction_type(self.type().name),
        # amount is None until initialize() has been called.
        "amount": str(self.amount) if self.amount is not None else None,
    }

unmarshal(data) classmethod

Deserialize BurnTokens transaction from bytes. (Since precision is not encoded, the returned instance will have token_account_url set to None.)

Source code in accumulate\models\transactions.py
1684
1685
1686
1687
1688
1689
1690
1691
1692
1693
1694
1695
1696
1697
1698
1699
1700
@classmethod
def unmarshal(cls, data: bytes) -> "BurnTokens":
    """
    Deserialize BurnTokens transaction from bytes.
    (Since precision is not encoded, the returned instance will have token_account_url set to None.)
    """
    reader = io.BytesIO(data)
    # NOTE(review): reader.read() consumes the ENTIRE stream to decode the
    # leading varint, so unmarshal_bytes() below reads from an exhausted
    # reader — decode from `data` and slice off the consumed prefix instead.
    transaction_type, _ = decode_uvarint(reader.read())
    if transaction_type != TransactionType.BURN_TOKENS.value:
        raise ValueError("Unexpected transaction type")
    amount_bytes = unmarshal_bytes(reader)
    final_amount = int.from_bytes(amount_bytes, byteorder='big')
    # NOTE(review): __init__ raises TypeError for a non-URL first argument,
    # so cls(None, ...) cannot succeed; construct via cls.__new__ instead.
    instance = cls(None, final_amount)  # token_account_url is unknown from the body
    instance.precision = 0  # unknown precision
    instance.amount = final_amount
    instance.provided_amount = final_amount  # fallback
    return instance

CreateDataAccount

Bases: TransactionBodyBase

Represents a Create Data Account transaction.

Source code in accumulate\models\transactions.py
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
class CreateDataAccount(TransactionBodyBase):
    """
    Represents a Create Data Account transaction.
    """

    def __init__(self, url: URL, authorities: Optional[List[URL]] = None, metadata: Optional[bytes] = None):
        """
        :param url: The URL of the data account.
        :param authorities: List of authority URLs (optional).
        :param metadata: Optional metadata as bytes (optional).
        :raises TypeError: If url is not a URL instance.
        :raises ValueError: If the URL has no authority or path component.
        """
        if not isinstance(url, URL):
            raise TypeError("url must be an instance of URL.")
        if not url.authority or not url.path:
            raise ValueError(f"Invalid URL: {url}")

        # Only set authorities/metadata when actually provided; empty values
        # are stored as None so the optional fields are omitted from output.
        self.authorities = authorities if authorities else None
        self.url = url
        self.metadata = metadata if metadata else None

    def type(self) -> TransactionType:
        """Return transaction type."""
        return TransactionType.CREATE_DATA_ACCOUNT

    def fields_to_encode(self):
        """
        Returns the fields to encode as a list of (field_id, value, encoding_function).
        Optional fields (authorities, metadata) are appended only when present.
        """
        fields = [
            # Type field first; value is pre-encoded (identity encoder).
            (1, encode_uvarint(self.type().value), lambda x: x),

            # URL as a length-prefixed string (already marshaled here, so no
            # double length prefix is added by the encoder).
            (2, string_marshal_binary(str(self.url)), lambda x: x),
        ]

        # Authorities: count followed by each URL as a length-prefixed string.
        if self.authorities:
            authorities_encoded = b"".join(
                string_marshal_binary(str(auth)) for auth in self.authorities
            )
            fields.append((3, encode_uvarint(len(self.authorities)) + authorities_encoded, lambda x: x))

        # Metadata: length-prefixed bytes.
        if self.metadata:
            fields.append((4, bytes_marshal_binary(self.metadata), lambda x: x))

        return fields

    @staticmethod
    def _next_field_is(reader: io.BytesIO, marker: bytes) -> bool:
        """Consume and return True if the next byte equals *marker*.

        io.BytesIO has no peek(); emulate it with tell()/seek() so the reader
        is left untouched when the marker does not match.
        """
        pos = reader.tell()
        if reader.read(1) == marker:
            return True
        reader.seek(pos)
        return False

    @classmethod
    def unmarshal(cls, data: bytes) -> "CreateDataAccount":
        """Deserialize CreateDataAccount transaction from bytes.

        :raises ValueError: Propagated from the underlying readers on
            malformed input.
        """
        logger.debug("Unmarshaling CreateDataAccount")

        reader = io.BytesIO(data)

        # Field 1: transaction type (varint).
        type_value = read_uvarint(reader)

        # Field 2: account URL (length-prefixed string).
        url = unmarshal_string(reader)

        # Optional field 3: authorities (count, then length-prefixed strings).
        # The previous implementation used reader.peek(), which io.BytesIO
        # does not provide and which raised AttributeError unconditionally.
        authorities = []
        if cls._next_field_is(reader, b'\x03'):
            authorities_count = read_uvarint(reader)
            for _ in range(authorities_count):
                auth_str = unmarshal_string(reader)
                authorities.append(URL.parse(auth_str))

        # Optional field 4: metadata (length-prefixed bytes).
        metadata = None
        if cls._next_field_is(reader, b'\x04'):
            metadata = unmarshal_bytes(reader)

        # Debug prints replaced with module logger to keep library output clean.
        logger.debug(
            "Parsed CreateDataAccount: Type=%s, URL=%s, Authorities=%s, Metadata=%s",
            type_value, url, authorities, metadata,
        )
        return cls(URL.parse(url), authorities if authorities else None, metadata)

    def to_dict(self) -> dict:
        """Convert CreateDataAccount transaction to a dictionary."""
        tx_dict = {
            **super().to_dict(),
            "url": str(self.url),
        }

        # Only add authorities if they exist.
        if self.authorities:
            tx_dict["authorities"] = [str(auth) for auth in self.authorities]

        # Only add metadata if it exists.
        if self.metadata:
            tx_dict["metadata"] = self.metadata.hex()

        return tx_dict

__init__(url, authorities=None, metadata=None)

:param url: The URL of the data account. :param authorities: List of authority URLs (optional). :param metadata: Optional metadata as bytes (optional).

Source code in accumulate\models\transactions.py
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
def __init__(self, url: URL, authorities: Optional[List[URL]] = None, metadata: Optional[bytes] = None):
    """
    :param url: The URL of the data account.
    :param authorities: List of authority URLs (optional).
    :param metadata: Optional metadata as bytes (optional).
    :raises TypeError: If url is not a URL instance.
    :raises ValueError: If the URL has no authority or path component.
    """
    if not isinstance(url, URL):
        raise TypeError("url must be an instance of URL.")
    if not url.authority or not url.path:
        raise ValueError(f"Invalid URL: {url}")

    # Only set authorities if they exist (empty list is stored as None)
    self.authorities = authorities if authorities else None
    self.url = url
    self.metadata = metadata if metadata else None  # Only set metadata if provided

fields_to_encode()

Returns the fields to encode as a list of (field_id, value, encoding_function).

Source code in accumulate\models\transactions.py
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
def fields_to_encode(self):
    """
    Returns the fields to encode as a list of (field_id, value, encoding_function).
    Optional fields (authorities, metadata) are appended only when present.
    """
    fields = [
        #  encode type field first
        (1, encode_uvarint(self.type().value), lambda x: x),  

        #  encode URL without double length prefix
        (2, string_marshal_binary(str(self.url)), lambda x: x),  
    ]

    #  Include authorities only if they exist: count, then each URL as a
    #  length-prefixed string.
    if self.authorities:
        authorities_encoded = b"".join([
            string_marshal_binary(str(auth))
            for auth in self.authorities
        ])
        fields.append((3, encode_uvarint(len(self.authorities)) + authorities_encoded, lambda x: x))  

    #  Include metadata only if it exists (length-prefixed bytes).
    if self.metadata:
        fields.append((4, bytes_marshal_binary(self.metadata), lambda x: x))  

    return fields

to_dict()

Convert CreateDataAccount transaction to a dictionary.

Source code in accumulate\models\transactions.py
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
def to_dict(self) -> dict:
    """Convert CreateDataAccount transaction to a dictionary.

    Optional keys ("authorities", "metadata") are included only when set.
    """
    tx_dict = {
        **super().to_dict(),
        "url": str(self.url),
    }

    #  Only add authorities if they exist
    if self.authorities:
        tx_dict["authorities"] = [str(auth) for auth in self.authorities]

    #  Only add metadata if it exists (hex-encoded for JSON)
    if self.metadata:
        tx_dict["metadata"] = self.metadata.hex()

    return tx_dict

type()

Return transaction type.

Source code in accumulate\models\transactions.py
668
669
670
def type(self) -> TransactionType:
    """Return transaction type (CREATE_DATA_ACCOUNT)."""
    return TransactionType.CREATE_DATA_ACCOUNT

unmarshal(data) classmethod

Deserialize CreateDataAccount transaction from bytes.

Source code in accumulate\models\transactions.py
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
@classmethod
def unmarshal(cls, data: bytes) -> "CreateDataAccount":
    """Deserialize CreateDataAccount transaction from bytes."""
    # NOTE(review): debug print() in library code; prefer logger.debug.
    print(f"DEBUG: Unmarshaling CreateDataAccount")

    reader = io.BytesIO(data)

    #  Step 1: Parse Type Field
    type_value = read_uvarint(reader)

    #  Step 2: Parse URL (Read as a string)
    url = unmarshal_string(reader)

    # NOTE(review): io.BytesIO has no peek() method — both peek() calls
    # below raise AttributeError at runtime; emulate with tell()/seek().
    #  Step 3: Parse Authorities (If present)
    authorities = []
    if reader.peek(1)[:1] == b'\x03':  # Check if Authorities field exists
        reader.read(1)  # Consume field identifier
        authorities_count = read_uvarint(reader)
        for _ in range(authorities_count):
            auth_str = unmarshal_string(reader)
            authorities.append(URL.parse(auth_str))

    #  Step 4: Parse Metadata (If present)
    metadata = None
    if reader.peek(1)[:1] == b'\x04':  # Check if Metadata field exists
        reader.read(1)  # Consume field identifier
        metadata = unmarshal_bytes(reader)

    print(f"DEBUG: Parsed CreateDataAccount: Type={type_value}, URL={url}, Authorities={authorities}, Metadata={metadata}")
    return cls(URL.parse(url), authorities if authorities else None, metadata)

CreateIdentity

Bases: TransactionBodyBase

Represents a CreateIdentity transaction, where the key hash is automatically derived.

Source code in accumulate\models\transactions.py
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
class CreateIdentity(TransactionBodyBase):
    """
    Represents a CreateIdentity transaction, where the key hash is automatically derived.
    """

    def __init__(self, url: URL, signer_public_key: bytes, key_book_url: Optional[URL] = None):
        """
        :param url: The URL of the new identity.
        :param signer_public_key: The public key of the principal (used to derive the key hash).
        :param key_book_url: The key book URL (optional).
        :raises TypeError: If any argument has the wrong type or the key is
            not exactly 32 bytes.
        """
        if not isinstance(url, URL):
            raise TypeError("url must be an instance of URL.")
        if not isinstance(signer_public_key, bytes) or len(signer_public_key) != 32:
            raise TypeError("signer_public_key must be a 32-byte public key.")
        if key_book_url and not isinstance(key_book_url, URL):
            raise TypeError("keyBookUrl must be an instance of URL if provided.")

        self.url = url
        # Key hash is derived, never stored raw: SHA-256 of the public key.
        self.key_hash = hashlib.sha256(signer_public_key).digest()
        self.key_book_url = key_book_url

    def type(self) -> TransactionType:
        """Return the transaction type in Accumulate's expected format."""
        return TransactionType.CREATE_IDENTITY

    def fields_to_encode(self):
        """Returns the fields to be marshaled in Accumulate format."""
        fields = [
            (1, self.type().value, encode_uvarint),  # Type field (CreateIdentity)
            (2, string_marshal_binary(str(self.url)), lambda x: x),  # URL, pre-marshaled
            (3, self.key_hash, bytes_marshal_binary),  # 32-byte key hash
        ]

        if self.key_book_url:
            fields.append((4, string_marshal_binary(str(self.key_book_url)), lambda x: x))  # KeyBookUrl

        return fields

    def marshal(self) -> bytes:
        """Encodes the transaction into bytes for submission."""
        serialized = b""
        for field_num, value, encode_func in self.fields_to_encode():
            encoded_value = encode_func(value)
            serialized += field_marshal_binary(field_num, encoded_value)

        return serialized

    def to_dict(self) -> dict:
        """Convert CreateIdentity transaction to a dictionary."""
        return {
            "type": "createIdentity",  #  lowerCamelCase format
            "url": str(self.url),
            "keyHash": self.key_hash.hex(),  #  encode as hex
            "keyBookUrl": str(self.key_book_url) if self.key_book_url else None,
        }

    @classmethod
    def unmarshal(cls, data: bytes) -> "CreateIdentity":
        """Deserialize a CreateIdentity transaction from bytes.

        :raises ValueError: If a field id or the transaction type is unexpected.
        """
        reader = io.BytesIO(data)

        # Field 1: Type
        field_id = reader.read(1)
        if field_id != b'\x01':
            raise ValueError("Expected field id 1 for type")
        tx_type = int.from_bytes(reader.read(1), "big")
        if tx_type != TransactionType.CREATE_IDENTITY.value:
            raise ValueError("Invalid transaction type for CreateIdentity")

        # Field 2: URL
        field_id = reader.read(1)
        if field_id != b'\x02':
            raise ValueError("Expected field id 2 for URL")
        url = unmarshal_string(reader)

        # Field 3: Key Hash (already SHA-256; 32 raw bytes)
        # NOTE(review): the encode side uses bytes_marshal_binary, which may
        # length-prefix the hash — confirm this raw 32-byte read matches it.
        field_id = reader.read(1)
        if field_id != b'\x03':
            raise ValueError("Expected field id 3 for key_hash")
        key_hash = reader.read(32)

        # Field 4: Key Book URL (optional). io.BytesIO has no peek(); the
        # previous implementation raised AttributeError here, so emulate
        # peeking with tell()/seek().
        key_book_url = None
        pos = reader.tell()
        if reader.read(1) == b'\x04':
            key_book_url = unmarshal_string(reader)
        else:
            reader.seek(pos)

        # Bypass __init__: it expects the raw public key and would hash the
        # already-hashed value a second time, corrupting round trips.
        instance = cls.__new__(cls)
        instance.url = URL.parse(url)
        instance.key_hash = key_hash
        instance.key_book_url = URL.parse(key_book_url) if key_book_url else None
        return instance

__init__(url, signer_public_key, key_book_url=None)

:param url: The URL of the new identity. :param signer_public_key: The public key of the principal (used to derive the key hash). :param key_book_url: The key book URL (optional).

Source code in accumulate\models\transactions.py
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
def __init__(self, url: URL, signer_public_key: bytes, key_book_url: Optional[URL] = None):
    """
    :param url: The URL of the new identity.
    :param signer_public_key: The public key of the principal (used to derive the key hash).
    :param key_book_url: The key book URL (optional).
    :raises TypeError: If any argument has the wrong type or the key is not
        exactly 32 bytes.
    """
    if not isinstance(url, URL):
        raise TypeError("url must be an instance of URL.")
    if not isinstance(signer_public_key, bytes) or len(signer_public_key) != 32:
        raise TypeError("signer_public_key must be a 32-byte public key.")
    if key_book_url and not isinstance(key_book_url, URL):
        raise TypeError("keyBookUrl must be an instance of URL if provided.")

    self.url = url
    self.key_hash = hashlib.sha256(signer_public_key).digest()  #  Compute key hash from the public key
    self.key_book_url = key_book_url

fields_to_encode()

Returns the fields to be marshaled in Accumulate format.

Source code in accumulate\models\transactions.py
395
396
397
398
399
400
401
402
403
404
405
406
def fields_to_encode(self):
    """Returns the fields to be marshaled in Accumulate format.

    Each entry is ``(field_num, value, encode_func)``; the URL values are
    pre-marshaled, so their encoder is the identity.
    """
    fields = [
        (1, self.type().value, encode_uvarint),  # Type field (CreateIdentity)
        (2, string_marshal_binary(str(self.url)), lambda x: x),  # URL (0x02), pre-marshaled
        (3, self.key_hash, bytes_marshal_binary),  # 32-byte key hash
    ]

    if self.key_book_url:
        fields.append((4, string_marshal_binary(str(self.key_book_url)), lambda x: x))  # KeyBookUrl (0x04)

    return fields

marshal()

Encodes the transaction into bytes for submission.

Source code in accumulate\models\transactions.py
408
409
410
411
412
413
414
415
def marshal(self) -> bytes:
    """Encodes the transaction into bytes for submission.

    Applies each field's encoder, then wraps the result with its field
    number via field_marshal_binary, concatenating in field order.
    """
    serialized = b""
    for field_num, value, encode_func in self.fields_to_encode():
        encoded_value = encode_func(value)
        serialized += field_marshal_binary(field_num, encoded_value)

    return serialized

to_dict()

Convert CreateIdentity transaction to a dictionary.

Source code in accumulate\models\transactions.py
417
418
419
420
421
422
423
424
def to_dict(self) -> dict:
    """Convert CreateIdentity transaction to a dictionary.

    "keyBookUrl" is None when no key book URL was supplied.
    """
    return {
        "type": "createIdentity",  #  lowerCamelCase format
        "url": str(self.url),
        "keyHash": self.key_hash.hex(),  #  encode as hex
        "keyBookUrl": str(self.key_book_url) if self.key_book_url else None,
    }

type()

Return the transaction type in Accumulate's expected format.

Source code in accumulate\models\transactions.py
391
392
393
def type(self) -> TransactionType:
    """Return the transaction type (CREATE_IDENTITY) in Accumulate's expected format."""
    return TransactionType.CREATE_IDENTITY

unmarshal(data) classmethod

Deserialize a CreateIdentity transaction from bytes.

Source code in accumulate\models\transactions.py
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
@classmethod
def unmarshal(cls, data: bytes) -> "CreateIdentity":
    """Deserialize a CreateIdentity transaction from bytes."""
    reader = io.BytesIO(data)

    # Field 1: Type
    field_id = reader.read(1)
    if field_id != b'\x01':
        raise ValueError("Expected field id 1 for type")
    tx_type = int.from_bytes(reader.read(1), "big")
    if tx_type != TransactionType.CREATE_IDENTITY.value:
        raise ValueError("Invalid transaction type for CreateIdentity")

    # Field 2: URL
    field_id = reader.read(1)
    if field_id != b'\x02':
        raise ValueError("Expected field id 2 for URL")
    url = unmarshal_string(reader)

    # Field 3: Key Hash (Public Key Hash)
    field_id = reader.read(1)
    if field_id != b'\x03':
        raise ValueError("Expected field id 3 for key_hash")
    key_hash = reader.read(32)  #  Read 32-byte key hash

    # Field 4: Key Book URL (Optional)
    # NOTE(review): io.BytesIO has no peek() method — this raises
    # AttributeError at runtime; emulate peeking with tell()/seek().
    key_book_url = None
    if reader.peek(1)[:1] == b'\x04':  #  Peek to check if KeyBookUrl field exists
        reader.read(1)  # Consume field identifier
        key_book_url = unmarshal_string(reader)

    # NOTE(review): __init__ hashes its second argument, so passing the
    # already-decoded key_hash re-hashes it (sha256(key_hash)); construct
    # via cls.__new__ and assign key_hash directly instead.
    return cls(URL.parse(url), key_hash, URL.parse(key_book_url) if key_book_url else None)

CreateKeyBook dataclass

Bases: TransactionBodyBase

Represents a Create Key Book transaction.

Source code in accumulate\models\transactions.py
1044
1045
1046
1047
1048
1049
1050
1051
1052
1053
1054
1055
1056
1057
1058
1059
1060
1061
1062
1063
1064
1065
1066
1067
1068
1069
1070
1071
1072
1073
1074
1075
1076
1077
1078
1079
1080
1081
1082
1083
1084
1085
1086
1087
1088
1089
1090
1091
1092
1093
1094
1095
1096
1097
1098
1099
1100
1101
1102
1103
1104
1105
1106
1107
1108
1109
@dataclass
class CreateKeyBook(TransactionBodyBase):
    """
    Represents a Create Key Book transaction.
    """
    url: URL  # URL at which the key book is created
    public_key_hash: bytes  # Must be exactly 32 bytes
    authorities: Optional[List[URL]] = None  # optional additional authority URLs

    def type(self) -> TransactionType:
        """Return the transaction type (CREATE_KEY_BOOK)."""
        return TransactionType.CREATE_KEY_BOOK

    def fields_to_encode(self):
        """
        Build the fields as follows:
          Field 1: Transaction type (as a varint)
          Field 2: URL (with a length prefix added by string_marshal_binary)
          Field 3: publicKeyHash (with a length prefix, which should be 0x20)
          Optionally, if authorities are provided, include them as fields 4 and 5
        """
        fields = [
            # Field 1: Transaction type (e.g. 0x0d)
            (1, encode_uvarint(self.type().value), lambda x: x),
            # Field 2: URL – string_marshal_binary automatically prepends the length
            (2, string_marshal_binary(str(self.url)), lambda x: x),
            # Field 3: publicKeyHash – hash_marshal_binary checks length (must be 32)
            (3, bytes_marshal_binary(self.public_key_hash), lambda x: x),
        ]
        if self.authorities:
            # Authorities count
            authorities_count = encode_uvarint(len(self.authorities))
            # Encode each authority URL.
            encoded_auths = b"".join([string_marshal_binary(str(auth)) for auth in self.authorities])
            fields.append((4, authorities_count, lambda x: x))
            fields.append((5, encoded_auths, lambda x: x))
        return fields

    def to_dict(self) -> dict:
        """Convert transaction body to a JSON‑serializable dictionary"""
        tx_dict = {
            "type": self._format_transaction_type(self.type().name),
            "url": str(self.url),
            "publicKeyHash": self.public_key_hash.hex(),
        }
        # Optional authorities are omitted entirely when empty/None
        if self.authorities:
            tx_dict["authorities"] = [str(auth) for auth in self.authorities]
        return tx_dict

    @classmethod
    def unmarshal(cls, data: bytes) -> "CreateKeyBook":
        """Deserialize a CreateKeyBook from bytes.

        NOTE(review): this parser consumes no field tags and no type
        field, and reads the public key hash as 32 raw bytes, while
        fields_to_encode() emits a type field and length-prefixes the
        hash via bytes_marshal_binary — confirm the wire format before
        relying on marshal/unmarshal round-trips.
        """
        logger.debug(" Unmarshaling CreateKeyBook")
        reader = io.BytesIO(data)
        # Read URL field (field 2)
        url = URL.parse(unmarshal_bytes(reader).decode("utf-8"))
        # Read publicKeyHash field (field 3)
        public_key_hash = reader.read(32)
        if len(public_key_hash) != 32:
            raise ValueError("Invalid public key hash length (must be 32 bytes)")
        authorities = []
        # Any remaining bytes are interpreted as a count plus that many URLs
        if reader.tell() < len(data):
            authorities_count = read_uvarint(reader)
            for _ in range(authorities_count):
                auth_url = unmarshal_bytes(reader).decode("utf-8")
                authorities.append(URL.parse(auth_url))
        logger.debug(f" Parsed CreateKeyBook: URL={url}, PublicKeyHash={public_key_hash.hex()}, Authorities={authorities}")
        return cls(url, public_key_hash, authorities)

fields_to_encode()

Build the fields as follows

- Field 1: Transaction type (as a varint)
- Field 2: URL (with a length prefix added by string_marshal_binary)
- Field 3: publicKeyHash (with a length prefix, which should be 0x20)
- Fields 4 and 5 (optional): the authorities count and the encoded authority URLs, included only when authorities are provided

Source code in accumulate\models\transactions.py
1056
1057
1058
1059
1060
1061
1062
1063
1064
1065
1066
1067
1068
1069
1070
1071
1072
1073
1074
1075
1076
1077
1078
1079
def fields_to_encode(self):
    """Return the (field_number, value, encoder) triples for marshaling.

    Field 1 carries the transaction type as a varint; field 2 the
    length-prefixed key book URL; field 3 the length-prefixed 32-byte
    public key hash. When authorities are present, field 4 carries the
    count and field 5 the concatenated, length-prefixed authority URLs.
    """
    raw = lambda v: v
    triples = [
        (1, encode_uvarint(self.type().value), raw),
        (2, string_marshal_binary(str(self.url)), raw),
        (3, bytes_marshal_binary(self.public_key_hash), raw),
    ]
    if self.authorities:
        count = encode_uvarint(len(self.authorities))
        joined = b"".join(string_marshal_binary(str(a)) for a in self.authorities)
        triples.append((4, count, raw))
        triples.append((5, joined, raw))
    return triples

to_dict()

Convert transaction body to a JSON‑serializable dictionary

Source code in accumulate\models\transactions.py
1081
1082
1083
1084
1085
1086
1087
1088
1089
1090
def to_dict(self) -> dict:
    """Convert the transaction body to a JSON-serializable dictionary."""
    result = {
        "type": self._format_transaction_type(self.type().name),
        "url": str(self.url),
        "publicKeyHash": self.public_key_hash.hex(),
    }
    # Optional authorities: omitted entirely when empty/None
    if not self.authorities:
        return result
    result["authorities"] = [str(a) for a in self.authorities]
    return result

CreateKeyPage dataclass

Bases: TransactionBodyBase

Represents a Create Key Page transaction.

Source code in accumulate\models\transactions.py
 985
 986
 987
 988
 989
 990
 991
 992
 993
 994
 995
 996
 997
 998
 999
1000
1001
1002
1003
1004
1005
1006
1007
1008
1009
1010
1011
1012
1013
1014
1015
1016
1017
1018
1019
1020
1021
1022
1023
1024
1025
1026
1027
1028
1029
1030
1031
1032
1033
1034
1035
1036
1037
1038
1039
1040
1041
@dataclass
class CreateKeyPage(TransactionBodyBase):
    """
    Represents a Create Key Page transaction.
    """
    keys: List[KeySpecParams]  # key specs to place on the new page

    def type(self) -> TransactionType:
        """Return the transaction type (CREATE_KEY_PAGE)."""
        return TransactionType.CREATE_KEY_PAGE

    def fields_to_encode(self):
        """
        Define the fields to encode, following structured encoding.

        Field 1 carries the transaction type; field 2 carries the
        concatenated key specs, length-prefixed as a whole.
        """
        # Marshal each key (each key's marshal() already adds its own field prefix)
        encoded_keys = b"".join([key.marshal() for key in self.keys])
        logger.debug(f"Encoded keys (hex): {encoded_keys.hex()}")
        logger.debug(f"Encoded keys length: {len(encoded_keys)} bytes")

        # Manually prepend the varint-encoded length of the concatenated keys
        keys_value = encode_uvarint(len(encoded_keys)) + encoded_keys

        # The generic marshal will wrap each value with its field number.
        fields = [
            (1, encode_uvarint(self.type().value), lambda x: x),  # Field 1: Transaction Type
            (2, keys_value, lambda x: x),                         # Field 2: Keys (with length prefix)
        ]
        return fields

    def to_dict(self) -> dict:
        """Convert transaction to a dictionary with correct type formatting, including keys."""
        return {
            "type": self._format_transaction_type(self.type().name),
            "keys": [{"keyHash": key.key_hash.hex()} for key in self.keys]
        }

    @classmethod
    def unmarshal(cls, data: bytes) -> "CreateKeyPage":
        """Deserialize a CreateKeyPage transaction from bytes.

        NOTE(review): fields_to_encode() states each key's marshal()
        adds its own field prefix, yet this parser reads each entry as a
        plain length-prefixed blob via unmarshal_bytes — confirm that
        KeySpecParams.marshal()/unmarshal() agree on the framing before
        relying on round-trips.
        """
        logger.debug(f" Unmarshaling CreateKeyPage")
        reader = io.BytesIO(data)
        # Read the keys field: first, the varint length, then the concatenated key bytes
        keys_length = read_uvarint(reader)
        keys_data = reader.read(keys_length)

        # To extract individual keys, we assume they were marshaled sequentially
        # (If multiple keys were present, you might need to loop until keys_data is exhausted)
        # For this example, we assume a single key or that you have a known count
        # Here, we assume the count equals the number of keys concatenated
        # (For a robust implementation, include a count field)
        keys = []
        sub_reader = io.BytesIO(keys_data)
        while sub_reader.tell() < len(keys_data):
            key_bytes = unmarshal_bytes(sub_reader)
            keys.append(KeySpecParams.unmarshal(key_bytes))

        logger.debug(f" Parsed CreateKeyPage: keys={keys}")
        return cls(keys)

fields_to_encode()

Define the fields to encode, following structured encoding.

Source code in accumulate\models\transactions.py
 995
 996
 997
 998
 999
1000
1001
1002
1003
1004
1005
1006
1007
1008
1009
1010
1011
1012
def fields_to_encode(self):
    """Return the (field_number, value, encoder) triples for marshaling.

    Field 1 is the transaction type; field 2 is the concatenation of
    every key spec's own marshaled bytes, length-prefixed as a whole.
    """
    raw = lambda v: v
    # Each key's marshal() already includes its own field prefix
    encoded_keys = b"".join(key.marshal() for key in self.keys)
    logger.debug(f"Encoded keys (hex): {encoded_keys.hex()}")
    logger.debug(f"Encoded keys length: {len(encoded_keys)} bytes")

    # Length-prefix the concatenated keys; the generic marshal will wrap
    # each value with its field number afterwards.
    return [
        (1, encode_uvarint(self.type().value), raw),
        (2, encode_uvarint(len(encoded_keys)) + encoded_keys, raw),
    ]

to_dict()

Convert transaction to a dictionary with correct type formatting, including keys.

Source code in accumulate\models\transactions.py
1014
1015
1016
1017
1018
1019
def to_dict(self) -> dict:
    """Convert the transaction to a dictionary, hex-encoding each key hash."""
    key_entries = [{"keyHash": spec.key_hash.hex()} for spec in self.keys]
    formatted_type = self._format_transaction_type(self.type().name)
    return {"type": formatted_type, "keys": key_entries}

CreateToken

Bases: TransactionBodyBase

Represents a Create Token transaction.

Source code in accumulate\models\transactions.py
1393
1394
1395
1396
1397
1398
1399
1400
1401
1402
1403
1404
1405
1406
1407
1408
1409
1410
1411
1412
1413
1414
1415
1416
1417
1418
1419
1420
1421
1422
1423
1424
1425
1426
1427
1428
1429
1430
1431
1432
1433
1434
1435
1436
1437
1438
1439
1440
1441
1442
1443
1444
1445
1446
1447
1448
1449
1450
1451
1452
1453
1454
1455
1456
1457
1458
1459
1460
1461
1462
1463
1464
1465
1466
1467
1468
1469
1470
1471
1472
1473
1474
1475
1476
1477
1478
1479
1480
1481
1482
1483
1484
1485
1486
1487
1488
1489
1490
1491
1492
1493
1494
1495
1496
1497
1498
1499
1500
1501
1502
1503
1504
class CreateToken(TransactionBodyBase):
    """
    Represents a Create Token transaction.

    Stores both the caller-supplied supply limit and the adjusted
    (on-chain) value, which is the supplied limit scaled by 10**precision.
    """

    def __init__(self, url: URL, symbol: str, precision: int, supply_limit: Optional[int] = None, authorities: Optional[List[URL]] = None):
        """
        :param url: Token issuer URL.
        :param symbol: Non-empty token symbol.
        :param precision: Decimal precision, 0-18 inclusive.
        :param supply_limit: Optional human-readable supply limit.
        :param authorities: Optional list of authority URLs.
        :raises TypeError: if url is not a URL instance.
        :raises ValueError: for an invalid symbol, precision, or supply_limit.
        """
        if not isinstance(url, URL):
            raise TypeError("url must be an instance of URL.")
        if not isinstance(symbol, str) or not symbol:
            raise ValueError("symbol must be a non-empty string.")
        if not isinstance(precision, int) or not (0 <= precision <= 18):
            raise ValueError("precision must be an integer between 0 and 18.")
        if supply_limit is not None and not isinstance(supply_limit, int):
            raise ValueError("supplyLimit must be an integer or None.")

        self.url = url
        self.symbol = symbol
        self.precision = precision
        self.supply_limit = supply_limit  # Original (human-readable) supply limit
        # Dynamically adjust the supplyLimit: multiply by 10^precision
        self.adjusted_supply_limit = supply_limit * (10 ** precision) if supply_limit is not None else None
        self.authorities = authorities or []

    def type(self) -> TransactionType:
        """Return the transaction type (CREATE_TOKEN)."""
        return TransactionType.CREATE_TOKEN

    def _encode_supply_limit(self) -> bytes:
        """
        Encode the adjusted supply limit using a variable-length encoding.
        First, compute the minimal number of bytes needed to represent the adjusted value,
        then prefix that with its length encoded as a varint.
        """
        value = self.adjusted_supply_limit
        # Determine the minimum number of bytes (at least 1)
        num_bytes = (value.bit_length() + 7) // 8 or 1
        supply_bytes = value.to_bytes(num_bytes, byteorder="big")
        return encode_uvarint(num_bytes) + supply_bytes

    def fields_to_encode(self):
        """
        Expected official encoding:
          Field 1: Transaction Type (CREATE_TOKEN) -> 01 08
          Field 2: Token URL -> 02 + length + url bytes
          Field 4: Symbol -> 04 + length + symbol bytes
          Field 5: Precision -> 05 + varint(precision)
          Field 7: Supply Limit -> 07 + (length varint + supply limit bytes) [variable length]
          Field 9: Authorities -> if provided.
        """
        fields = [
            (1, encode_uvarint(self.type().value), lambda x: x),  # Transaction Type
            (2, string_marshal_binary(str(self.url)), lambda x: x),  # Token URL
            (4, string_marshal_binary(self.symbol), lambda x: x),  # Symbol
            (5, encode_uvarint(self.precision), lambda x: x),  # Precision
            (7, self._encode_supply_limit(), lambda x: x) if self.adjusted_supply_limit is not None else None,
            (9, b"".join([string_marshal_binary(str(auth)) for auth in self.authorities]), lambda x: x) if self.authorities else None,
        ]
        return [field for field in fields if field is not None]

    def to_dict(self) -> dict:
        """
        Convert the transaction into a JSON-compatible dictionary.
        Outputs the dynamically adjusted supply limit (the on-chain value).
        """
        data = {
            "type": self._format_transaction_type(self.type().name),
            "url": str(self.url),
            "symbol": self.symbol,
            "precision": self.precision
        }
        if self.adjusted_supply_limit is not None:
            data["supplyLimit"] = str(self.adjusted_supply_limit)
        if self.authorities:
            data["authorities"] = [str(auth) for auth in self.authorities]
        return data

    @classmethod
    def unmarshal(cls, data: bytes) -> "CreateToken":
        """
        Deserialize CreateToken transaction from bytes.

        NOTE(review): field tags emitted by the generic marshal are not
        consumed here — confirm the wire format before relying on
        marshal/unmarshal round-trips.
        """
        reader = io.BytesIO(data)

        # Step 1: Transaction type. read_uvarint consumes only the
        # varint's bytes; the previous decode_uvarint(reader.read())
        # drained the whole stream, leaving nothing for later fields.
        transaction_type = read_uvarint(reader)
        if transaction_type != TransactionType.CREATE_TOKEN.value:
            raise ValueError(f"Unexpected transaction type: {transaction_type}")

        # Step 2: Token URL (length-prefixed string)
        token_url = URL.parse(unmarshal_bytes(reader).decode("utf-8"))

        # Step 3: Symbol
        symbol = unmarshal_bytes(reader).decode("utf-8")

        # Step 4: Precision
        precision = read_uvarint(reader)

        # Step 5: Supply limit (only if bytes remain)
        supply_limit = None
        if reader.tell() < len(data):
            length = read_uvarint(reader)
            supply_bytes = reader.read(length)
            adjusted_supply_limit = int.from_bytes(supply_bytes, byteorder="big")
            # Convert back to the original supply limit by dividing by 10^precision
            supply_limit = adjusted_supply_limit // (10 ** precision)

        # Step 6: Authorities (any remaining length-prefixed strings)
        authorities = []
        while reader.tell() < len(data):
            authorities.append(URL.parse(unmarshal_bytes(reader).decode("utf-8")))

        return cls(token_url, symbol, precision, supply_limit, authorities)

_encode_supply_limit()

Encode the adjusted supply limit using a variable-length encoding. First, compute the minimal number of bytes needed to represent the adjusted value, then prefix that with its length encoded as a varint.

Source code in accumulate\models\transactions.py
1419
1420
1421
1422
1423
1424
1425
1426
1427
1428
1429
def _encode_supply_limit(self) -> bytes:
    """
    Encode the adjusted supply limit as a length-prefixed big-endian
    integer: a varint byte count followed by the minimal big-endian
    representation (at least one byte, so zero encodes as a single byte).
    """
    limit = self.adjusted_supply_limit
    width = max(1, (limit.bit_length() + 7) // 8)
    return encode_uvarint(width) + limit.to_bytes(width, byteorder="big")

fields_to_encode()

Expected official encoding

- Field 1: Transaction Type (CREATE_TOKEN) -> 01 08
- Field 2: Token URL -> 02 + length + url bytes
- Field 4: Symbol -> 04 + length + symbol bytes
- Field 5: Precision -> 05 + varint(precision)
- Field 7: Supply Limit -> 07 + (length varint + supply limit bytes) [variable length]
- Field 9: Authorities -> included only if provided

Source code in accumulate\models\transactions.py
1431
1432
1433
1434
1435
1436
1437
1438
1439
1440
1441
1442
1443
1444
1445
1446
1447
1448
1449
def fields_to_encode(self):
    """Return the (field_number, value, encoder) triples for marshaling.

    Layout: field 1 type, field 2 token URL, field 4 symbol, field 5
    precision; field 7 (supply limit) and field 9 (authorities) are
    appended only when present.
    """
    raw = lambda v: v
    triples = [
        (1, encode_uvarint(self.type().value), raw),
        (2, string_marshal_binary(str(self.url)), raw),
        (4, string_marshal_binary(self.symbol), raw),
        (5, encode_uvarint(self.precision), raw),
    ]
    if self.adjusted_supply_limit is not None:
        triples.append((7, self._encode_supply_limit(), raw))
    if self.authorities:
        auth_blob = b"".join(string_marshal_binary(str(a)) for a in self.authorities)
        triples.append((9, auth_blob, raw))
    return triples

to_dict()

Convert the transaction into a JSON-compatible dictionary. Outputs the dynamically adjusted supply limit (the on-chain value).

Source code in accumulate\models\transactions.py
1451
1452
1453
1454
1455
1456
1457
1458
1459
1460
1461
1462
1463
1464
1465
1466
def to_dict(self) -> dict:
    """
    Convert the transaction into a JSON-compatible dictionary, emitting
    the dynamically adjusted (on-chain) supply limit when one is set.
    """
    body = {
        "type": self._format_transaction_type(self.type().name),
        "url": str(self.url),
        "symbol": self.symbol,
        "precision": self.precision,
    }
    if self.adjusted_supply_limit is not None:
        body["supplyLimit"] = str(self.adjusted_supply_limit)
    if self.authorities:
        body["authorities"] = [str(a) for a in self.authorities]
    return body

unmarshal(data) classmethod

Deserialize CreateToken transaction from bytes.

Source code in accumulate\models\transactions.py
1468
1469
1470
1471
1472
1473
1474
1475
1476
1477
1478
1479
1480
1481
1482
1483
1484
1485
1486
1487
1488
1489
1490
1491
1492
1493
1494
1495
1496
1497
1498
1499
1500
1501
1502
1503
1504
@classmethod
def unmarshal(cls, data: bytes) -> "CreateToken":
    """
    Deserialize CreateToken transaction from bytes.

    :param data: Raw marshaled transaction body.
    :raises ValueError: if the transaction type is not CREATE_TOKEN.
    """
    reader = io.BytesIO(data)

    # Step 1: Transaction type. read_uvarint consumes only the varint's
    # bytes; the previous decode_uvarint(reader.read()) drained the
    # whole stream, leaving nothing for the remaining fields.
    transaction_type = read_uvarint(reader)
    if transaction_type != TransactionType.CREATE_TOKEN.value:
        raise ValueError(f"Unexpected transaction type: {transaction_type}")

    # Step 2: Parse Token URL (length-prefixed string)
    token_url = URL.parse(unmarshal_bytes(reader).decode("utf-8"))

    # Step 3: Parse Symbol
    symbol = unmarshal_bytes(reader).decode("utf-8")

    # Step 4: Parse Precision
    precision = read_uvarint(reader)

    # Step 5: Parse Supply Limit (if bytes remain)
    supply_limit = None
    if reader.tell() < len(data):
        length = read_uvarint(reader)
        # Read 'length' bytes for the adjusted supply limit
        supply_bytes = reader.read(length)
        adjusted_supply_limit = int.from_bytes(supply_bytes, byteorder="big")
        # Convert back to the original supply limit by dividing by 10^precision
        supply_limit = adjusted_supply_limit // (10 ** precision)

    # Step 6: Parse Authorities (any remaining length-prefixed strings)
    authorities = []
    while reader.tell() < len(data):
        authorities.append(URL.parse(unmarshal_bytes(reader).decode("utf-8")))

    return cls(token_url, symbol, precision, supply_limit, authorities)

CreateTokenAccount

Bases: TransactionBodyBase

Represents a Create Token Account transaction.

Source code in accumulate\models\transactions.py
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
class CreateTokenAccount(TransactionBodyBase):
    """
    Represents a Create Token Account transaction.

    Creates a token account at `url` holding tokens issued by
    `token_url`, with optional extra authorities.
    """

    def __init__(self, url: URL, token_url: URL, authorities: Optional[List[URL]] = None):
        """
        :param url: The URL of the token account.
        :param token_url: The URL of the token issuer.
        :param authorities: List of authorities for the token account (optional).
        :raises TypeError: if url or token_url is not a URL instance.
        """
        if not isinstance(url, URL):
            raise TypeError("url must be an instance of URL.")
        if not isinstance(token_url, URL):
            raise TypeError("token_url must be an instance of URL.")

        self.url = url
        self.token_url = token_url
        self.authorities = authorities if authorities else None  # Set to None if empty

    def type(self) -> TransactionType:
        """Return transaction type."""
        return TransactionType.CREATE_TOKEN_ACCOUNT

    def fields_to_encode(self):
        """
        Returns the fields to encode as a list of (field_id, value, encoding_function).
        """
        fields = [
            (1, encode_uvarint(self.type().value), lambda x: x),  # Field 1: type
            (2, string_marshal_binary(str(self.url)), lambda x: x),  # Field 2: account URL
            (3, string_marshal_binary(str(self.token_url)), lambda x: x),  # Field 3: token URL
        ]

        # Only include authorities if provided: varint count followed by each URL
        if self.authorities:
            authorities_encoded = b"".join([
                string_marshal_binary(str(auth)) for auth in self.authorities
            ])
            fields.append((4, encode_uvarint(len(self.authorities)) + authorities_encoded, lambda x: x))  # Authorities (0x04)

        return fields

    @classmethod
    def unmarshal(cls, data: bytes) -> "CreateTokenAccount":
        """Deserialize CreateTokenAccount transaction from bytes."""
        reader = io.BytesIO(data)

        # Step 1: Parse Type Field (consumed but not validated here)
        type_value = read_uvarint(reader)

        # Step 2: Parse URL
        url = URL.parse(unmarshal_string(reader))

        # Step 3: Parse Token URL
        token_url = URL.parse(unmarshal_string(reader))

        # Step 4: Parse Authorities (if present). io.BytesIO has no
        # peek() method, so read the next tag byte and rewind when it is
        # not the authorities field — the original peek() call raised
        # AttributeError at runtime.
        authorities = []
        pos = reader.tell()
        if reader.read(1) == b'\x04':
            authorities_count = read_uvarint(reader)
            for _ in range(authorities_count):
                authorities.append(URL.parse(unmarshal_string(reader)))
        else:
            reader.seek(pos)

        return cls(url, token_url, authorities if authorities else None)  # Set to None if empty

    def to_dict(self) -> dict:
        """Convert CreateTokenAccount transaction to a dictionary."""
        tx_dict = {
            **super().to_dict(),
            "url": str(self.url),
            "tokenUrl": str(self.token_url),
        }

        # Only include authorities in dict if it's not empty
        if self.authorities:
            tx_dict["authorities"] = [str(auth) for auth in self.authorities]

        return tx_dict

__init__(url, token_url, authorities=None)

- url: The URL of the token account.
- token_url: The URL of the token issuer.
- authorities: List of authorities for the token account (optional).

Source code in accumulate\models\transactions.py
752
753
754
755
756
757
758
759
760
761
762
763
764
765
def __init__(self, url: URL, token_url: URL, authorities: Optional[List[URL]] = None):
    """
    :param url: The URL of the token account.
    :param token_url: The URL of the token issuer.
    :param authorities: List of authorities for the token account (optional).
    :raises TypeError: if url or token_url is not a URL instance.
    """
    if not isinstance(url, URL):
        raise TypeError("url must be an instance of URL.")
    if not isinstance(token_url, URL):
        raise TypeError("token_url must be an instance of URL.")

    self.url = url
    self.token_url = token_url
    # Normalize an empty list to None so downstream code omits the field
    self.authorities = authorities or None

fields_to_encode()

Returns the fields to encode as a list of (field_id, value, encoding_function).

Source code in accumulate\models\transactions.py
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
def fields_to_encode(self):
    """
    Returns the fields to encode as a list of (field_id, value, encoding_function).
    """
    raw = lambda v: v
    triples = [
        (1, encode_uvarint(self.type().value), raw),            # field 1: type
        (2, string_marshal_binary(str(self.url)), raw),         # field 2: account URL
        (3, string_marshal_binary(str(self.token_url)), raw),   # field 3: token URL
    ]
    # Field 4 (authorities) is only emitted when present: a varint count
    # followed by each length-prefixed authority URL.
    if self.authorities:
        joined = b"".join(string_marshal_binary(str(a)) for a in self.authorities)
        triples.append((4, encode_uvarint(len(self.authorities)) + joined, raw))
    return triples

to_dict()

Convert CreateTokenAccount transaction to a dictionary.

Source code in accumulate\models\transactions.py
820
821
822
823
824
825
826
827
828
829
830
831
832
def to_dict(self) -> dict:
    """Convert CreateTokenAccount transaction to a dictionary."""
    body = dict(super().to_dict())
    body["url"] = str(self.url)
    body["tokenUrl"] = str(self.token_url)
    # The authorities key is omitted entirely when the list is empty/None
    if self.authorities:
        body["authorities"] = [str(a) for a in self.authorities]
    return body

type()

Return transaction type.

Source code in accumulate\models\transactions.py
767
768
769
def type(self) -> TransactionType:
    """Return transaction type (CREATE_TOKEN_ACCOUNT)."""
    return TransactionType.CREATE_TOKEN_ACCOUNT

unmarshal(data) classmethod

Deserialize CreateTokenAccount transaction from bytes.

Source code in accumulate\models\transactions.py
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
@classmethod
def unmarshal(cls, data: bytes) -> "CreateTokenAccount":
    """Deserialize CreateTokenAccount transaction from bytes.

    :param data: Raw marshaled transaction body.
    """
    reader = io.BytesIO(data)

    # Step 1: Parse Type Field (consumed but not validated here)
    type_value = read_uvarint(reader)

    # Step 2: Parse URL
    url = URL.parse(unmarshal_string(reader))

    # Step 3: Parse Token URL
    token_url = URL.parse(unmarshal_string(reader))

    # Step 4: Parse Authorities (if present). io.BytesIO has no peek()
    # method, so read the next tag byte and rewind when it is not the
    # authorities field — the original peek() call raised AttributeError
    # at runtime.
    authorities = []
    pos = reader.tell()
    if reader.read(1) == b'\x04':
        authorities_count = read_uvarint(reader)
        for _ in range(authorities_count):
            authorities.append(URL.parse(unmarshal_string(reader)))
    else:
        reader.seek(pos)

    return cls(url, token_url, authorities if authorities else None)  # Set to None if empty

IssueTokens

Bases: TransactionBodyBase

Represents an Issue Tokens transaction. This version includes only a list of token recipients.

Source code in accumulate\models\transactions.py
1506
1507
1508
1509
1510
1511
1512
1513
1514
1515
1516
1517
1518
1519
1520
1521
1522
1523
1524
1525
1526
1527
1528
1529
1530
1531
1532
1533
1534
1535
1536
1537
1538
1539
1540
1541
1542
1543
1544
1545
1546
1547
1548
1549
1550
1551
1552
1553
1554
1555
1556
1557
1558
1559
1560
1561
1562
1563
1564
1565
1566
1567
1568
1569
1570
1571
1572
1573
1574
1575
1576
1577
1578
1579
1580
1581
1582
1583
1584
1585
1586
1587
1588
1589
1590
1591
1592
1593
1594
1595
1596
1597
1598
1599
1600
1601
1602
1603
1604
class IssueTokens(TransactionBodyBase):
    """
    Represents an Issue Tokens transaction.

    Carries only the list of token recipients; the binary layout is a
    uvarint type field followed by a length-prefixed recipients block.
    """

    def __init__(self, recipients: List["TokenRecipient"]):
        """
        :param recipients: A list of TokenRecipient instances.
        :raises TypeError: If recipients is not a list of TokenRecipient instances.
        """
        if not isinstance(recipients, list) or not all(isinstance(recipient, TokenRecipient) for recipient in recipients):
            raise TypeError("recipients must be a list of TokenRecipient instances.")
        self.recipients = recipients

    def type(self) -> TransactionType:
        """Return the transaction type enum value (ISSUE_TOKENS)."""
        return TransactionType.ISSUE_TOKENS

    def fields_to_encode(self):
        """
        Fields for IssueTokens:
          Field 1: Transaction type (encoded as varint).
          Field 4: Recipients (as a length-prefixed list of recipient fields).
        Both values are pre-encoded, so identity encoders are supplied.
        """
        fields = [
            (1, encode_uvarint(self.type().value), lambda x: x),
            (4, self._marshal_recipients(), lambda x: x),
        ]
        return fields

    def _marshal_recipients(self) -> bytes:
        """
        Serialize recipients as a length-prefixed list.
        Each recipient is encoded as:
          - Field 1: URL (as a length-prefixed string)
          - Field 2: Amount (as a big-number, using big_number_marshal_binary)
        The recipient fields are concatenated (without an extra length wrapper
        per recipient) and the whole block is prefixed with a varint length.
        """
        recipient_entries = []
        for recipient in self.recipients:
            # Field 1: recipient URL
            url_field = field_marshal_binary(1, string_marshal_binary(str(recipient.url)))
            # Field 2: recipient amount
            amount_field = field_marshal_binary(2, big_number_marshal_binary(recipient.amount))
            recipient_entries.append(url_field + amount_field)
        recipients_data = b"".join(recipient_entries)
        length_prefix = encode_uvarint(len(recipients_data))
        return length_prefix + recipients_data

    @classmethod
    def unmarshal(cls, data: bytes) -> "IssueTokens":
        """
        Deserialize IssueTokens transaction from bytes.

        :raises ValueError: If the type field is not ISSUE_TOKENS.
        """
        reader = io.BytesIO(data)
        # Field 1: Transaction type.
        # BUGFIX: decode_uvarint(reader.read()) drained the whole stream, so
        # the second reader.read() for the recipients length got b"".
        # Read each varint incrementally from the stream instead.
        transaction_type = read_uvarint(reader)
        if transaction_type != TransactionType.ISSUE_TOKENS.value:
            raise ValueError("Unexpected transaction type")
        # Field 4: Recipients list (uvarint length + concatenated entries)
        recipients_length = read_uvarint(reader)
        recipients_data = reader.read(recipients_length)
        recipients = cls._unmarshal_recipients(recipients_data)
        return cls(recipients)

    @staticmethod
    def _unmarshal_recipients(data: bytes) -> List["TokenRecipient"]:
        """
        Deserialize the recipients list from a byte stream.
        Each recipient is encoded as:
          - Field 1: URL (length-prefixed string)
          - Field 2: Amount (big-number bytes)
        :raises ValueError: On an unexpected field id.
        """
        recipients = []
        reader = io.BytesIO(data)
        while reader.tell() < len(data):
            # Expect field id 1 for URL.
            field_id = reader.read(1)
            if field_id != b'\x01':
                raise ValueError("Expected field id 1 for recipient URL")
            recipient_url = unmarshal_string(reader)
            # Expect field id 2 for amount.
            field_id = reader.read(1)
            if field_id != b'\x02':
                raise ValueError("Expected field id 2 for recipient amount")
            recipient_amount = int.from_bytes(unmarshal_bytes(reader), byteorder='big')
            recipients.append(TokenRecipient(URL.parse(recipient_url), recipient_amount))
        return recipients

    def to_dict(self) -> dict:
        """
        Convert the IssueTokens transaction to a JSON-serializable dictionary.
        The recipients are output under the key "to".
        """
        return {
            "type": self._format_transaction_type(self.type().name),
            "to": [recipient.to_dict() for recipient in self.recipients],
        }

__init__(recipients)

:param recipients: A list of TokenRecipient instances.

Source code in accumulate\models\transactions.py
1512
1513
1514
1515
1516
1517
1518
def __init__(self, recipients: List["TokenRecipient"]):
    """
    :param recipients: A list of TokenRecipient instances.
    :raises TypeError: If recipients is not a list of TokenRecipient instances.
    """
    well_formed = isinstance(recipients, list) and all(
        isinstance(entry, TokenRecipient) for entry in recipients
    )
    if not well_formed:
        raise TypeError("recipients must be a list of TokenRecipient instances.")
    self.recipients = recipients

_marshal_recipients()

Serialize recipients as a length-prefixed list. Each recipient is encoded as: - Field 1: URL (as a length-prefixed string) - Field 2: Amount (as a big-number, using big_number_marshal_binary) The recipient fields are concatenated (without an extra length wrapper per recipient) and the entire recipients block is prefixed with a varint length.

Source code in accumulate\models\transactions.py
1535
1536
1537
1538
1539
1540
1541
1542
1543
1544
1545
1546
1547
1548
1549
1550
1551
1552
1553
1554
def _marshal_recipients(self) -> bytes:
    """
    Serialize the recipients into a single length-prefixed block.

    Each recipient contributes a field-1 length-prefixed URL string plus a
    field-2 big-number amount; all entries are concatenated back-to-back
    and the whole block is prefixed with its byte length as a uvarint.
    """
    payload = b"".join(
        field_marshal_binary(1, string_marshal_binary(str(entry.url)))
        + field_marshal_binary(2, big_number_marshal_binary(entry.amount))
        for entry in self.recipients
    )
    return encode_uvarint(len(payload)) + payload

_unmarshal_recipients(data) staticmethod

Deserialize the recipients list from a byte stream. Each recipient is encoded as: - Field 1: URL (length-prefixed string) - Field 2: Amount (big-number bytes)

Source code in accumulate\models\transactions.py
1572
1573
1574
1575
1576
1577
1578
1579
1580
1581
1582
1583
1584
1585
1586
1587
1588
1589
1590
1591
1592
1593
1594
@staticmethod
def _unmarshal_recipients(data: bytes) -> List["TokenRecipient"]:
    """
    Decode a recipients block back into TokenRecipient objects.

    Each entry is a field-id 1 length-prefixed URL string followed by a
    field-id 2 big-number amount.

    :raises ValueError: If an entry does not start with the expected field id.
    """
    stream = io.BytesIO(data)
    total = len(data)
    result = []
    while stream.tell() < total:
        if stream.read(1) != b'\x01':
            raise ValueError("Expected field id 1 for recipient URL")
        url_text = unmarshal_string(stream)
        if stream.read(1) != b'\x02':
            raise ValueError("Expected field id 2 for recipient amount")
        amount = int.from_bytes(unmarshal_bytes(stream), byteorder='big')
        result.append(TokenRecipient(URL.parse(url_text), amount))
    return result

fields_to_encode()

Fields for IssueTokens

Field 1: Transaction type (encoded as varint). Field 4: Recipients (as a length-prefixed list of recipient fields).

Source code in accumulate\models\transactions.py
1523
1524
1525
1526
1527
1528
1529
1530
1531
1532
1533
def fields_to_encode(self):
    """
    Describe the binary fields of IssueTokens.

    Field 1 carries the transaction type as a uvarint; field 4 carries the
    pre-encoded, length-prefixed recipients block. Both values are already
    serialized, so a pass-through encoder is supplied for each.
    """
    passthrough = lambda x: x
    return [
        (1, encode_uvarint(self.type().value), passthrough),
        (4, self._marshal_recipients(), passthrough),
    ]

to_dict()

Convert the IssueTokens transaction to a JSON‑serializable dictionary. The recipients are output under the key "to".

Source code in accumulate\models\transactions.py
1596
1597
1598
1599
1600
1601
1602
1603
1604
def to_dict(self) -> dict:
    """
    Render this IssueTokens transaction as a JSON-serializable dictionary.
    Recipients appear under the key "to".
    """
    result = {"type": self._format_transaction_type(self.type().name)}
    result["to"] = [entry.to_dict() for entry in self.recipients]
    return result

unmarshal(data) classmethod

Deserialize IssueTokens transaction from bytes.

Source code in accumulate\models\transactions.py
1556
1557
1558
1559
1560
1561
1562
1563
1564
1565
1566
1567
1568
1569
1570
@classmethod
def unmarshal(cls, data: bytes) -> "IssueTokens":
    """
    Deserialize IssueTokens transaction from bytes.

    Layout: uvarint transaction type, then a uvarint-length-prefixed
    recipients block decoded by _unmarshal_recipients().

    :raises ValueError: If the type field is not ISSUE_TOKENS.
    """
    reader = io.BytesIO(data)
    # Field 1: Transaction type.
    # BUGFIX: decode_uvarint(reader.read()) drained the whole stream, so the
    # second reader.read() for the recipients length got b"". Read each
    # varint incrementally from the stream instead.
    transaction_type = read_uvarint(reader)
    if transaction_type != TransactionType.ISSUE_TOKENS.value:
        raise ValueError("Unexpected transaction type")
    # Field 4: Recipients list (uvarint length + concatenated entries)
    recipients_length = read_uvarint(reader)
    recipients_data = reader.read(recipients_length)
    recipients = cls._unmarshal_recipients(recipients_data)
    return cls(recipients)

RemoteTransaction

Bases: TransactionBodyBase

Represents a Remote Transaction, which references another transaction by its hash.

Source code in accumulate\models\transactions.py
1762
1763
1764
1765
1766
1767
1768
1769
1770
1771
1772
1773
1774
1775
1776
1777
1778
1779
1780
1781
1782
1783
1784
1785
1786
1787
1788
1789
1790
1791
1792
1793
1794
1795
1796
1797
1798
1799
1800
1801
1802
1803
1804
1805
1806
1807
1808
1809
1810
1811
1812
1813
1814
1815
class RemoteTransaction(TransactionBodyBase):
    """
    Represents a Remote Transaction, which references another transaction by its hash.

    The body carries no payload of its own — only the 32-byte hash of the
    transaction it points at.
    """

    def __init__(self, hash: bytes):
        """
        :param hash: The 32-byte transaction hash being referenced.
        :raises ValueError: If hash is not exactly 32 bytes.
        """
        if not isinstance(hash, bytes) or len(hash) != 32:
            raise ValueError("hash must be a 32-byte value.")

        self.hash = hash  # Store the transaction hash

    def type(self) -> TransactionType:
        """Return the transaction type."""
        return TransactionType.REMOTE

    def fields_to_encode(self):
        """
        Fields to encode:
          Field 1: Transaction Type (remoteTransaction)
          Field 2: Transaction Hash (32-byte binary)

        NOTE(review): field 1 passes an int (self.type().value) to
        bytes_marshal_binary, which by its name expects bytes — confirm the
        encoder accepts an int, and that unmarshal() below can parse this
        output (it reads the type as a single raw byte).
        """
        return [
            (1, self.type().value, bytes_marshal_binary),  # Transaction Type
            (2, self.hash, bytes_marshal_binary),  # Transaction Hash
        ]

    @classmethod
    def unmarshal(cls, data: bytes) -> "RemoteTransaction":
        """
        Deserialize RemoteTransaction from bytes.

        NOTE(review): assumes the layout is exactly one raw type byte followed
        by 32 hash bytes — no field-id or length framing. Verify this matches
        what the generic marshal loop emits for fields_to_encode().

        :raises ValueError: On a wrong type byte or short hash.
        """
        reader = io.BytesIO(data)

        # Read the type field (single byte; assumes REMOTE.value < 256)
        tx_type = reader.read(1)
        if int.from_bytes(tx_type, "big") != TransactionType.REMOTE.value:
            raise ValueError("Unexpected transaction type for RemoteTransaction")

        # Read the transaction hash
        hash_bytes = reader.read(32)
        if len(hash_bytes) != 32:
            raise ValueError("Invalid hash length (must be 32 bytes).")

        return cls(hash_bytes)

    def to_dict(self) -> dict:
        """Convert RemoteTransaction to a dictionary."""
        return {
            "type": "remoteTransaction",
            "hash": self.hash.hex(),  # Convert bytes to hex for JSON serialization
        }

__init__(hash)

:param hash: The 32-byte transaction hash being referenced.

Source code in accumulate\models\transactions.py
1767
1768
1769
1770
1771
1772
1773
1774
def __init__(self, hash: bytes):
    """
    :param hash: The 32-byte transaction hash being referenced.
    :raises ValueError: If hash is not a bytes object of length 32.
    """
    valid = isinstance(hash, bytes) and len(hash) == 32
    if not valid:
        raise ValueError("hash must be a 32-byte value.")
    # Keep the referenced transaction hash on the instance.
    self.hash = hash

fields_to_encode()

Fields to encode

Field 1: Transaction Type (remoteTransaction) Field 2: Transaction Hash (32-byte binary)

Source code in accumulate\models\transactions.py
1780
1781
1782
1783
1784
1785
1786
1787
1788
1789
def fields_to_encode(self):
    """
    Fields to encode:
      Field 1: Transaction Type (remoteTransaction)
      Field 2: Transaction Hash (32-byte binary)

    NOTE(review): field 1 hands an int (self.type().value) to
    bytes_marshal_binary, which by its name expects bytes — confirm the
    encoder tolerates an int value here.
    """
    return [
        (1, self.type().value, bytes_marshal_binary),  # Transaction Type
        (2, self.hash, bytes_marshal_binary),  # Transaction Hash
    ]

to_dict()

Convert RemoteTransaction to a dictionary.

Source code in accumulate\models\transactions.py
1810
1811
1812
1813
1814
1815
def to_dict(self) -> dict:
    """Serialize this RemoteTransaction into a JSON-compatible dictionary."""
    # bytes -> hex string so the value survives JSON encoding
    hash_hex = self.hash.hex()
    return {"type": "remoteTransaction", "hash": hash_hex}

type()

Return the transaction type.

Source code in accumulate\models\transactions.py
1776
1777
1778
def type(self) -> TransactionType:
    """Return the transaction type enum value (REMOTE)."""
    return TransactionType.REMOTE

unmarshal(data) classmethod

Deserialize RemoteTransaction from bytes.

Source code in accumulate\models\transactions.py
1791
1792
1793
1794
1795
1796
1797
1798
1799
1800
1801
1802
1803
1804
1805
1806
1807
1808
@classmethod
def unmarshal(cls, data: bytes) -> "RemoteTransaction":
    """
    Deserialize RemoteTransaction from bytes.

    NOTE(review): assumes the layout is exactly one raw type byte (so
    REMOTE.value must fit in a byte) followed by 32 hash bytes, with no
    field-id or length framing — verify against the marshalled output of
    fields_to_encode().

    :raises ValueError: On a wrong type byte or short hash.
    """
    reader = io.BytesIO(data)

    # Read the type field (single raw byte)
    tx_type = reader.read(1)
    if int.from_bytes(tx_type, "big") != TransactionType.REMOTE.value:
        raise ValueError("Unexpected transaction type for RemoteTransaction")

    # Read the referenced transaction hash
    hash_bytes = reader.read(32)
    if len(hash_bytes) != 32:
        raise ValueError("Invalid hash length (must be 32 bytes).")

    return cls(hash_bytes)

SendTokens

Bases: TransactionBodyBase

Represents a SendTokens transaction, supporting multiple recipients.

Source code in accumulate\models\transactions.py
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
class SendTokens(TransactionBodyBase):
    """
    Represents a SendTokens transaction, supporting multiple recipients.

    Amounts supplied via add_recipient() are given in whole ACME and stored
    internally in micro-units (1 ACME = 10**8 micro-units).
    """

    MICRO_UNITS_PER_ACME = 10**8  # 1 ACME = 100,000,000 micro-units

    def __init__(self, recipients: Optional[List[TokenRecipient]] = None):
        # No defensive copy: callers share the list they pass in.
        self.recipients = recipients or []

    def add_recipient(self, to: URL, amount: int) -> None:
        """Add a recipient to the transaction, converting ACME to micro-units.

        :param to: Destination account URL.
        :param amount: Amount in whole ACME; must be positive.
        :raises ValueError: If amount is not greater than zero.
        """
        if amount <= 0:
            raise ValueError("Amount must be greater than zero")

        #  Convert ACME to micro-units before storing
        micro_units = amount * self.MICRO_UNITS_PER_ACME

        recipient = TokenRecipient(to, micro_units)
        self.recipients.append(recipient)

    def type(self) -> TransactionType:
        """Return the transaction type."""
        return TransactionType.SEND_TOKENS

    def fields_to_encode(self):
        """Field 1: type (uvarint via encoder); field 4: pre-encoded recipients."""
        return [
            (1, self.type().value, encode_uvarint),  # Type field
            (4, self._marshal_recipients(), lambda x: x),  # "to" field (wrapped by the generic marshal loop)
        ]

    def _marshal_recipients(self) -> bytes:
        """Encodes the list of TokenRecipients without an extra field wrapper.

        Returns a varint length prefix followed by the concatenated recipient
        fields (field 1: URL string, field 2: big-number amount in micro-units).
        """
        recipients_encoded = b"".join([
            field_marshal_binary(1, string_marshal_binary(str(recipient.url))) +
            field_marshal_binary(2, big_number_marshal_binary(recipient.amount))  # Now stores micro-units
            for recipient in self.recipients
        ])
        length_prefix = encode_uvarint(len(recipients_encoded))
        return length_prefix + recipients_encoded

    def to_dict(self) -> dict:
        """Convert SendTokens transaction to a dictionary."""
        return {
            **super().to_dict(),
            "to": [recipient.to_dict() for recipient in self.recipients]
        }

    @classmethod
    def unmarshal(cls, data: bytes) -> "SendTokens":
        """Deserialize a SendTokens transaction from bytes.

        Mirrors the layout produced by fields_to_encode():
          field id 0x01 + uvarint transaction type,
          field id 0x04 + uvarint length + recipient entries
          (each entry: field 1 URL string, field 2 big-number amount).

        NOTE(review): the previous implementation expected the type as the
        string "sendTokens" and a 0x04 marker before every recipient, which
        does not match what fields_to_encode()/_marshal_recipients() emit —
        confirm against the generic marshal loop's framing.

        :raises ValueError: On unexpected field ids or transaction type.
        """
        reader = io.BytesIO(data)

        # Field 1: transaction type (uvarint)
        if reader.read(1) != b'\x01':
            raise ValueError("Expected field id 1 for type")
        type_value = read_uvarint(reader)
        if type_value != TransactionType.SEND_TOKENS.value:
            raise ValueError("Invalid type marker for SendTokens")

        recipients = []
        # Field 4: one length-prefixed recipients block (may be absent)
        if reader.read(1) == b'\x04':
            block_length = read_uvarint(reader)
            block = io.BytesIO(reader.read(block_length))
            while block.tell() < block_length:
                # Field 1: URL
                if block.read(1) != b'\x01':
                    raise ValueError("Expected field id 1 for recipient URL")
                recipient_url = unmarshal_string(block)
                # Field 2: Amount (micro-units)
                if block.read(1) != b'\x02':
                    raise ValueError("Expected field id 2 for recipient amount")
                recipient_amount = int.from_bytes(unmarshal_bytes(block), byteorder='big')
                recipients.append(TokenRecipient(URL.parse(recipient_url), recipient_amount))

        return cls(recipients)

_marshal_recipients()

Encodes the list of TokenRecipients without an extra field wrapper.

Returns a varint length prefix followed by the concatenated recipient fields.

Source code in accumulate\models\transactions.py
594
595
596
597
598
599
600
601
602
603
604
605
def _marshal_recipients(self) -> bytes:
    """Encode all recipients as one uvarint-length-prefixed block.

    Each entry contributes a field-1 length-prefixed URL string and a
    field-2 big-number amount (amounts are held in micro-units).
    """
    parts = []
    for entry in self.recipients:
        parts.append(field_marshal_binary(1, string_marshal_binary(str(entry.url))))
        parts.append(field_marshal_binary(2, big_number_marshal_binary(entry.amount)))
    body = b"".join(parts)
    return encode_uvarint(len(body)) + body

add_recipient(to, amount)

Add a recipient to the transaction, converting ACME to micro-units.

Source code in accumulate\models\transactions.py
573
574
575
576
577
578
579
580
581
582
def add_recipient(self, to: URL, amount: int) -> None:
    """Append a recipient, converting the ACME amount to micro-units.

    :param to: Destination account URL.
    :param amount: Amount in whole ACME; must be positive.
    :raises ValueError: If amount is not greater than zero.
    """
    if amount <= 0:
        raise ValueError("Amount must be greater than zero")
    # Store the amount in micro-units (1 ACME = 10**8 micro-units).
    micro_units = self.MICRO_UNITS_PER_ACME * amount
    self.recipients.append(TokenRecipient(to, micro_units))

to_dict()

Convert SendTokens transaction to a dictionary.

Source code in accumulate\models\transactions.py
607
608
609
610
611
612
def to_dict(self) -> dict:
    """Render this SendTokens transaction as a dictionary.

    Extends the base-class dictionary with the recipients under "to".
    """
    payload = dict(super().to_dict())
    payload["to"] = [entry.to_dict() for entry in self.recipients]
    return payload

type()

Return the transaction type.

Source code in accumulate\models\transactions.py
584
585
586
def type(self) -> TransactionType:
    """Return the transaction type enum value (SEND_TOKENS)."""
    return TransactionType.SEND_TOKENS

unmarshal(data) classmethod

Deserialize a SendTokens transaction from bytes.

Source code in accumulate\models\transactions.py
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
@classmethod
def unmarshal(cls, data: bytes) -> "SendTokens":
    """Deserialize a SendTokens transaction from bytes.

    Mirrors the layout produced by fields_to_encode():
      field id 0x01 + uvarint transaction type,
      field id 0x04 + uvarint length + recipient entries
      (each entry: field 1 URL string, field 2 big-number amount).

    NOTE(review): the previous implementation expected the type as the
    string "sendTokens" and a 0x04 marker before every recipient, which
    does not match what fields_to_encode()/_marshal_recipients() emit —
    confirm against the generic marshal loop's framing.

    :raises ValueError: On unexpected field ids or transaction type.
    """
    reader = io.BytesIO(data)

    # Field 1: transaction type (uvarint)
    if reader.read(1) != b'\x01':
        raise ValueError("Expected field id 1 for type")
    type_value = read_uvarint(reader)
    if type_value != TransactionType.SEND_TOKENS.value:
        raise ValueError("Invalid type marker for SendTokens")

    recipients = []
    # Field 4: one length-prefixed recipients block (may be absent)
    if reader.read(1) == b'\x04':
        block_length = read_uvarint(reader)
        block = io.BytesIO(reader.read(block_length))
        while block.tell() < block_length:
            # Field 1: URL
            if block.read(1) != b'\x01':
                raise ValueError("Expected field id 1 for recipient URL")
            recipient_url = unmarshal_string(block)
            # Field 2: Amount (micro-units)
            if block.read(1) != b'\x02':
                raise ValueError("Expected field id 2 for recipient amount")
            recipient_amount = int.from_bytes(unmarshal_bytes(block), byteorder='big')
            recipients.append(TokenRecipient(URL.parse(recipient_url), recipient_amount))

    return cls(recipients)

Transaction

Source code in accumulate\models\transactions.py
 64
 65
 66
 67
 68
 69
 70
 71
 72
 73
 74
 75
 76
 77
 78
 79
 80
 81
 82
 83
 84
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
class Transaction:
    """An Accumulate transaction: a header plus an optional typed body.

    Caches its hash after the first get_hash() call and keeps the list of
    signers registered through add_signer().
    """

    def __init__(self, header: "TransactionHeader", body: Optional["TransactionBodyBase"] = None):
        self.header = header
        self.body = body
        self.hash: Optional[bytes] = None  # lazily filled by get_hash()
        self.signers: List["Signer"] = []


    def is_remote(self) -> bool:
        """Check if this transaction is a RemoteTransaction."""
        return isinstance(self.body, RemoteTransaction)

    @classmethod
    async def create(cls, client, signer: "Signer", transaction_type: TransactionType, *args, **kwargs) -> "Transaction":
        """
        Fully constructs a transaction, including the header and body.

        :param client: AccumulateClient instance
        :param signer: Signer instance
        :param transaction_type: The type of transaction to create
        :param args: Additional arguments passed to the transaction body
        :param kwargs: Additional keyword arguments for the transaction body
        :return: A fully constructed Transaction instance
        """
        # Extract public key from signer
        public_key = signer.get_public_key()
        # "recipient" is read (not popped), so the body factory still sees it.
        recipient = kwargs.get("recipient")

        #  Create Transaction Header (handles initiator hash internally)
        tx_header = await TransactionHeader.create(recipient, public_key, signer)

        #  Create Transaction Body using the Factory
        tx_body = await TransactionBodyFactory.create(client, transaction_type, *args, **kwargs)

        return cls(header=tx_header, body=tx_body)


    def add_signer(self, url: "URL", version: int) -> None:
        """Add a signer dynamically."""
        # get_signer() here resolves to the module-level factory, not the
        # get_signer method below (methods are not in scope in method bodies).
        signer = get_signer()(url, version)
        self.signers.append(signer)

    def get_signer(self, url: "URL") -> Optional["Signer"]:
        """Retrieve the first registered signer whose URL matches, or None."""
        return next((signer for signer in self.signers if signer.get_url() == url), None)

    def is_user(self) -> bool:
        """Check if the transaction is initiated by a user."""
        return self.body is not None and self.body.type().is_user()


    def get_hash(self) -> bytes:
        """Compute transaction hash ensuring Accumulate's hashing order.

        Final hash is H(H(header) + H(body)); WriteData bodies fold a
        separately computed entry hash into the body hash first.
        """

        #  If it's a Remote Transaction, return its referenced hash
        if isinstance(self.body, RemoteTransaction):
            logger.debug(" Using referenced hash for RemoteTransaction")
            return self.body.hash  # Remote transactions use the referenced hash directly

        #  Compute transaction hash if not already cached
        if not self.hash:
            logger.debug(" Computing transaction hash...")

            #  Step 1: Hash the header
            header_bytes = self.header.marshal_binary()
            header_hash = hashlib.sha256(header_bytes).digest()
            logger.info(f" Hashed Header: {header_hash.hex()}")  # Log Header Hash

            #  Step 2: Special handling for WriteData transactions
            if isinstance(self.body, WriteData):
                logger.debug(" Special WriteData hashing logic applied")

                #  Hash body WITHOUT the entry
                body_without_entry = self.body.marshal_without_entry()
                body_hash = hashlib.sha256(body_without_entry).digest()
                logger.info(f" Hashed Body (without entry): {body_hash.hex()}")  # Log Body Hash (without entry)

                #  Hash entry separately
                entry_hash = self.body.hash_tree()
                logger.info(f" Hashed Entry (Merkle + SHA-256): {entry_hash.hex()}")  # Log Entry Hash

                #  Step 3: Combine and hash again
                final_body_hash = hashlib.sha256(body_hash + entry_hash).digest()
                logger.info(f" Final Hashed Body: {final_body_hash.hex()}")  # Log Final Body Hash
            else:
                #  Standard transactions
                body_bytes = self.body.marshal() if self.body else b""
                final_body_hash = hashlib.sha256(body_bytes).digest()
                logger.info(f" Standard Hashed Body: {final_body_hash.hex()}")  # Log Body Hash

            #  Final hash: H(H(header) + H(body))
            self.hash = hashlib.sha256(header_hash + final_body_hash).digest()
            logger.info(f" FINAL Transaction Hash: {self.hash.hex()}")  # Log Final Transaction Hash

        return self.hash


    def to_dict(self) -> dict:
        """Convert a Transaction into a dictionary format suitable for submission."""
        if not self.header or not self.body:
            raise ValueError("Transaction must have both header and body set.")

        return {
            "header": self.header.to_dict(),
            "body": self.body.to_dict() if self.body else None,
        }

    def get_id(self) -> TxID:
        """Get the transaction ID based on its hash and principal URL."""
        url = URL.parse(self.header.principal) if self.header.principal else URL(authority="unknown", path="")
        return TxID(url=url, tx_hash=self.get_hash())

    def marshal(self) -> bytes:
        """
        Serialize the transaction to bytes.
        Format:
          [header length (varint)] + [header bytes]
          [body length (varint)] + [body bytes]
        """
        header_data = self.header.marshal_binary()
        header_length = encode_uvarint(len(header_data))

        body_data = self.body.marshal() if self.body else b""
        body_length = encode_uvarint(len(body_data))

        return header_length + header_data + body_length + body_data

    @staticmethod
    def unmarshal(data: bytes) -> "Transaction":
        """
        Deserialize a Transaction from bytes.
        Format:
          [header length (varint)] + [header bytes]
          [body length (varint)] + [body bytes]
        """
        reader = io.BytesIO(data)

        # Read header length
        header_length = read_uvarint(reader)
        header_data = reader.read(header_length)
        header = TransactionHeader.unmarshal(header_data)

        # Read body length
        body_length = read_uvarint(reader)
        body_data = reader.read(body_length) if body_length > 0 else b""
        # NOTE(review): presumably TransactionBodyBase.unmarshal dispatches on
        # the encoded type to a concrete body class — confirm its contract.
        body = TransactionBodyBase.unmarshal(body_data) if body_data else None

        return Transaction(header, body)

    def get_body_hash(self) -> bytes:
        """Compute the hash of the transaction body separately for debugging."""
        if not self.body:
            return hashlib.sha256(b"").digest()

        body_bytes = self.body.marshal()
        return hashlib.sha256(body_bytes).digest()

add_signer(url, version)

Add a signer dynamically.

Source code in accumulate\models\transactions.py
101
102
103
104
def add_signer(self, url: "URL", version: int) -> None:
    """Register a new signer for this transaction."""
    # get_signer() yields the signer class; instantiate it for this URL/version.
    signer_cls = get_signer()
    self.signers.append(signer_cls(url, version))

create(client, signer, transaction_type, *args, **kwargs) async classmethod

Fully constructs a transaction, including the header and body.

:param client: AccumulateClient instance :param signer: Signer instance :param transaction_type: The type of transaction to create :param args: Additional arguments passed to the transaction body :param kwargs: Additional keyword arguments for the transaction body :return: A fully constructed Transaction instance

Source code in accumulate\models\transactions.py
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
@classmethod
async def create(cls, client, signer: "Signer", transaction_type: TransactionType, *args, **kwargs) -> "Transaction":
    """
    Fully constructs a transaction, including the header and body.

    :param client: AccumulateClient instance
    :param signer: Signer instance
    :param transaction_type: The type of transaction to create
    :param args: Additional arguments passed to the transaction body
    :param kwargs: Additional keyword arguments for the transaction body
    :return: A fully constructed Transaction instance
    """
    # Extract public key from signer
    public_key = signer.get_public_key()
    # "recipient" is read (not popped), so the body factory still sees it.
    recipient = kwargs.get("recipient")

    #  Create Transaction Header (handles initiator hash internally)
    tx_header = await TransactionHeader.create(recipient, public_key, signer)

    #  Create Transaction Body using the Factory
    tx_body = await TransactionBodyFactory.create(client, transaction_type, *args, **kwargs)

    return cls(header=tx_header, body=tx_body)

get_body_hash()

Compute the hash of the transaction body separately for debugging.

Source code in accumulate\models\transactions.py
213
214
215
216
217
218
219
def get_body_hash(self) -> bytes:
    """Hash just the transaction body (debugging aid).

    An absent (falsy) body hashes as SHA-256 of the empty byte string.
    """
    payload = self.body.marshal() if self.body else b""
    return hashlib.sha256(payload).digest()

get_hash()

Compute transaction hash ensuring Accumulate's hashing order.

Source code in accumulate\models\transactions.py
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
def get_hash(self) -> bytes:
    """Compute transaction hash ensuring Accumulate's hashing order.

    The final digest is SHA-256(SHA-256(header) + SHA-256(body)) and is
    cached on self.hash after the first computation.  WriteData bodies use
    SHA-256(SHA-256(body-without-entry) + entry-hash) as the body digest,
    where the entry hash comes from the body's hash_tree() (Merkle +
    SHA-256).  Remote transactions short-circuit to the referenced hash.
    """

    #  If it's a Remote Transaction, return its referenced hash
    if isinstance(self.body, RemoteTransaction):
        logger.debug(" Using referenced hash for RemoteTransaction")
        return self.body.hash  # Remote transactions use the referenced hash directly

    #  Compute transaction hash if not already cached
    if not self.hash:
        logger.debug(" Computing transaction hash...")

        #  Step 1: Hash the header
        header_bytes = self.header.marshal_binary()
        header_hash = hashlib.sha256(header_bytes).digest()
        logger.info(f" Hashed Header: {header_hash.hex()}")  # Log Header Hash

        #  Step 2: Special handling for WriteData transactions
        if isinstance(self.body, WriteData):
            logger.debug(" Special WriteData hashing logic applied")

            #  Hash body WITHOUT the entry
            body_without_entry = self.body.marshal_without_entry()
            body_hash = hashlib.sha256(body_without_entry).digest()
            logger.info(f" Hashed Body (without entry): {body_hash.hex()}")  # Log Body Hash (without entry)

            #  Hash entry separately
            entry_hash = self.body.hash_tree()
            logger.info(f" Hashed Entry (Merkle + SHA-256): {entry_hash.hex()}")  # Log Entry Hash

            #  Step 3: Combine and hash again
            final_body_hash = hashlib.sha256(body_hash + entry_hash).digest()
            logger.info(f" Final Hashed Body: {final_body_hash.hex()}")  # Log Final Body Hash
        else:
            #  Standard transactions: hash the marshaled body (empty body hashes b"")
            body_bytes = self.body.marshal() if self.body else b""
            final_body_hash = hashlib.sha256(body_bytes).digest()
            logger.info(f" Standard Hashed Body: {final_body_hash.hex()}")  # Log Body Hash

        #  Final hash: H(H(header) + H(body)); cache so repeat calls are free
        self.hash = hashlib.sha256(header_hash + final_body_hash).digest()
        logger.info(f" FINAL Transaction Hash: {self.hash.hex()}")  # Log Final Transaction Hash

    return self.hash

get_id()

Get the transaction ID based on its hash and principal URL.

Source code in accumulate\models\transactions.py
171
172
173
174
def get_id(self) -> TxID:
    """Build the transaction ID from the principal URL and the transaction hash."""
    principal = self.header.principal
    url = URL.parse(principal) if principal else URL(authority="unknown", path="")
    return TxID(url=url, tx_hash=self.get_hash())

get_signer(url)

Retrieve a signer dynamically.

Source code in accumulate\models\transactions.py
106
107
108
def get_signer(self, url: "URL") -> Optional["Signer"]:
    """Return the first known signer whose URL equals *url*, or None."""
    for candidate in self.signers:
        if candidate.get_url() == url:
            return candidate
    return None

is_remote()

Check if this transaction is a RemoteTransaction.

Source code in accumulate\models\transactions.py
72
73
74
def is_remote(self) -> bool:
    """Report whether this transaction merely references a remote one."""
    body = self.body
    return isinstance(body, RemoteTransaction)

is_user()

Check if the transaction is initiated by a user.

Source code in accumulate\models\transactions.py
110
111
112
def is_user(self) -> bool:
    """True when a body is present and its transaction type is user-initiated."""
    if self.body is None:
        return False
    return self.body.type().is_user()

marshal()

Serialize the transaction to bytes. Format: [header length (varint)] + [header bytes] [body length (varint)] + [body bytes]

Source code in accumulate\models\transactions.py
176
177
178
179
180
181
182
183
184
185
186
187
188
189
def marshal(self) -> bytes:
    """
    Serialize the transaction as two length-prefixed segments.

    Layout: uvarint(len(header)) + header bytes, then
            uvarint(len(body)) + body bytes (an absent body is empty).
    """
    segments = []
    for blob in (self.header.marshal_binary(), self.body.marshal() if self.body else b""):
        segments.append(encode_uvarint(len(blob)))
        segments.append(blob)
    return b"".join(segments)

to_dict()

Convert a Transaction into a dictionary format suitable for submission.

Source code in accumulate\models\transactions.py
161
162
163
164
165
166
167
168
169
def to_dict(self) -> dict:
    """Render the transaction as a submission-ready dictionary.

    Raises ValueError when either the header or the body is missing.
    """
    if not self.header or not self.body:
        raise ValueError("Transaction must have both header and body set.")

    payload = {"header": self.header.to_dict()}
    payload["body"] = self.body.to_dict() if self.body else None
    return payload

unmarshal(data) staticmethod

Deserialize a Transaction from bytes. Format: [header length (varint)] + [header bytes] [body length (varint)] + [body bytes]

Source code in accumulate\models\transactions.py
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
@staticmethod
def unmarshal(data: bytes) -> "Transaction":
    """
    Parse a Transaction from its two length-prefixed segments.

    Layout mirrors marshal(): uvarint header length + header bytes,
    then uvarint body length + body bytes.
    """
    stream = io.BytesIO(data)

    # Header: length prefix, then the header blob.
    header = TransactionHeader.unmarshal(stream.read(read_uvarint(stream)))

    # Body: a zero length means there is no body at all.
    body_size = read_uvarint(stream)
    body_blob = stream.read(body_size) if body_size > 0 else b""
    body = TransactionBodyBase.unmarshal(body_blob) if body_blob else None

    return Transaction(header, body)

TransactionStatus

Source code in accumulate\models\transactions.py
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
class TransactionStatus:
    """Delivery status of a transaction: result code, error, result payload,
    receipt time, initiator, and the signers seen so far."""

    def __init__(
        self,
        tx_id: Optional[str] = None,
        code: int = 0,
        error: Optional["AccumulateError"] = None,
        result: Optional["TransactionResult"] = None,
        received: Optional[int] = None,
        initiator: Optional["URL"] = None,
        signers: Optional[List["Signer"]] = None,
    ):
        # BUG FIX: unmarshal() constructs this class with a `signers=` keyword,
        # which the old signature rejected with a TypeError.  The new parameter
        # defaults to None, so existing callers are unaffected.
        self.tx_id = tx_id
        self.code = code
        self.error = error
        self.result = result
        self.received = received
        self.initiator = initiator
        self.signers: List["Signer"] = list(signers) if signers else []

    def type(self) -> TransactionType:
        """Return the pseudo transaction type for a status record."""
        return TransactionType.TRANSACTION_STATUS

    def to_dict(self) -> dict:
        """Serialize the TransactionStatus to a dictionary."""
        return {
            "tx_id": self.tx_id,
            "code": self.code,
            "error": str(self.error) if self.error else None,
            "result": self.result.to_dict() if self.result else None,
            "received": self.received,
            "initiator": str(self.initiator) if self.initiator else None,
            "signers": [signer.to_dict() for signer in self.signers] if self.signers else [],
        }

    def marshal(self) -> bytes:
        """Serialize TransactionStatus to bytes using Accumulate encoding.

        NOTE(review): absent optional fields are omitted entirely (no field
        tags), which makes the stream ambiguous to decode — confirm against
        the Go reference implementation.
        """
        print("DEBUG: Marshaling TransactionStatus")

        # Serialize tx_id (string with length prefix)
        tx_id_data = string_marshal_binary(self.tx_id) if self.tx_id else b""

        # Serialize code (varint)
        code_data = encode_uvarint(self.code)

        # Serialize error (string with length prefix)
        error_data = string_marshal_binary(str(self.error)) if self.error else b""

        # Serialize result (JSON-like structure)
        result_data = bytes_marshal_binary(json.dumps(self.result.to_dict()).encode()) if self.result else b""

        # Serialize received timestamp (varint)
        received_data = encode_uvarint(self.received) if self.received else b""

        # Serialize initiator (URL as string)
        initiator_data = string_marshal_binary(str(self.initiator)) if self.initiator else b""

        # Serialize signers (count prefix, then each signer back-to-back)
        signers_data = b"".join([signer.marshal() for signer in self.signers])
        signers_length = encode_uvarint(len(self.signers))

        serialized = (
            tx_id_data + code_data + error_data + result_data +
            received_data + initiator_data + signers_length + signers_data
        )

        print(f"DEBUG: Marshaled TransactionStatus: {serialized.hex()}")
        return serialized

    @staticmethod
    def unmarshal(data: bytes) -> "TransactionStatus":
        """Deserialize TransactionStatus from bytes.

        BUG FIXES vs. the previous implementation:
          * every field decoder received reader.read() — the entire remaining
            buffer — so the first call consumed everything and later fields
            decoded from empty input; the reader itself is now passed so each
            helper consumes only its own bytes (NOTE(review): assumes the
            helpers accept a reader, as UpdateAccountAuth.unmarshal already
            does — confirm their signatures);
          * '"Signer".unmarshal(...)' called a method on a string literal,
            which always raised AttributeError.
        """
        print("DEBUG: Unmarshaling TransactionStatus")
        reader = io.BytesIO(data)

        tx_id = unmarshal_string(reader)
        code = read_uvarint(reader)

        error_str = unmarshal_string(reader)
        error = AccumulateError(error_str) if error_str else None

        result_data = unmarshal_bytes(reader)
        result = TransactionResult(json.loads(result_data.decode())) if result_data else None

        received = read_uvarint(reader)

        initiator_str = unmarshal_string(reader)
        initiator = URL.parse(initiator_str) if initiator_str else None

        # Resolve the signer class dynamically, the same way add_signer() does.
        # NOTE(review): assumes the class returned by get_signer() exposes
        # unmarshal(); signers are not individually length-prefixed, so only a
        # trailing single signer can be decoded reliably.
        signers_count = read_uvarint(reader)
        signer_cls = get_signer()
        signers = []
        for _ in range(signers_count):
            signers.append(signer_cls.unmarshal(reader.read()))

        print(f"DEBUG: Parsed TransactionStatus: tx_id={tx_id}, code={code}, error={error}, "
              f"result={result}, received={received}, initiator={initiator}, signers={signers}")

        return TransactionStatus(
            tx_id=tx_id, code=code, error=error, result=result,
            received=received, initiator=initiator, signers=signers
        )

    def delivered(self) -> bool:
        """True when the status code signals success."""
        return self.code == ErrorCode.OK.value

    def remote(self) -> bool:
        # NOTE(review): mapping FAILED -> "remote" looks suspicious; confirm
        # against the ErrorCode definitions.
        return self.code == ErrorCode.FAILED.value

    def pending(self) -> bool:
        # NOTE(review): mapping DID_PANIC -> "pending" looks suspicious;
        # confirm against the ErrorCode definitions.
        return self.code == ErrorCode.DID_PANIC.value

    def failed(self) -> bool:
        """True for any non-OK status code."""
        return self.code != ErrorCode.OK.value

    def set(self, error: Optional[AccumulateError]) -> None:
        """Set the error and update the status code based on the provided error.

        NOTE(review): clearing the error (passing None) sets UNKNOWN_ERROR
        rather than OK — confirm this is intended.
        """
        self.error = error
        if error and error.code:
            self.code = error.code.value
        else:
            self.code = ErrorCode.UNKNOWN_ERROR.value

    def as_error(self) -> Optional[Exception]:
        """Return the stored error, or None when the status is clean."""
        return self.error if self.error else None

    def add_signer(self, url: "URL", version: int) -> None:
        """Add a signer, keeping only the newest version per signer URL."""
        signer = get_signer()(url, version)
        existing = next((s for s in self.signers if s.get_url() == signer.get_url()), None)
        if not existing or signer.get_version() > existing.get_version():
            self.signers.append(signer)

    def get_signer(self, url: "URL") -> Optional["Signer"]:
        """Retrieve a signer dynamically"""
        for signer in self.signers:
            if signer.get_url() == url:
                return signer
        return None

add_signer(url, version)

Add a signer dynamically.

Source code in accumulate\models\transactions.py
354
355
356
357
358
359
def add_signer(self, url: "URL", version: int) -> None:
    """Add a signer dynamically.

    A signer is appended when its URL is new, or when its version is newer
    than the existing entry for the same URL.
    NOTE(review): a superseded older entry is never removed, so both
    versions remain in the list — confirm whether replacement was intended.
    """
    signer = get_signer()(url, version)  # get_signer() returns the signer class
    existing = next((s for s in self.signers if s.get_url() == signer.get_url()), None)
    if not existing or signer.get_version() > existing.get_version():
        self.signers.append(signer)

get_signer(url)

Retrieve a signer dynamically

Source code in accumulate\models\transactions.py
361
362
363
364
365
366
def get_signer(self, url: "URL") -> Optional["Signer"]:
    """Return the signer registered for *url*, or None when unknown."""
    matches = (s for s in self.signers if s.get_url() == url)
    return next(matches, None)

marshal()

Serialize TransactionStatus to bytes using Accumulate encoding.

Source code in accumulate\models\transactions.py
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
def marshal(self) -> bytes:
    """Serialize TransactionStatus to bytes using Accumulate encoding.

    NOTE(review): absent optional fields are omitted entirely (no field
    tags), which makes the byte stream ambiguous for unmarshal() — confirm
    against the Go reference implementation.
    """
    print("DEBUG: Marshaling TransactionStatus")

    # Serialize tx_id (string with length prefix)
    tx_id_data = string_marshal_binary(self.tx_id) if self.tx_id else b""

    # Serialize code (varint)
    code_data = encode_uvarint(self.code)

    # Serialize error (string with length prefix)
    error_data = string_marshal_binary(str(self.error)) if self.error else b""

    # Serialize result (JSON-like structure)
    result_data = bytes_marshal_binary(json.dumps(self.result.to_dict()).encode()) if self.result else b""

    # Serialize received timestamp (varint; omitted when falsy)
    received_data = encode_uvarint(self.received) if self.received else b""

    # Serialize initiator (URL as string)
    initiator_data = string_marshal_binary(str(self.initiator)) if self.initiator else b""

    # Serialize signers: each signer's bytes back-to-back, no per-item framing
    signers_data = b"".join([signer.marshal() for signer in self.signers])
    signers_length = encode_uvarint(len(self.signers))  # Prefix with number of signers

    # Combine all components
    serialized = (
        tx_id_data + code_data + error_data + result_data +
        received_data + initiator_data + signers_length + signers_data
    )

    print(f"DEBUG: Marshaled TransactionStatus: {serialized.hex()}")
    return serialized

set(error)

Set the error and update the status code based on the provided error.

Source code in accumulate\models\transactions.py
343
344
345
346
347
348
349
def set(self, error: Optional[AccumulateError]) -> None:
    """Set the error and update the status code based on the provided error.

    NOTE(review): clearing the error (passing None) sets the code to
    UNKNOWN_ERROR rather than OK — confirm this is intended.
    """
    self.error = error
    if error and error.code:
        self.code = error.code.value
    else:
        self.code = ErrorCode.UNKNOWN_ERROR.value

to_dict()

Serialize the TransactionStatus to a dictionary.

Source code in accumulate\models\transactions.py
242
243
244
245
246
247
248
249
250
251
252
def to_dict(self) -> dict:
    """Serialize the TransactionStatus into a plain dictionary."""
    payload = {}
    payload["tx_id"] = self.tx_id
    payload["code"] = self.code
    payload["error"] = str(self.error) if self.error else None
    payload["result"] = self.result.to_dict() if self.result else None
    payload["received"] = self.received
    payload["initiator"] = str(self.initiator) if self.initiator else None
    payload["signers"] = [s.to_dict() for s in self.signers] if self.signers else []
    return payload

unmarshal(data) staticmethod

Deserialize TransactionStatus from bytes.

Source code in accumulate\models\transactions.py
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
@staticmethod
def unmarshal(data: bytes) -> "TransactionStatus":
    """Deserialize TransactionStatus from bytes.

    BUG FIXES vs. the previous implementation:
      * every field decoder received reader.read() — the whole remaining
        buffer — so the first call consumed everything and later fields
        decoded from empty input; the reader itself is now passed so each
        helper consumes only its own bytes (NOTE(review): assumes the
        helpers accept a reader, as UpdateAccountAuth.unmarshal already
        does — confirm their signatures);
      * '"Signer".unmarshal(...)' called a method on a string literal
        (guaranteed AttributeError); the signer class is now resolved via
        get_signer(), matching add_signer();
      * TransactionStatus.__init__ accepts no 'signers' keyword, so the old
        'TransactionStatus(..., signers=signers)' raised TypeError; signers
        are now attached after construction.
    """
    print("DEBUG: Unmarshaling TransactionStatus")
    reader = io.BytesIO(data)

    tx_id = unmarshal_string(reader)
    code = read_uvarint(reader)

    error_str = unmarshal_string(reader)
    error = AccumulateError(error_str) if error_str else None

    result_data = unmarshal_bytes(reader)
    result = TransactionResult(json.loads(result_data.decode())) if result_data else None

    received = read_uvarint(reader)

    initiator_str = unmarshal_string(reader)
    initiator = URL.parse(initiator_str) if initiator_str else None

    # NOTE(review): signers are not individually length-prefixed by marshal(),
    # so only a trailing single signer can be decoded reliably.
    signers_count = read_uvarint(reader)
    signer_cls = get_signer()
    signers = [signer_cls.unmarshal(reader.read()) for _ in range(signers_count)]

    print(f"DEBUG: Parsed TransactionStatus: tx_id={tx_id}, code={code}, error={error}, "
          f"result={result}, received={received}, initiator={initiator}, signers={signers}")

    status = TransactionStatus(
        tx_id=tx_id, code=code, error=error, result=result,
        received=received, initiator=initiator
    )
    status.signers = signers
    return status

TransferCredits

Bases: TransactionBodyBase

Source code in accumulate\models\transactions.py
1714
1715
1716
1717
1718
1719
1720
1721
1722
1723
1724
1725
1726
1727
1728
1729
1730
1731
1732
1733
1734
1735
1736
1737
1738
1739
1740
1741
1742
1743
1744
1745
1746
1747
1748
1749
1750
1751
1752
1753
1754
1755
1756
1757
1758
1759
1760
class TransferCredits(TransactionBodyBase):
    """Transfer Credits transaction body: moves credits to one or more recipients."""

    def __init__(self, to: List[CreditRecipient]):
        """
        Represents a Transfer Credits transaction.

        :param to: A list of CreditRecipient objects.
        :raises TypeError: if *to* is not a list of CreditRecipient instances.
        """
        if not isinstance(to, list) or not all(isinstance(recipient, CreditRecipient) for recipient in to):
            raise TypeError("to must be a list of CreditRecipient instances.")
        self.to = to

    def type(self) -> TransactionType:
        """Identify this body as a transferCredits transaction."""
        return TransactionType.TRANSFER_CREDITS

    def marshal(self) -> bytes:
        """Serialize: uvarint recipient count, then each recipient as a
        length-prefixed blob."""
        print("DEBUG: Marshaling TransferCredits")

        recipients_count = encode_uvarint(len(self.to))
        recipients_data = b"".join([bytes_marshal_binary(recipient.marshal()) for recipient in self.to])

        serialized = recipients_count + recipients_data
        print(f"DEBUG: Marshaled TransferCredits: {serialized.hex()}")
        return serialized

    @staticmethod
    def unmarshal(data: bytes) -> "TransferCredits":
        """Deserialize TransferCredits transaction from bytes.

        BUG FIX: the previous version decoded each varint from reader.read(),
        which consumes the entire remaining stream on the first call, so every
        per-recipient read returned b"".  The buffer is now walked with an
        explicit offset using the consumed-byte count decode_uvarint returns.
        """
        print("DEBUG: Unmarshaling TransferCredits")

        recipients_count, offset = decode_uvarint(data)

        recipients = []
        for _ in range(recipients_count):
            recipient_length, consumed = decode_uvarint(data[offset:])
            offset += consumed
            recipients.append(CreditRecipient.unmarshal(data[offset:offset + recipient_length]))
            offset += recipient_length

        print(f"DEBUG: Parsed TransferCredits: recipients={recipients}")
        return TransferCredits(recipients)

__init__(to)

Represents a Transfer Credits transaction.

:param to: A list of CreditRecipient objects.

Source code in accumulate\models\transactions.py
1715
1716
1717
1718
1719
1720
1721
1722
1723
def __init__(self, to: List[CreditRecipient]):
    """
    Represents a Transfer Credits transaction.

    :param to: A list of CreditRecipient objects.
    :raises TypeError: if *to* is not a list of CreditRecipient instances.
    """
    valid = isinstance(to, list) and all(isinstance(item, CreditRecipient) for item in to)
    if not valid:
        raise TypeError("to must be a list of CreditRecipient instances.")
    self.to = to

marshal()

Serialize TransferCredits transaction to bytes.

Source code in accumulate\models\transactions.py
1728
1729
1730
1731
1732
1733
1734
1735
1736
1737
1738
1739
1740
1741
def marshal(self) -> bytes:
    """Serialize TransferCredits: uvarint count, then each recipient length-prefixed."""
    print("DEBUG: Marshaling TransferCredits")

    pieces = [encode_uvarint(len(self.to))]
    for recipient in self.to:
        pieces.append(bytes_marshal_binary(recipient.marshal()))

    serialized = b"".join(pieces)
    print(f"DEBUG: Marshaled TransferCredits: {serialized.hex()}")
    return serialized

unmarshal(data) staticmethod

Deserialize TransferCredits transaction from bytes.

Source code in accumulate\models\transactions.py
1743
1744
1745
1746
1747
1748
1749
1750
1751
1752
1753
1754
1755
1756
1757
1758
1759
1760
@staticmethod
def unmarshal(data: bytes) -> "TransferCredits":
    """Deserialize TransferCredits transaction from bytes.

    BUG FIX: the previous version decoded each varint from reader.read(),
    which consumes the entire remaining stream on the first call, so every
    per-recipient read returned b"".  The buffer is now walked with an
    explicit offset using the consumed-byte count decode_uvarint returns.
    """
    print("DEBUG: Unmarshaling TransferCredits")

    recipients_count, offset = decode_uvarint(data)

    recipients = []
    for _ in range(recipients_count):
        recipient_length, consumed = decode_uvarint(data[offset:])
        offset += consumed
        recipients.append(CreditRecipient.unmarshal(data[offset:offset + recipient_length]))
        offset += recipient_length

    print(f"DEBUG: Parsed TransferCredits: recipients={recipients}")
    return TransferCredits(recipients)

UpdateAccountAuth dataclass

Bases: TransactionBodyBase

Represents an Update Account Auth transaction.

Source code in accumulate\models\transactions.py
1294
1295
1296
1297
1298
1299
1300
1301
1302
1303
1304
1305
1306
1307
1308
1309
1310
1311
1312
1313
1314
1315
1316
1317
1318
1319
1320
1321
1322
1323
1324
1325
1326
1327
1328
1329
1330
1331
1332
1333
1334
1335
1336
1337
1338
1339
1340
1341
1342
1343
1344
1345
1346
1347
1348
1349
1350
1351
1352
1353
1354
1355
1356
1357
1358
1359
1360
1361
1362
1363
1364
1365
1366
1367
1368
1369
1370
1371
1372
1373
1374
1375
1376
1377
1378
1379
1380
1381
1382
1383
1384
1385
1386
1387
1388
1389
1390
1391
@dataclass
class UpdateAccountAuth(TransactionBodyBase):
    """
    Represents an Update Account Auth transaction.
    """
    account_url: URL  # not encoded in the body; normally carried in the header
    operations: List[Dict[str, str]]  # each dict must have keys "type" and "authority"

    def type(self) -> TransactionType:
        """Identify this body as an updateAccountAuth transaction."""
        return TransactionType.UPDATE_ACCOUNT_AUTH

    def fields_to_encode(self):
        """
        Field 1: Transaction type as a varint.
        Field 2: Operations as a length-prefixed list (omitted when empty).
        """
        fields = [
            (1, encode_uvarint(self.type().value), lambda x: x),
            (2, self._marshal_operations(), lambda x: x) if self.operations else None,
        ]
        return [field for field in fields if field is not None]

    def _marshal_operations(self) -> bytes:
        """Serialize operations as a length-prefixed binary format."""
        if not self.operations:
            return b""
        operations_data = b"".join([self._marshal_operation(op) for op in self.operations])
        return encode_uvarint(len(operations_data)) + operations_data

    @staticmethod
    def _marshal_operation(operation: Dict[str, str]) -> bytes:
        """
        Serialize a single operation into bytes.
        Expected structure for addAuthority:
          - Nested field 1 (tag 0x01): Operation type (varint).
          - Nested field 2 (tag 0x02): Authority (length-prefixed string).
        :raises ValueError: when a key is missing or the type is unknown.
        """
        if "type" not in operation or "authority" not in operation:
            raise ValueError(f"Invalid operation entry: missing 'type' or 'authority' field in {operation}")

        # Normalize and look up the enum value.
        normalized_type = normalize_operation_type(operation["type"])
        try:
            operation_type_enum = AccountAuthOperationType[normalized_type]
        except KeyError as e:
            raise ValueError(f"Operation type '{operation.get('type')}' is not valid: {e}")

        op_type_field = b'\x01' + encode_uvarint(operation_type_enum.value)
        auth_field = b'\x02' + string_marshal_binary(operation["authority"])
        return op_type_field + auth_field

    def to_dict(self) -> dict:
        """Convert transaction body to a JSON-serializable dictionary."""
        return {
            "type": self._format_transaction_type(self.type().name),
            "operations": [
                {"type": operation["type"], "authority": operation["authority"]}
                for operation in self.operations
            ],
        }

    @classmethod
    def unmarshal(cls, data: bytes) -> "UpdateAccountAuth":
        """Deserialize UpdateAccountAuth transaction from bytes.

        BUG FIX: the operations length was decoded via
        decode_uvarint(reader.read()), which consumed the entire remaining
        stream, so the following reader.read(operations_length) always
        returned b"" and the operations list came back empty.  The remaining
        bytes are now sliced explicitly using the consumed-byte count that
        decode_uvarint returns.
        """
        logger.debug(f" Unmarshaling UpdateAccountAuth: {data.hex()}")
        reader = io.BytesIO(data)

        # Field 1: Transaction type (value ignored here)
        _ = decode_uvarint(unmarshal_bytes(reader))

        # Field 2: length-prefixed operations blob
        remaining = reader.read()
        operations_length, consumed = decode_uvarint(remaining)
        operations_data = remaining[consumed:consumed + operations_length]
        operations = cls._unmarshal_operations(operations_data)
        logger.debug(f" Parsed UpdateAccountAuth: Operations={operations}")
        # account_url is not encoded in the body; it is typically set in the header.
        return cls(account_url=None, operations=operations)

    @staticmethod
    def _unmarshal_operations(data: bytes) -> List[Dict[str, str]]:
        """Deserialize operations, mirroring _marshal_operation's layout.

        BUG FIX: the old loop decoded the operation type from reader.read()
        (the whole remaining buffer), which (a) exhausted the stream and
        (b) actually decoded the 0x01 field *tag*, not the type value.  This
        version walks the buffer with an explicit offset and honours the
        nested field tags written by _marshal_operation.
        NOTE(review): assumes string_marshal_binary writes a uvarint length
        followed by UTF-8 bytes — confirm against the encoding helpers.
        """
        operations = []
        offset = 0
        while offset < len(data):
            # Nested field 1 (tag 0x01): operation type varint
            if data[offset] != 0x01:
                raise ValueError(f"Expected operation-type tag 0x01 at offset {offset}")
            offset += 1
            op_type, consumed = decode_uvarint(data[offset:])
            offset += consumed
            op_type_str = AccountAuthOperationType(op_type).name.lower()

            # Nested field 2 (tag 0x02): length-prefixed authority string
            if data[offset] != 0x02:
                raise ValueError(f"Expected authority tag 0x02 at offset {offset}")
            offset += 1
            auth_length, consumed = decode_uvarint(data[offset:])
            offset += consumed
            authority = data[offset:offset + auth_length].decode("utf-8")
            offset += auth_length

            operations.append({"type": op_type_str, "authority": authority})
        return operations

_marshal_operation(operation) staticmethod

Serialize a single operation into bytes. Expected structure for addAuthority: - Nested field 1 (tag 0x01): Operation type (varint). - Nested field 2 (tag 0x02): Authority (length-prefixed string).

Source code in accumulate\models\transactions.py
1326
1327
1328
1329
1330
1331
1332
1333
1334
1335
1336
1337
1338
1339
1340
1341
1342
1343
1344
1345
1346
1347
1348
@staticmethod
def _marshal_operation(operation: Dict[str, str]) -> bytes:
    """
    Serialize a single operation into bytes.
    Layout for addAuthority:
      - nested field 1 (tag 0x01): operation type as a varint
      - nested field 2 (tag 0x02): authority as a length-prefixed string
    """
    if not ("type" in operation and "authority" in operation):
        raise ValueError(f"Invalid operation entry: missing 'type' or 'authority' field in {operation}")

    # Map the (normalized) operation name onto its enum value.
    normalized = normalize_operation_type(operation["type"])
    try:
        op_enum = AccountAuthOperationType[normalized]
    except KeyError as e:
        raise ValueError(f"Operation type '{operation.get('type')}' is not valid: {e}")

    return (
        b'\x01' + encode_uvarint(op_enum.value)
        + b'\x02' + string_marshal_binary(operation["authority"])
    )

_marshal_operations()

Serialize operations as a length-prefixed binary format.

Source code in accumulate\models\transactions.py
1318
1319
1320
1321
1322
1323
1324
def _marshal_operations(self) -> bytes:
    """Serialize operations as one length-prefixed binary blob (b"" when empty)."""
    if not self.operations:
        return b""
    payload = b"".join(self._marshal_operation(op) for op in self.operations)
    return encode_uvarint(len(payload)) + payload

_unmarshal_operations(data) staticmethod

Deserialize operations from a byte stream.

Source code in accumulate\models\transactions.py
1379
1380
1381
1382
1383
1384
1385
1386
1387
1388
1389
1390
1391
@staticmethod
def _unmarshal_operations(data: bytes) -> List[Dict[str, str]]:
    """Deserialize operations, mirroring _marshal_operation's layout.

    BUG FIX: the old loop decoded the operation type from reader.read()
    (the whole remaining buffer), which (a) exhausted the stream and
    (b) actually decoded the 0x01 field *tag*, not the type value.  This
    version walks the buffer with an explicit offset and honours the
    nested field tags written by _marshal_operation.
    NOTE(review): assumes string_marshal_binary writes a uvarint length
    followed by UTF-8 bytes — confirm against the encoding helpers.
    """
    operations = []
    offset = 0
    while offset < len(data):
        # Nested field 1 (tag 0x01): operation type varint
        if data[offset] != 0x01:
            raise ValueError(f"Expected operation-type tag 0x01 at offset {offset}")
        offset += 1
        op_type, consumed = decode_uvarint(data[offset:])
        offset += consumed
        op_type_str = AccountAuthOperationType(op_type).name.lower()

        # Nested field 2 (tag 0x02): length-prefixed authority string
        if data[offset] != 0x02:
            raise ValueError(f"Expected authority tag 0x02 at offset {offset}")
        offset += 1
        auth_length, consumed = decode_uvarint(data[offset:])
        offset += consumed
        authority = data[offset:offset + auth_length].decode("utf-8")
        offset += auth_length

        operations.append({"type": op_type_str, "authority": authority})
    return operations

fields_to_encode()

Field 1: Transaction type as a varint. Field 2: Operations as a length-prefixed list.

Source code in accumulate\models\transactions.py
1305
1306
1307
1308
1309
1310
1311
1312
1313
1314
1315
1316
def fields_to_encode(self):
    """
    Field 1: Transaction type as a varint.
    Field 2: Operations as a length-prefixed list (omitted when empty).
    """
    fields = [(1, encode_uvarint(self.type().value), lambda x: x)]
    if self.operations:
        fields.append((2, self._marshal_operations(), lambda x: x))
    return fields

to_dict()

Convert transaction body to a JSON‑serializable dictionary.

Source code in accumulate\models\transactions.py
1350
1351
1352
1353
1354
1355
1356
1357
1358
1359
1360
1361
def to_dict(self) -> dict:
    """Convert transaction body to a JSON-serializable dictionary.

    Only the "type" and "authority" keys of each operation are emitted.
    """
    ops = [
        {"type": op["type"], "authority": op["authority"]}
        for op in self.operations
    ]
    return {
        "type": self._format_transaction_type(self.type().name),
        "operations": ops,
    }

unmarshal(data) classmethod

Deserialize UpdateAccountAuth transaction from bytes.

Source code in accumulate\models\transactions.py
1363
1364
1365
1366
1367
1368
1369
1370
1371
1372
1373
1374
1375
1376
1377
@classmethod
def unmarshal(cls, data: bytes) -> "UpdateAccountAuth":
    """Deserialize UpdateAccountAuth transaction from bytes.

    BUG FIX: the operations length was decoded via
    decode_uvarint(reader.read()), which consumed the entire remaining
    stream, so the following reader.read(operations_length) always returned
    b"" and the operations list came back empty.  The remaining bytes are
    now sliced explicitly using the consumed-byte count decode_uvarint
    returns.
    """
    logger.debug(f" Unmarshaling UpdateAccountAuth: {data.hex()}")
    reader = io.BytesIO(data)

    # Field 1: Transaction type (value ignored here)
    _ = decode_uvarint(unmarshal_bytes(reader))

    # Field 2: length-prefixed operations blob
    remaining = reader.read()
    operations_length, consumed = decode_uvarint(remaining)
    operations_data = remaining[consumed:consumed + operations_length]
    operations = cls._unmarshal_operations(operations_data)
    logger.debug(f" Parsed UpdateAccountAuth: Operations={operations}")
    # The account_url is not encoded in the body, it is typically set in the header
    return cls(account_url=None, operations=operations)  # account_url may be set elsewhere

UpdateKeyPage dataclass

Bases: TransactionBodyBase

Represents an Update Key Page transaction.

Source code in accumulate\models\transactions.py
1111
1112
1113
1114
1115
1116
1117
1118
1119
1120
1121
1122
1123
1124
1125
1126
1127
1128
1129
1130
1131
1132
1133
1134
1135
1136
1137
1138
1139
1140
1141
1142
1143
1144
1145
1146
1147
1148
1149
1150
1151
1152
1153
1154
1155
1156
1157
1158
1159
1160
1161
1162
1163
1164
1165
1166
1167
1168
1169
1170
1171
1172
1173
1174
1175
1176
1177
1178
1179
1180
1181
1182
1183
1184
1185
1186
1187
1188
1189
1190
1191
1192
1193
1194
1195
1196
1197
1198
1199
1200
1201
1202
1203
1204
1205
1206
1207
1208
1209
1210
1211
1212
1213
1214
1215
1216
1217
1218
1219
1220
1221
1222
1223
1224
1225
1226
1227
1228
1229
1230
1231
1232
1233
1234
1235
1236
1237
1238
1239
1240
1241
1242
1243
1244
1245
1246
1247
1248
1249
1250
1251
1252
1253
1254
1255
1256
1257
1258
1259
1260
1261
1262
1263
1264
1265
1266
1267
1268
1269
1270
1271
1272
1273
1274
1275
1276
1277
1278
1279
1280
1281
1282
1283
1284
1285
1286
1287
1288
1289
1290
1291
1292
@dataclass
class UpdateKeyPage(TransactionBodyBase):
    """
    Represents an Update Key Page transaction.

    Attributes:
        url: URL of the key page being modified.
        operations: List of operation dicts; each one carries its `entry`
            payload (keyHash / delegate / threshold) nested inside it.
    """
    url: URL
    operations: List[Dict[str, Dict[str, bytes]]]  #  encodes `entry` inside `operation`

    def type(self) -> TransactionType:
        # Transaction-type discriminator; emitted as field 1 of the encoding.
        return TransactionType.UPDATE_KEY_PAGE

    def fields_to_encode(self):
        """
        Define the fields to encode for the transaction.
        """
        # NOTE(review): self.url is not emitted here, yet `unmarshal` below
        # reads a URL before the operations — the two directions look
        # asymmetric; confirm against the wire format.
        fields = [
            # Field 1: Transaction type (updateKeyPage) – encoded as a varint
            (1, encode_uvarint(self.type().value), lambda x: x),
            # Field 2: Operations – marshaled as a length-prefixed list
            (2, self._marshal_operations(), lambda x: x),
        ]
        return fields

    def _marshal_operations(self) -> bytes:
        # Concatenate every marshaled operation, then prefix the total byte
        # length as a uvarint.
        ops = [self._marshal_operation(op) for op in self.operations]
        operations_data = b"".join(ops)
        operations_length = encode_uvarint(len(operations_data))
        return operations_length + operations_data

    @staticmethod
    def _marshal_operation(operation: Dict[str, Any]) -> bytes:
        """
        Serialize an operation dictionary into bytes.
        Handles standard operations (with keyHash or delegate), threshold operations 
        (setThreshold, setRejectThreshold, setResponseThreshold) and update operations.
        """
        op_type_lower = operation["type"].lower()

        if op_type_lower in ["setthreshold", "setrejectthreshold", "setresponsethreshold"]:
            # For threshold operations, return only the inner payload
            numeric_value = operation.get("threshold")
            if numeric_value is None:
                raise ValueError("Missing threshold value for threshold operation.")
            # payload: Tag 0x01, fixed length 0x04, then Tag 0x02 followed by encode_uvarint(numeric_value)
            # For example, for numeric_value = 2 (and if encode_uvarint(2) returns b'\x02')
            # this produces: b'\x01' + b'\x04' + b'\x02' + b'\x02' → hex: 01 04 02 02
            # NOTE(review): the 0x04 length byte is hard-coded even though the
            # uvarint value can occupy more than one byte — confirm.
            return b'\x01' + b'\x04' + b'\x02' + encode_uvarint(numeric_value)

        elif op_type_lower == "update":
            # Handle update operations normally.
            op_type = b'\x01' + encode_uvarint(KeyPageOperationType["UPDATE"].value)
            old_entry = operation.get("oldEntry")
            new_entry = operation.get("newEntry")
            if not old_entry or not new_entry or "keyHash" not in old_entry or "keyHash" not in new_entry:
                raise ValueError("Invalid update operation: must contain both 'oldEntry' and 'newEntry' with a 'keyHash'.")
            # Each entry is tag 0x01 + 32-byte length + the raw key hash,
            # wrapped in length-prefixed fields 0x02 (old) and 0x03 (new).
            old_data = b'\x01' + encode_uvarint(32) + old_entry["keyHash"]
            new_data = b'\x01' + encode_uvarint(32) + new_entry["keyHash"]
            old_field = b'\x02' + encode_uvarint(len(old_data)) + old_data
            new_field = b'\x03' + encode_uvarint(len(new_data)) + new_data
            return op_type + old_field + new_field

        else:
            # For standard operations.
            op_type = b'\x01' + encode_uvarint(KeyPageOperationType[operation["type"].upper()].value)
            entry = operation.get("entry", {})
            if "keyHash" in entry:
                # Tag 0x01: fixed 32-byte key hash.
                key_data = b'\x01' + encode_uvarint(32) + entry["keyHash"]
            elif "delegate" in entry:
                # Tag 0x02: length-prefixed delegate URL string.
                delegate_data = string_marshal_binary(entry["delegate"])
                key_data = b'\x02' + delegate_data
            else:
                raise ValueError("Invalid operation entry: must contain either 'keyHash' or 'delegate'.")
            entry_field = b'\x02' + encode_uvarint(len(key_data)) + key_data
            return op_type + entry_field

    def to_dict(self) -> dict:
        """Convert transaction body to a JSON‑serializable dictionary."""
        op_list = []
        for operation in self.operations:
            op_type = operation["type"].lower()
            if op_type == "update":
                op_list.append({
                    "type": "update",
                    "oldEntry": {"keyHash": operation["oldEntry"]["keyHash"].hex()},
                    "newEntry": {"keyHash": operation["newEntry"]["keyHash"].hex()}
                })
            elif op_type in ["setthreshold", "setrejectthreshold", "setresponsethreshold"]:
                # For threshold operations, check top-level threshold.
                numeric_value = operation.get("threshold")
                if numeric_value is None:
                    numeric_value = operation.get("entry", {}).get("threshold")
                if numeric_value is None:
                    raise ValueError("Missing threshold value in operation.")
                # Restore the camelCase spelling expected by the JSON API.
                if op_type == "setthreshold":
                    op_name = "setThreshold"
                elif op_type == "setrejectthreshold":
                    op_name = "setRejectThreshold"
                elif op_type == "setresponsethreshold":
                    op_name = "setResponseThreshold"
                op_list.append({
                    "type": op_name,
                    "threshold": numeric_value
                })
            else:
                op_list.append({
                    "type": operation["type"],
                    "entry": (
                        {"keyHash": operation["entry"]["keyHash"].hex()}
                        if "keyHash" in operation["entry"]
                        else {"delegate": operation["entry"]["delegate"]}
                    ),
                })
        return {
            "type": self._format_transaction_type(self.type().name),
            "operation": op_list,
        }

    @classmethod
    def unmarshal(cls, data: bytes) -> "UpdateKeyPage":
        """Deserialize UpdateKeyPage transaction from bytes."""
        logger.debug(f" Unmarshaling UpdateKeyPage: {data.hex()}")

        reader = io.BytesIO(data)

        # Step 1: Read URL
        # NOTE(review): fields_to_encode never writes this URL, so marshal and
        # unmarshal do not round-trip as written — confirm against the spec.
        url = URL.parse(unmarshal_bytes(reader).decode("utf-8"))

        # Step 2: Read Operations
        operations_length, _ = read_uvarint(reader)  # Read length prefix
        operations_data = reader.read(operations_length)  # Read operations
        operations = cls._unmarshal_operations(operations_data)

        logger.debug(f" Parsed UpdateKeyPage: URL={url}, Operations={operations}")
        return cls(url, operations)

    @staticmethod
    def _unmarshal_operations(data: bytes) -> List[Dict[str, Dict[str, bytes]]]:
        """Deserialize operations from a byte stream."""
        operations = []
        reader = io.BytesIO(data)

        while reader.tell() < len(data):
            # Extract operation type (as an int)
            operation_type, _ = read_uvarint(reader)

            # Extract entry
            entry = {}
            # Peek at the next byte to determine the entry type
            entry_type_byte, _ = read_uvarint(reader)
            if entry_type_byte == 1:  # KeyHash
                key_hash = reader.read(32)
                if len(key_hash) != 32:
                    raise ValueError("Invalid keyHash length (must be 32 bytes).")
                entry["keyHash"] = key_hash
            elif entry_type_byte == 2:  # Delegate
                delegate_url = unmarshal_bytes(reader).decode("utf-8")
                entry["delegate"] = delegate_url
            elif entry_type_byte == 3:  # Numeric value (e.g., threshold)
                # Decode numeric value (assuming uvarint)
                numeric_value, _ = read_uvarint(reader)
                # You will need to decide which numeric key to use based on the operation type
                # For simplicity, we'll set "threshold". In a complete implementation, check operation type
                # NOTE(review): _marshal_operation emits thresholds as the
                # 01 04 02 <uvarint> payload, which this tag-3 branch does not
                # match — threshold/update round-trips look unsupported; confirm.
                entry["threshold"] = numeric_value
            else:
                raise ValueError("Unknown entry type in UpdateKeyPage.")

            # Wrap the operation in a dictionary; here we assume non-update operations
            # For update operations, you’d handle them separately
            operations.append({
                "type": KeyPageOperationType(operation_type).name.lower(),
                "entry": entry
            })

        return operations

_marshal_operation(operation) staticmethod

Serialize an operation dictionary into bytes. Handles standard operations (with keyHash or delegate), threshold operations (setThreshold, setRejectThreshold, setResponseThreshold) and update operations.

Source code in accumulate\models\transactions.py
1140
1141
1142
1143
1144
1145
1146
1147
1148
1149
1150
1151
1152
1153
1154
1155
1156
1157
1158
1159
1160
1161
1162
1163
1164
1165
1166
1167
1168
1169
1170
1171
1172
1173
1174
1175
1176
1177
1178
1179
1180
1181
1182
1183
1184
@staticmethod
def _marshal_operation(operation: Dict[str, Any]) -> bytes:
    """
    Serialize a single key-page operation dictionary into bytes.

    Supports threshold operations (setThreshold, setRejectThreshold,
    setResponseThreshold), key-rotation ("update") operations, and the
    standard operations whose entry carries a keyHash or delegate.
    """
    kind = operation["type"].lower()

    if kind in ("setthreshold", "setrejectthreshold", "setresponsethreshold"):
        # Threshold operations encode only their inner payload:
        # tag 0x01, fixed length 0x04, tag 0x02, then the uvarint value
        # (e.g. threshold 2 -> hex 01 04 02 02).
        threshold = operation.get("threshold")
        if threshold is None:
            raise ValueError("Missing threshold value for threshold operation.")
        return b'\x01\x04\x02' + encode_uvarint(threshold)

    if kind == "update":
        # Key rotation: both the old and the new entry must carry a keyHash.
        old_entry = operation.get("oldEntry")
        new_entry = operation.get("newEntry")
        if not old_entry or not new_entry or "keyHash" not in old_entry or "keyHash" not in new_entry:
            raise ValueError("Invalid update operation: must contain both 'oldEntry' and 'newEntry' with a 'keyHash'.")
        header = b'\x01' + encode_uvarint(KeyPageOperationType["UPDATE"].value)
        old_payload = b'\x01' + encode_uvarint(32) + old_entry["keyHash"]
        new_payload = b'\x01' + encode_uvarint(32) + new_entry["keyHash"]
        return (
            header
            + b'\x02' + encode_uvarint(len(old_payload)) + old_payload
            + b'\x03' + encode_uvarint(len(new_payload)) + new_payload
        )

    # Standard operations: the entry holds either a key hash or a delegate URL.
    header = b'\x01' + encode_uvarint(KeyPageOperationType[operation["type"].upper()].value)
    entry = operation.get("entry", {})
    if "keyHash" in entry:
        payload = b'\x01' + encode_uvarint(32) + entry["keyHash"]
    elif "delegate" in entry:
        payload = b'\x02' + string_marshal_binary(entry["delegate"])
    else:
        raise ValueError("Invalid operation entry: must contain either 'keyHash' or 'delegate'.")
    return header + b'\x02' + encode_uvarint(len(payload)) + payload

_unmarshal_operations(data) staticmethod

Deserialize operations from a byte stream.

Source code in accumulate\models\transactions.py
1254
1255
1256
1257
1258
1259
1260
1261
1262
1263
1264
1265
1266
1267
1268
1269
1270
1271
1272
1273
1274
1275
1276
1277
1278
1279
1280
1281
1282
1283
1284
1285
1286
1287
1288
1289
1290
1291
1292
@staticmethod
def _unmarshal_operations(data: bytes) -> List[Dict[str, Dict[str, bytes]]]:
    """Deserialize operations from a byte stream."""
    reader = io.BytesIO(data)
    parsed: List[Dict[str, Dict[str, bytes]]] = []

    while reader.tell() < len(data):
        # Operation type comes first, as a uvarint.
        op_code, _ = read_uvarint(reader)
        # The next uvarint discriminates the entry payload that follows.
        tag, _ = read_uvarint(reader)

        entry = {}
        if tag == 1:
            # A fixed 32-byte key hash.
            digest = reader.read(32)
            if len(digest) != 32:
                raise ValueError("Invalid keyHash length (must be 32 bytes).")
            entry["keyHash"] = digest
        elif tag == 2:
            # A length-prefixed UTF-8 delegate URL.
            entry["delegate"] = unmarshal_bytes(reader).decode("utf-8")
        elif tag == 3:
            # A numeric payload; recorded under "threshold" regardless of the
            # operation type (a complete implementation would branch on op_code).
            value, _ = read_uvarint(reader)
            entry["threshold"] = value
        else:
            raise ValueError("Unknown entry type in UpdateKeyPage.")

        # Non-update operations are assumed here; update operations would need
        # dedicated handling.
        parsed.append({
            "type": KeyPageOperationType(op_code).name.lower(),
            "entry": entry,
        })

    return parsed

fields_to_encode()

Define the fields to encode for the transaction.

Source code in accumulate\models\transactions.py
1122
1123
1124
1125
1126
1127
1128
1129
1130
1131
1132
def fields_to_encode(self):
    """Return the (field number, encoded value, encoder) triples for this body."""
    def passthrough(x):
        return x

    return [
        # Field 1: transaction type (updateKeyPage), varint-encoded.
        (1, encode_uvarint(self.type().value), passthrough),
        # Field 2: the length-prefixed list of marshaled operations.
        (2, self._marshal_operations(), passthrough),
    ]

to_dict()

Convert transaction body to a JSON‑serializable dictionary.

Source code in accumulate\models\transactions.py
1190
1191
1192
1193
1194
1195
1196
1197
1198
1199
1200
1201
1202
1203
1204
1205
1206
1207
1208
1209
1210
1211
1212
1213
1214
1215
1216
1217
1218
1219
1220
1221
1222
1223
1224
1225
1226
1227
1228
1229
1230
def to_dict(self) -> dict:
    """Convert transaction body to a JSON-serializable dictionary."""
    # Map lowercase threshold keys back to the camelCase API spelling.
    threshold_names = {
        "setthreshold": "setThreshold",
        "setrejectthreshold": "setRejectThreshold",
        "setresponsethreshold": "setResponseThreshold",
    }

    rendered = []
    for op in self.operations:
        kind = op["type"].lower()
        if kind == "update":
            rendered.append({
                "type": "update",
                "oldEntry": {"keyHash": op["oldEntry"]["keyHash"].hex()},
                "newEntry": {"keyHash": op["newEntry"]["keyHash"].hex()},
            })
        elif kind in threshold_names:
            # The threshold may sit at the top level or nested under "entry".
            value = op.get("threshold")
            if value is None:
                value = op.get("entry", {}).get("threshold")
            if value is None:
                raise ValueError("Missing threshold value in operation.")
            rendered.append({"type": threshold_names[kind], "threshold": value})
        else:
            if "keyHash" in op["entry"]:
                entry = {"keyHash": op["entry"]["keyHash"].hex()}
            else:
                entry = {"delegate": op["entry"]["delegate"]}
            rendered.append({"type": op["type"], "entry": entry})

    return {
        "type": self._format_transaction_type(self.type().name),
        "operation": rendered,
    }

unmarshal(data) classmethod

Deserialize UpdateKeyPage transaction from bytes.

Source code in accumulate\models\transactions.py
1235
1236
1237
1238
1239
1240
1241
1242
1243
1244
1245
1246
1247
1248
1249
1250
1251
@classmethod
def unmarshal(cls, data: bytes) -> "UpdateKeyPage":
    """Deserialize UpdateKeyPage transaction from bytes."""
    logger.debug(f" Unmarshaling UpdateKeyPage: {data.hex()}")

    stream = io.BytesIO(data)

    # The key-page URL comes first, as a length-prefixed UTF-8 string.
    url = URL.parse(unmarshal_bytes(stream).decode("utf-8"))

    # Then a uvarint byte count followed by the packed operations.
    ops_len, _ = read_uvarint(stream)
    operations = cls._unmarshal_operations(stream.read(ops_len))

    logger.debug(f" Parsed UpdateKeyPage: URL={url}, Operations={operations}")
    return cls(url, operations)

WriteData

Bases: TransactionBodyBase

Represents a Write Data transaction.

Source code in accumulate\models\transactions.py
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
class WriteData(TransactionBodyBase):
    """
    Represents a Write Data transaction.

    Wraps a data entry together with two optional flags: `scratch`
    (scratch-space write) and `write_to_state` (persist to account state).
    """

    def __init__(self, entry: DataEntry, scratch: Optional[bool] = None, write_to_state: Optional[bool] = None):
        """
        :param entry: The data entry (must be `AccumulateDataEntry` or `DoubleHashDataEntry`).
        :param scratch: Flag indicating whether it's a scratch write.
        :param write_to_state: Flag indicating whether it writes to state.
        """
        if not isinstance(entry, (AccumulateDataEntry, DoubleHashDataEntry)):  #  Support multiple types
            raise TypeError("entry must be an instance of AccumulateDataEntry or DoubleHashDataEntry.")

        self.entry = entry
        # NOTE(review): write_to_state falls back to False here, while
        # to_dict's docstring below treats True as the default — confirm which
        # default the protocol expects.
        self.scratch = scratch if scratch is not None else False
        self.write_to_state = write_to_state if write_to_state is not None else False

    def type(self) -> TransactionType:
        """Return transaction type."""
        return TransactionType.WRITE_DATA

    def fields_to_encode(self):
        #  Step 1: Marshal the entry as a length-prefixed structure
        entry_marshal = self.entry.marshal()

        #   Prefix the entry with total length (no extra `+ 1`)
        entry_length = encode_uvarint(len(entry_marshal))  #  length prefix
        entry_encoded = entry_length + entry_marshal  #  No extra nested field

        fields = [
            (1, encode_uvarint(self.type().value), lambda x: x),  #  Transaction Type
            (2, entry_encoded, lambda x: x),  #  Marshal entire entry properly
        ]

        #  Only include scratch if True
        if self.scratch:
            fields.append((3, boolean_marshal_binary(self.scratch), lambda x: x))

        #  Only include writeToState if False
        if not self.write_to_state:
            fields.append((4, boolean_marshal_binary(self.write_to_state), lambda x: x))

        return fields

    def marshal_without_entry(self) -> bytes:
        """
        Marshal WriteData without the `entry` field.
        Needed to match Go SDK hashing logic.
        """
        logger.debug(" Marshaling WriteData WITHOUT Entry Field")

        serialized = b""
        # Same conditional field layout as fields_to_encode, minus field 2.
        fields = [
            (1, encode_uvarint(self.type().value), lambda x: x),  #  Type field
        ]

        if self.scratch:
            fields.append((3, boolean_marshal_binary(self.scratch), lambda x: x))

        if not self.write_to_state:
            fields.append((4, boolean_marshal_binary(self.write_to_state), lambda x: x))

        #  Debugging: Log each field separately
        for field_num, value, encode_func in fields:
            encoded_value = encode_func(value)
            logger.debug(f" Encoding Field {field_num}: {encoded_value.hex() if isinstance(encoded_value, bytes) else encoded_value}")
            serialized += field_marshal_binary(field_num, encoded_value)

        logger.debug(f" FINAL Marshaled WriteData WITHOUT Entry (HEX): {serialized.hex()}")

        return serialized

    @classmethod
    def unmarshal(cls, data: bytes) -> "WriteData":
        """Deserialize WriteData transaction from bytes."""
        logger.debug(f" Unmarshaling WriteData")

        reader = io.BytesIO(data)

        #  Step 1: Read Type Field
        # NOTE(review): read_uvarint returns a (value, size) tuple elsewhere in
        # this module; here the tuple itself is kept and only interpolated into
        # the debug log — confirm intended.
        type_value = read_uvarint(reader)

        #  Step 2: Read and Unmarshal Data Entry
        entry_data = unmarshal_bytes(reader)
        entry = DataEntry.unmarshal(entry_data)  #  Use DataEntry unmarshal to detect type

        #  Step 3: Read Boolean Flags
        # NOTE(review): fields_to_encode omits these flags at their defaults,
        # so a short buffer makes reader.read(1)[0] raise IndexError — confirm.
        scratch_flag = bool(reader.read(1)[0])  # Read single byte for scratch flag
        state_flag = bool(reader.read(1)[0])  # Read single byte for write_to_state flag

        logger.debug(f" Parsed WriteData: type={type_value}, scratch={scratch_flag}, write_to_state={state_flag}, entry={entry}")
        return cls(entry, scratch_flag, state_flag)

    def to_dict(self) -> dict:
        """
         Convert WriteData transaction to a dictionary, ensuring that default values (scratch=False, writeToState=True) are omitted.
        """
        data = {
            **super().to_dict(),
            "entry": self.entry.to_dict(),
        }

        #  Only include `scratch` if True
        if self.scratch:
            data["scratch"] = self.scratch

        #  Only include `writeToState` if False
        if not self.write_to_state:
            data["writeToState"] = self.write_to_state

        return data

    def hash_tree(self) -> bytes:
        """
        Compute the Merkle tree hash of the data entry.
        Go SDK uses `sha256(sha256(MerkleRoot(entry_data)))`
        """
        logger.debug(" Computing Merkle Tree Hash for Entry Data")

        #  Compute initial SHA-256 hashes of each chunk
        data_hashes = [hashlib.sha256(chunk).digest() for chunk in self.entry.get_data()]

        #  Compute the Merkle root
        if len(data_hashes) == 1:
            merkle_root = data_hashes[0]
        else:
            while len(data_hashes) > 1:
                temp_hashes = []
                for i in range(0, len(data_hashes), 2):
                    if i + 1 < len(data_hashes):
                        combined = data_hashes[i] + data_hashes[i + 1]
                    else:
                        combined = data_hashes[i]  # Handle odd number of elements
                    temp_hashes.append(hashlib.sha256(combined).digest())
                data_hashes = temp_hashes
            merkle_root = data_hashes[0]

        #  Double-hash the Merkle root for `DoubleHashDataEntry`
        final_hash = hashlib.sha256(merkle_root).digest()
        logger.debug(f" Merkle Root SHA-256 Hash: {merkle_root.hex()}")
        logger.debug(f" FINAL Double Hash (SHA-256): {final_hash.hex()}")

        return final_hash

__init__(entry, scratch=None, write_to_state=None)

:param entry: The data entry (must be AccumulateDataEntry or DoubleHashDataEntry). :param scratch: Flag indicating whether it's a scratch write. :param write_to_state: Flag indicating whether it writes to state.

Source code in accumulate\models\transactions.py
840
841
842
843
844
845
846
847
848
849
850
851
def __init__(self, entry: DataEntry, scratch: Optional[bool] = None, write_to_state: Optional[bool] = None):
    """
    :param entry: The data entry (must be `AccumulateDataEntry` or `DoubleHashDataEntry`).
    :param scratch: Flag indicating whether it's a scratch write.
    :param write_to_state: Flag indicating whether it writes to state.
    """
    # Reject any entry that is not one of the supported concrete types.
    if not isinstance(entry, (AccumulateDataEntry, DoubleHashDataEntry)):
        raise TypeError("entry must be an instance of AccumulateDataEntry or DoubleHashDataEntry.")

    self.entry = entry
    # Unspecified flags fall back to False.
    self.scratch = False if scratch is None else scratch
    self.write_to_state = False if write_to_state is None else write_to_state

hash_tree()

Compute the Merkle tree hash of the data entry. Go SDK uses sha256(sha256(MerkleRoot(entry_data)))

Source code in accumulate\models\transactions.py
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
def hash_tree(self) -> bytes:
    """
    Compute the Merkle tree hash of the data entry.
    Go SDK uses `sha256(sha256(MerkleRoot(entry_data)))`
    """
    logger.debug(" Computing Merkle Tree Hash for Entry Data")

    # Leaf level: one SHA-256 digest per data chunk.
    level = [hashlib.sha256(chunk).digest() for chunk in self.entry.get_data()]

    # Reduce pairwise until a single root remains. An odd trailing node is
    # re-hashed on its own, matching the original pairing rule; a single-leaf
    # tree skips the loop entirely and uses the leaf as the root.
    while len(level) > 1:
        reduced = []
        for i in range(0, len(level), 2):
            pair = level[i] + level[i + 1] if i + 1 < len(level) else level[i]
            reduced.append(hashlib.sha256(pair).digest())
        level = reduced
    merkle_root = level[0]

    # Hash the root once more (the "double hash" for DoubleHashDataEntry).
    final_hash = hashlib.sha256(merkle_root).digest()
    logger.debug(f" Merkle Root SHA-256 Hash: {merkle_root.hex()}")
    logger.debug(f" FINAL Double Hash (SHA-256): {final_hash.hex()}")

    return final_hash

marshal_without_entry()

Marshal WriteData without the entry field. Needed to match Go SDK hashing logic.

Source code in accumulate\models\transactions.py
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
def marshal_without_entry(self) -> bytes:
    """
    Marshal WriteData without the `entry` field.
    Needed to match Go SDK hashing logic.
    """
    logger.debug(" Marshaling WriteData WITHOUT Entry Field")

    # Field 1 is always present; flags 3 and 4 are emitted only when they
    # deviate from their defaults (scratch=True, writeToState=False).
    parts = [(1, encode_uvarint(self.type().value), lambda x: x)]
    if self.scratch:
        parts.append((3, boolean_marshal_binary(self.scratch), lambda x: x))
    if not self.write_to_state:
        parts.append((4, boolean_marshal_binary(self.write_to_state), lambda x: x))

    serialized = b""
    for field_num, value, encode_func in parts:
        encoded_value = encode_func(value)
        logger.debug(f" Encoding Field {field_num}: {encoded_value.hex() if isinstance(encoded_value, bytes) else encoded_value}")
        serialized += field_marshal_binary(field_num, encoded_value)

    logger.debug(f" FINAL Marshaled WriteData WITHOUT Entry (HEX): {serialized.hex()}")

    return serialized

to_dict()

Convert WriteData transaction to a dictionary, ensuring that default values (scratch=False, writeToState=True) are omitted.

Source code in accumulate\models\transactions.py
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
def to_dict(self) -> dict:
    """Render WriteData as a dictionary, leaving out flags that still hold
    their default values (scratch=False, writeToState=True)."""
    payload = dict(super().to_dict())
    payload["entry"] = self.entry.to_dict()

    # Emit `scratch` only when True.
    if self.scratch:
        payload["scratch"] = self.scratch

    # Emit `writeToState` only when False.
    if not self.write_to_state:
        payload["writeToState"] = self.write_to_state

    return payload

type()

Return transaction type.

Source code in accumulate\models\transactions.py
853
854
855
def type(self) -> TransactionType:
    """Identify this body as a WRITE_DATA transaction."""
    return TransactionType.WRITE_DATA

unmarshal(data) classmethod

Deserialize WriteData transaction from bytes.

Source code in accumulate\models\transactions.py
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
@classmethod
def unmarshal(cls, data: bytes) -> "WriteData":
    """Deserialize a WriteData transaction body from bytes.

    :param data: Raw body bytes: uvarint type, a length-prefixed entry, then
                 optional one-byte scratch / write-to-state flags.
    :return: A WriteData instance.
    """
    logger.debug(" Unmarshaling WriteData")

    reader = io.BytesIO(data)

    #  Step 1: Read Type Field.
    # BUG FIX: read_uvarint returns a (value, bytes_consumed) tuple (see its
    # tuple-unpacked uses elsewhere in this module); the previous code stored
    # the whole tuple as the type value.
    type_value, _ = read_uvarint(reader)

    #  Step 2: Read and unmarshal the length-prefixed data entry;
    #  DataEntry.unmarshal detects the concrete entry type.
    entry_data = unmarshal_bytes(reader)
    entry = DataEntry.unmarshal(entry_data)

    #  Step 3: Read optional boolean flags. The marshaler omits scratch when
    #  False and writeToState when True, so tolerate a truncated tail instead
    #  of raising IndexError, falling back to those defaults.
    scratch_byte = reader.read(1)
    scratch_flag = bool(scratch_byte[0]) if scratch_byte else False
    state_byte = reader.read(1)
    state_flag = bool(state_byte[0]) if state_byte else True

    logger.debug(f" Parsed WriteData: type={type_value}, scratch={scratch_flag}, write_to_state={state_flag}, entry={entry}")
    return cls(entry, scratch_flag, state_flag)

Queries (Models)

queries

AccumulateError

Bases: Exception

Base class for all custom exceptions in the Accumulate client.

Source code in accumulate\models\queries.py
14
15
16
class AccumulateError(Exception):
    """Root of the Accumulate client's custom exception hierarchy."""

AnchorSearchQuery

Bases: Query

Represents an anchor search query.

Source code in accumulate\models\queries.py
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
class AnchorSearchQuery(Query):
    """Query that searches for a Merkle anchor, optionally requesting a receipt."""

    def __init__(self, anchor: bytes, include_receipt: Optional[ReceiptOptions] = None):
        super().__init__(QueryType.ANCHOR_SEARCH)
        self.anchor = anchor
        self.include_receipt = include_receipt

    def is_valid(self):
        """Validate the anchor search query."""
        if not self.anchor:
            raise AccumulateError("Anchor is required for an anchor search query.")
        receipt = self.include_receipt
        # A receipt request must pin either "any" or a specific height.
        if receipt and not (receipt.for_any or receipt.for_height is not None):
            raise AccumulateError("Invalid ReceiptOptions: Must specify `for_any` or `for_height`.")

    def to_dict(self) -> dict:
        payload = super().to_dict()
        payload["anchor"] = self.anchor.hex() if self.anchor else None
        payload["include_receipt"] = (
            self.include_receipt.to_dict() if self.include_receipt else None
        )
        return payload

is_valid()

Validate the anchor search query.

Source code in accumulate\models\queries.py
259
260
261
262
263
264
265
266
def is_valid(self):
    """Ensure the query carries an anchor and, when present, coherent receipt options."""
    if not self.anchor:
        raise AccumulateError("Anchor is required for an anchor search query.")
    receipt = self.include_receipt
    # A receipt request must pin either "any" or a specific height.
    if receipt and not (receipt.for_any or receipt.for_height is not None):
        raise AccumulateError("Invalid ReceiptOptions: Must specify `for_any` or `for_height`.")

BlockQuery

Bases: Query

Represents a block query.

Source code in accumulate\models\queries.py
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
class BlockQuery(Query):
    """Represents a block query.

    Per axis, exactly one addressing style may be used: a specific ``minor``
    index or a ``minor_range`` (likewise ``major``/``major_range``). An
    optional ``entry_range`` pages through a single block's entries, and
    ``omit_empty`` skips empty blocks when set.
    """

    def __init__(
        self,
        minor: Optional[int] = None,
        major: Optional[int] = None,
        minor_range: Optional[RangeOptions] = None,
        major_range: Optional[RangeOptions] = None,
        entry_range: Optional[RangeOptions] = None,
        omit_empty: Optional[bool] = None,
    ):
        super().__init__(QueryType.BLOCK)
        self.minor = minor
        self.major = major
        self.minor_range = minor_range
        self.major_range = major_range
        self.entry_range = entry_range
        self.omit_empty = omit_empty

    def is_valid(self):
        """Validate the block query. Ensure at least one required field is set.

        Raises:
            AccumulateError: if no block selector is given, or mutually
                exclusive selectors are combined, or the entry range is empty.

        Fix: uses explicit ``is not None`` checks so that index 0 — falsy
        but a legitimate value — counts as "specified" (the previous
        truthiness checks silently rejected ``minor=0`` / ``start=0``).
        """
        if (
            self.minor is None
            and self.major is None
            and self.minor_range is None
            and self.major_range is None
        ):
            raise AccumulateError(
                "BlockQuery must specify at least one of: minor, major, minor_range, or major_range."
            )
        if self.minor is not None and self.minor_range is not None:
            raise AccumulateError("Cannot specify both minor and minor_range.")
        if self.major is not None and self.major_range is not None:
            raise AccumulateError("Cannot specify both major and major_range.")
        if self.entry_range is not None:
            if self.minor_range is not None or self.major_range is not None:
                raise AccumulateError("EntryRange cannot be used with minor/major ranges.")
            if self.entry_range.start is None and self.entry_range.count is None:
                raise AccumulateError("EntryRange must specify `start` or `count`.")

    def to_dict(self) -> dict:
        """Convert BlockQuery to the API-compatible format (only set fields are emitted)."""
        data = super().to_dict()
        query_params = {}

        if self.minor is not None:
            query_params["minor"] = self.minor
        if self.major is not None:
            query_params["major"] = self.major
        if self.minor_range:
            query_params["minor_range"] = self.minor_range.to_dict()
        if self.major_range:
            query_params["major_range"] = self.major_range.to_dict()
        if self.entry_range:
            query_params["entry_range"] = self.entry_range.to_dict()
        if self.omit_empty is not None:
            query_params["omit_empty"] = self.omit_empty  # True/False

        data.update(query_params)
        return data

is_valid()

Validate the block query. Ensure at least one required field is set.

Source code in accumulate\models\queries.py
214
215
216
217
218
219
220
221
222
223
224
225
226
227
def is_valid(self):
    """Validate the block query. Ensure at least one required field is set."""
    if not (self.minor or self.major or self.minor_range or self.major_range):
        raise AccumulateError(
            "BlockQuery must specify at least one of: minor, major, minor_range, or major_range."
        )
    if self.minor and self.minor_range:
        raise AccumulateError("Cannot specify both minor and minor_range.")
    if self.major and self.major_range:
        raise AccumulateError("Cannot specify both major and major_range.")
    if self.entry_range and (self.minor_range or self.major_range):
        raise AccumulateError("EntryRange cannot be used with minor/major ranges.")
    if self.entry_range and not (self.entry_range.start or self.entry_range.count):
        raise AccumulateError("EntryRange must specify `start` or `count`.")

to_dict()

Convert BlockQuery to the API-compatible format.

Source code in accumulate\models\queries.py
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
def to_dict(self) -> dict:
    """Convert BlockQuery to the API-compatible format."""
    data = super().to_dict()
    query_params = {}

    if self.minor is not None:
        query_params["minor"] = self.minor
    if self.major is not None:
        query_params["major"] = self.major
    if self.minor_range:
        query_params["minor_range"] = self.minor_range.to_dict()
    if self.major_range:
        query_params["major_range"] = self.major_range.to_dict()
    if self.entry_range:
        query_params["entry_range"] = self.entry_range.to_dict()
    if self.omit_empty is not None:
        query_params["omit_empty"] = self.omit_empty  # True/False

    data.update(query_params)
    return data

ChainQuery

Bases: Query

Represents a chain query.

Source code in accumulate\models\queries.py
 62
 63
 64
 65
 66
 67
 68
 69
 70
 71
 72
 73
 74
 75
 76
 77
 78
 79
 80
 81
 82
 83
 84
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
class ChainQuery(Query):
    """Represents a chain query.

    Selects a chain by ``name`` and optionally a single entry (by ``index``
    or ``entry`` hash) or a ``range`` of entries; the two styles are
    mutually exclusive.
    """

    def __init__(
        self,
        name: Optional[str] = None,
        index: Optional[int] = None,
        entry: Optional[bytes] = None,
        range: Optional[RangeOptions] = None,
        include_receipt: Optional[ReceiptOptions] = None,
    ):
        super().__init__(QueryType.CHAIN)
        self.name = name
        self.index = index
        self.entry = entry
        self.range = range
        self.include_receipt = include_receipt

    def is_valid(self):
        """Validate the chain query.

        Raises:
            AccumulateError: on mutually exclusive selectors, a missing
                chain name, or invalid receipt options.

        Fix: uses ``is not None`` so that ``index=0`` (falsy but valid)
        is treated as a specified selector.
        """
        has_index = self.index is not None
        has_entry = self.entry is not None
        if self.range and (has_index or has_entry):
            raise AccumulateError("Range is mutually exclusive with index and entry.")
        if not self.name and (has_index or has_entry or self.range):
            raise AccumulateError("Name is required when querying by index, entry, or range.")
        if self.include_receipt and not self.include_receipt.is_valid():
            raise AccumulateError("Invalid ReceiptOptions.")

    def to_dict(self) -> dict:
        """Ensure `name` is always included in the query (defaults to "main")."""
        data = super().to_dict()
        data.update({
            "name": self.name if self.name else "main",
            "index": self.index,
            "entry": self.entry,
            "range": self.range.to_dict() if self.range else None,
            "include_receipt": self.include_receipt.to_dict() if self.include_receipt else None,
        })
        return data

is_valid()

Validate the chain query.

Source code in accumulate\models\queries.py
80
81
82
83
84
85
86
87
def is_valid(self):
    """Validate the chain query."""
    if self.range and (self.index or self.entry):
        raise AccumulateError("Range is mutually exclusive with index and entry.")
    if not self.name and (self.index or self.entry or self.range):
        raise AccumulateError("Name is required when querying by index, entry, or range.")
    if self.include_receipt and not self.include_receipt.is_valid():
        raise AccumulateError("Invalid ReceiptOptions.")

to_dict()

Ensure name is always included in the query.

Source code in accumulate\models\queries.py
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
def to_dict(self) -> dict:
    """Ensure `name` is always included in the query."""
    data = super().to_dict()
    data.update({
        "name": self.name if self.name else "main",
        "index": self.index,
        "entry": self.entry,
        "range": self.range.to_dict() if self.range else None,
        "include_receipt": self.include_receipt.to_dict() if self.include_receipt else None,
    })
    return data

DataQuery

Bases: Query

Represents a data query.

Source code in accumulate\models\queries.py
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
class DataQuery(Query):
    """Represents a data query.

    Selects a data entry by ``index`` or ``entry`` hash, or a ``range`` of
    entries; the two styles are mutually exclusive.
    """

    def __init__(
        self,
        index: Optional[int] = None,
        entry: Optional[bytes] = None,
        range: Optional[RangeOptions] = None,
    ):
        super().__init__(QueryType.DATA)
        self.index = index
        self.entry = entry
        self.range = range

    def is_valid(self):
        """Validate the data query.

        Raises:
            AccumulateError: if a range is combined with index/entry.

        Fix: ``is not None`` so that ``index=0`` (falsy but valid) is
        recognized as a specified selector.
        """
        if self.range and (self.index is not None or self.entry is not None):
            raise AccumulateError("Range is mutually exclusive with index and entry.")

    def to_dict(self) -> dict:
        """Serialize the query; unset selectors are emitted as None."""
        data = super().to_dict()
        data.update({
            "index": self.index,
            "entry": self.entry,
            "range": self.range.to_dict() if self.range else None,
        })
        return data

is_valid()

Validate the data query.

Source code in accumulate\models\queries.py
119
120
121
122
def is_valid(self):
    """Validate the data query."""
    if self.range and (self.index or self.entry):
        raise AccumulateError("Range is mutually exclusive with index and entry.")

DefaultQuery

Bases: Query

Represents the default query type.

Source code in accumulate\models\queries.py
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
class DefaultQuery(Query):
    """The default query: fetch a record, optionally requesting a receipt."""

    def __init__(self, include_receipt: Optional[ReceiptOptions] = None):
        super().__init__(QueryType.DEFAULT)
        self.include_receipt = include_receipt

    def is_valid(self):
        """Raise AccumulateError when the receipt options are malformed."""
        receipt = self.include_receipt
        if receipt and not (receipt.for_any or receipt.for_height is not None):
            raise AccumulateError("Invalid ReceiptOptions: Must specify `for_any` or `for_height`.")

    def to_dict(self) -> dict:
        """Serialize; `include_receipt` is None when not requested."""
        payload = super().to_dict()
        payload["include_receipt"] = (
            self.include_receipt.to_dict() if self.include_receipt else None
        )
        return payload

is_valid()

Validate the default query.

Source code in accumulate\models\queries.py
46
47
48
49
50
51
def is_valid(self):
    """Validate the default query."""
    if self.include_receipt and not (
        self.include_receipt.for_any or self.include_receipt.for_height is not None
    ):
        raise AccumulateError("Invalid ReceiptOptions: Must specify `for_any` or `for_height`.")

DelegateSearchQuery

Bases: Query

Represents a delegate search query.

Source code in accumulate\models\queries.py
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
class DelegateSearchQuery(Query):
    """Searches an authority for a delegate entry."""

    def __init__(self, delegate: str):
        super().__init__(QueryType.DELEGATE_SEARCH)
        self.delegate = delegate

    def is_valid(self):
        """Raise AccumulateError when no delegate was supplied."""
        # NOTE(review): no URL-format validation is performed beyond non-emptiness.
        if not self.delegate:
            raise AccumulateError("Delegate is required for a delegate search query.")

    def to_dict(self) -> dict:
        """Serialize, appending the delegate to the base payload."""
        payload = super().to_dict()
        payload["delegate"] = self.delegate
        return payload

is_valid()

Validate the delegate search query.

Source code in accumulate\models\queries.py
375
376
377
378
def is_valid(self):
    """Validate the delegate search query."""
    if not self.delegate:
        raise AccumulateError("Delegate is required for a delegate search query.") #

DirectoryQuery

Bases: Query

Represents a directory query.

Source code in accumulate\models\queries.py
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
class DirectoryQuery(Query):
    """Lists the entries of an ADI directory, with paging via RangeOptions."""

    def __init__(self, range: Optional[RangeOptions] = None):
        super().__init__(QueryType.DIRECTORY)
        self.range = range

    def is_valid(self):
        """Raise AccumulateError when the supplied range is unusable."""
        if self.range is None:
            return
        if self.range.start is None and self.range.count is None:
            raise AccumulateError("Invalid RangeOptions: Must include `start` or `count`.")

    def to_dict(self) -> dict:
        """Serialize, always emitting a fully populated `range` object.

        The API rejects requests without a range, so missing fields are
        filled with defaults (start=0, count=10, from_end/expand=False).
        """
        payload = super().to_dict()
        rng = self.range
        payload["range"] = {
            "start": rng.start if rng and rng.start is not None else 0,
            "count": rng.count if rng and rng.count is not None else 10,
            "from_end": rng.from_end if rng and rng.from_end is not None else False,
            "expand": rng.expand if rng and rng.expand is not None else False,
        }
        return payload

is_valid()

Validate the directory query.

Source code in accumulate\models\queries.py
141
142
143
144
145
146
def is_valid(self):
    """Validate the directory query."""
    if self.range and not (
        self.range.start is not None or self.range.count is not None
    ):
        raise AccumulateError("Invalid RangeOptions: Must include `start` or `count`.")

to_dict()

Ensure the query outputs correctly formatted range parameters.

Source code in accumulate\models\queries.py
148
149
150
151
152
153
154
155
156
157
158
159
160
def to_dict(self) -> dict:
    """Ensure the query outputs correctly formatted range parameters."""
    data = super().to_dict()

    #  Always include `range` (Fix for API error)
    data["range"] = {
        "start": self.range.start if self.range and self.range.start is not None else 0,
        "count": self.range.count if self.range and self.range.count is not None else 10,
        "from_end": self.range.from_end if self.range and self.range.from_end is not None else False,
        "expand": self.range.expand if self.range and self.range.expand is not None else False,
    }

    return data

MessageHashSearchQuery

Bases: Query

Represents a message hash search query.

Source code in accumulate\models\queries.py
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
class MessageHashSearchQuery(Query):
    """Searches for a message by its hash."""

    def __init__(self, hash: bytes):
        super().__init__(QueryType.MESSAGE_HASH_SEARCH)
        self.hash = hash

    def is_valid(self):
        """Raise AccumulateError when no hash was supplied."""
        if not self.hash:
            raise AccumulateError("Hash is required for a message hash search query.")

    def to_dict(self) -> dict:
        """Serialize, hex-encoding the hash bytes for transport."""
        payload = super().to_dict()
        payload["hash"] = self.hash.hex() if self.hash else None
        return payload

is_valid()

Validate the message hash search query.

Source code in accumulate\models\queries.py
396
397
398
399
def is_valid(self):
    """Validate the message hash search query."""
    if not self.hash:
        raise AccumulateError("Hash is required for a message hash search query.") #

PendingQuery

Bases: Query

Represents a pending query.

Source code in accumulate\models\queries.py
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
class PendingQuery(Query):
    """Lists pending transactions for an account, with optional paging."""

    def __init__(self, range: Optional[RangeOptions] = None):
        super().__init__(QueryType.PENDING)
        self.range = range

    def is_valid(self):
        """Raise AccumulateError when the supplied range is unusable."""
        rng = self.range
        if rng and rng.start is None and rng.count is None:
            raise AccumulateError("Invalid RangeOptions: Must include `start` or `count`.")

    def to_dict(self) -> dict:
        """Serialize, always emitting a fully populated `range` object.

        The API rejects requests without a range, so any field left unset
        falls back to its default (start=0, count=10, flags False).
        """
        payload = super().to_dict()
        range_params = {"start": 0, "count": 10, "from_end": False, "expand": False}
        if self.range is not None:
            for key in range_params:
                value = getattr(self.range, key)
                if value is not None:
                    range_params[key] = value
        payload["range"] = range_params
        return payload

is_valid()

Validate the pending query.

Source code in accumulate\models\queries.py
171
172
173
174
175
176
def is_valid(self):
    """Validate the pending query."""
    if self.range and not (
        self.range.start is not None or self.range.count is not None
    ):
        raise AccumulateError("Invalid RangeOptions: Must include `start` or `count`.")

to_dict()

Ensure the query outputs correctly formatted range parameters.

Source code in accumulate\models\queries.py
178
179
180
181
182
183
184
185
186
187
188
189
190
def to_dict(self) -> dict:
    """Ensure the query outputs correctly formatted range parameters."""
    data = super().to_dict()

    #  Always include `range` (Fix for API error)
    data["range"] = {
        "start": self.range.start if self.range and self.range.start is not None else 0,
        "count": self.range.count if self.range and self.range.count is not None else 10,
        "from_end": self.range.from_end if self.range and self.range.from_end is not None else False,
        "expand": self.range.expand if self.range and self.range.expand is not None else False,
    }

    return data

PublicKeyHashSearchQuery

Bases: Query

Represents a public key hash search query.

Source code in accumulate\models\queries.py
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
class PublicKeyHashSearchQuery(Query):
    """Searches for a signer by the hash of its public key."""

    def __init__(self, public_key_hash: str):
        super().__init__(QueryType.PUBLIC_KEY_HASH_SEARCH)
        self.public_key_hash = public_key_hash

    def is_valid(self):
        """Raise AccumulateError when the hash is missing."""
        if not self.public_key_hash:
            raise AccumulateError("Public key hash is required for a public key hash search query.")

    def to_dict(self) -> dict:
        """Serialize directly in JSON-RPC camelCase form (bypasses the base serializer)."""
        return {
            "queryType": self.query_type.to_rpc_format(),
            "publicKeyHash": self.public_key_hash,
        }

is_valid()

Validate the public key hash search query.

Source code in accumulate\models\queries.py
354
355
356
357
def is_valid(self):
    """Validate the public key hash search query."""
    if not self.public_key_hash:
        raise AccumulateError("Public key hash is required for a public key hash search query.")

to_dict()

Convert the query to a dictionary ensuring correct JSON-RPC format.

Source code in accumulate\models\queries.py
359
360
361
362
363
364
def to_dict(self) -> dict:
    """Convert the query to a dictionary ensuring correct JSON-RPC format."""
    return {
        "queryType": self.query_type.to_rpc_format(),  #  Ensure camelCase format
        "publicKeyHash": self.public_key_hash,  #  Ensure hex format (string)
    }

PublicKeySearchQuery

Bases: Query

Represents a public key search query.

Source code in accumulate\models\queries.py
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
class PublicKeySearchQuery(Query):
    """Represents a public key search query.

    Accepts a public key in any of four textual formats (MH address,
    0x-prefixed hex, raw hex, Base58) and normalizes it to raw bytes
    before serialization.
    """

    def __init__(self, public_key: str, signature_type: Optional[Union[int, SignatureType]] = SignatureType.ED25519):
        super().__init__(QueryType.PUBLIC_KEY_SEARCH)

        logger.debug(f" Received Public Key: {public_key}")

        #  Auto-detect and convert the public key to HEX format
        self.public_key = self._convert_to_hex(public_key)

        #  Ensure signature_type is stored as a SignatureType enum and converted to RPC format
        if isinstance(signature_type, int):
            self.signature_type = SignatureType.from_value(signature_type)
        elif isinstance(signature_type, SignatureType):
            self.signature_type = signature_type
        else:
            raise ValueError(f"Invalid signature type: {signature_type}")

    def _convert_to_hex(self, public_key: str) -> bytes:
        """Detect and convert the provided public key to HEX.

        Branch order matters: MH prefix first, then 0x-hex, then raw hex,
        finally Base58 as the fallback.
        """
        if public_key.startswith("MH"):  
            parsed = parse_mh_address(public_key)
            if not isinstance(parsed.hash, bytes):
                raise ValueError(f" Invalid MH address: {public_key}")
            logger.debug(f" Parsed MH public key: {parsed.hash.hex()}")
            return parsed.hash

        elif public_key.startswith("0x"):  
            logger.debug(f" Parsed hex address: {public_key[2:]}")
            return bytes.fromhex(public_key[2:])  #  Remove '0x' and convert hex

        # NOTE(review): a Base58 key consisting only of hex characters
        # (e.g. "abc123") will match this branch and be parsed as hex, and
        # an odd-length hex string raises bytes.fromhex's raw ValueError
        # rather than the friendlier message below — confirm intended.
        elif re.fullmatch(r"[0-9a-fA-F]+", public_key):  
            logger.debug(f" Parsed raw hex: {public_key}")
            return bytes.fromhex(public_key)  #  Convert raw hex

        else:
            try:
                #  Convert Base58 public key to HEX
                decoded_bytes = base58.b58decode(public_key)  
                logger.debug(f" Parsed Base58 public key (converted to HEX): {decoded_bytes.hex()}")
                return decoded_bytes  # Store as bytes
            except Exception as e:
                logger.error(f" Invalid public key format: {public_key}, Error: {e}")
                raise ValueError(f"Invalid public key format: {public_key}")

    def is_valid(self):
        """Validate the public key search query.

        Raises:
            AccumulateError: if the key is empty or the signature type is
                not a SignatureType enum member.
        """
        if not self.public_key:
            logger.error(" Public key is required for a public key search query.")
            raise AccumulateError("Public key is required for a public key search query.")
        if not isinstance(self.signature_type, SignatureType):
            logger.error(" Signature type must be a valid SignatureType enum.")
            raise AccumulateError("Signature type must be a valid SignatureType enum.")

    def to_dict(self) -> dict:
        """Ensure the query outputs HEX for `publicKey` and correctly formatted `Type` field.

        Bypasses the base serializer; note the capitalized "Type" key,
        which the server apparently expects — do not lowercase it.
        """
        public_key_hex = self.public_key.hex()  #  Convert bytes to HEX
        signature_type_rpc = self.signature_type.to_rpc_format()  #  Convert signature type to expected RPC format

        logger.debug(f" FINAL HEX public key before sending: {public_key_hex}")  
        logger.debug(f" FINAL SignatureType before sending: {signature_type_rpc}")  

        return {
            "queryType": "publicKeySearch",  #  Explicit string
            "publicKey": public_key_hex,  #  Send HEX, NOT Base58
            "Type": signature_type_rpc,  #  Send SignatureType as a string
        }

_convert_to_hex(public_key)

Detect and convert the provided public key to HEX.

Source code in accumulate\models\queries.py
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
def _convert_to_hex(self, public_key: str) -> bytes:
    """Detect and convert the provided public key to HEX."""
    if public_key.startswith("MH"):  
        parsed = parse_mh_address(public_key)
        if not isinstance(parsed.hash, bytes):
            raise ValueError(f" Invalid MH address: {public_key}")
        logger.debug(f" Parsed MH public key: {parsed.hash.hex()}")
        return parsed.hash

    elif public_key.startswith("0x"):  
        logger.debug(f" Parsed hex address: {public_key[2:]}")
        return bytes.fromhex(public_key[2:])  #  Remove '0x' and convert hex

    elif re.fullmatch(r"[0-9a-fA-F]+", public_key):  
        logger.debug(f" Parsed raw hex: {public_key}")
        return bytes.fromhex(public_key)  #  Convert raw hex

    else:
        try:
            #  Convert Base58 public key to HEX
            decoded_bytes = base58.b58decode(public_key)  
            logger.debug(f" Parsed Base58 public key (converted to HEX): {decoded_bytes.hex()}")
            return decoded_bytes  # Store as bytes
        except Exception as e:
            logger.error(f" Invalid public key format: {public_key}, Error: {e}")
            raise ValueError(f"Invalid public key format: {public_key}")

is_valid()

Validate the public key search query.

Source code in accumulate\models\queries.py
323
324
325
326
327
328
329
330
def is_valid(self):
    """Validate the public key search query."""
    if not self.public_key:
        logger.error(" Public key is required for a public key search query.")
        raise AccumulateError("Public key is required for a public key search query.")
    if not isinstance(self.signature_type, SignatureType):
        logger.error(" Signature type must be a valid SignatureType enum.")
        raise AccumulateError("Signature type must be a valid SignatureType enum.")

to_dict()

Ensure the query outputs HEX for publicKey and correctly formatted Type field.

Source code in accumulate\models\queries.py
332
333
334
335
336
337
338
339
340
341
342
343
344
def to_dict(self) -> dict:
    """Ensure the query outputs HEX for `publicKey` and correctly formatted `Type` field."""
    public_key_hex = self.public_key.hex()  #  Convert bytes to HEX
    signature_type_rpc = self.signature_type.to_rpc_format()  #  Convert signature type to expected RPC format

    logger.debug(f" FINAL HEX public key before sending: {public_key_hex}")  
    logger.debug(f" FINAL SignatureType before sending: {signature_type_rpc}")  

    return {
        "queryType": "publicKeySearch",  #  Explicit string
        "publicKey": public_key_hex,  #  Send HEX, NOT Base58
        "Type": signature_type_rpc,  #  Send SignatureType as a string
    }

Query

Base class for all query types.

Source code in accumulate\models\queries.py
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
class Query:
    """Base class for all query types.

    Subclasses set a QueryType and may override is_valid/to_dict.
    """

    def __init__(self, query_type: QueryType, params: Optional[dict] = None):
        self.query_type = query_type
        self.params = params or {}

    def is_valid(self) -> bool:
        """Return True when a query type has been set."""
        return bool(self.query_type)

    def to_dict(self) -> dict:
        """Serialize with `queryType` in RPC form, merged with extra params."""
        return {"queryType": self.query_type.to_rpc_format(), **self.params}

is_valid()

Validate the query parameters.

Source code in accumulate\models\queries.py
26
27
28
def is_valid(self) -> bool:
    """Validate the query parameters."""
    return bool(self.query_type)

to_dict()

Convert the query to a dictionary ensuring queryType is formatted correctly.

Source code in accumulate\models\queries.py
30
31
32
33
34
35
36
def to_dict(self) -> dict:
    """Convert the query to a dictionary ensuring queryType is formatted correctly."""
    query_dict = {
        "queryType": self.query_type.to_rpc_format(),  # Convert to lowercase string
    }
    query_dict.update(self.params)  # Merge additional parameters
    return query_dict

Models & Schemas

records

AccountRecord dataclass

Bases: Record

Represents an account record.

Source code in accumulate\models\records.py
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
@dataclass
class AccountRecord(Record):
    """Represents an account record.

    Holds the raw account state plus optional directory/pending record
    ranges, a receipt, and the timestamp of the last block.
    """
    account: Dict[str, Any] = field(default_factory=dict)
    directory: Optional[RecordRange[UrlRecord]] = field(default=None)
    pending: Optional[RecordRange[TxIDRecord]] = field(default=None)
    receipt: Optional[Dict[str, Any]] = field(default_factory=dict)
    last_block_time: Optional[datetime] = field(default=None)

    def to_dict(self) -> dict:
        """Serialize to a plain dict; nested ranges are serialized recursively."""
        return {
            "account": self.account,
            "directory": self.directory.to_dict() if self.directory else None,
            "pending": self.pending.to_dict() if self.pending else None,
            "receipt": self.receipt,
            "last_block_time": self.last_block_time.isoformat() if self.last_block_time else None,
        }

    @classmethod
    def from_dict(cls, data: dict) -> "AccountRecord":
        """Deserialize from a decoded JSON dict.

        Fix: `pending` is now read from the "pending" key (the key that
        to_dict emits, restoring round-trip symmetry), falling back to
        the legacy "records" key for API responses that use it.
        """
        pending_data = data.get("pending") or data.get("records")
        return cls(
            account=data["account"],
            directory=RecordRange.from_dict(data["directory"], UrlRecord) if data.get("directory") else None,
            pending=RecordRange.from_dict(pending_data, TxIDRecord) if pending_data else None,
            receipt=data.get("receipt", {}),
            last_block_time=datetime.fromisoformat(data["last_block_time"]) if data.get("last_block_time") else None,
        )

    @property
    def balance(self) -> float:
        """Return the account balance in whole ACME tokens.

        The raw integer balance is divided by 1e8, i.e. 1 ACME equals
        10**8 base units (despite the historical "micro-ACME" wording).
        """
        raw_balance = int(self.account.get("balance", 0))  # Raw integer base units
        return raw_balance / 1e8

balance property

Convert the raw integer balance (stored in base units of 10⁻⁸ ACME) to whole ACME tokens.

AccumulateError

Bases: Exception

Base class for all custom exceptions in the Accumulate client.

Source code in accumulate\models\records.py
10
11
12
class AccumulateError(Exception):
    """Root of the Accumulate client's exception hierarchy.

    Catch this type to handle any error raised by the client library.
    """

ChainEntryRecord dataclass

Bases: Record

Represents a chain entry record.

Source code in accumulate\models\records.py
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
@dataclass
class ChainEntryRecord(Record):
    """A single entry within a chain, plus optional receipt metadata."""
    name: Optional[str] = None
    type: Optional[str] = None
    count: Optional[int] = None
    state: List[bytes] = field(default_factory=list)
    account: Optional[str] = None
    index: Optional[int] = None
    entry: Optional[str] = None
    receipt: Optional[Dict[str, Any]] = field(default_factory=dict)
    last_block_time: Optional[datetime] = None

    def to_dict(self) -> dict:
        """Serialize to a plain dictionary; the timestamp becomes ISO-8601."""
        ts = self.last_block_time
        return {
            "name": self.name,
            "type": self.type,
            "count": self.count,
            "state": self.state,
            "account": self.account,
            "index": self.index,
            "entry": self.entry,
            "receipt": self.receipt,
            "last_block_time": ts.isoformat() if ts else None,
        }

    @classmethod
    def from_dict(cls, data: dict) -> "ChainEntryRecord":
        """Build a ChainEntryRecord from a decoded JSON dictionary."""
        raw_time = data.get("last_block_time")
        return cls(
            name=data.get("name"),
            type=data.get("type"),
            count=data.get("count"),
            state=data.get("state", []),
            account=data.get("account"),
            index=data.get("index"),
            entry=data.get("entry"),
            receipt=data.get("receipt", {}),
            last_block_time=datetime.fromisoformat(raw_time) if raw_time else None,
        )

ChainRecord dataclass

Bases: Record

Represents a chain record.

Source code in accumulate\models\records.py
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
@dataclass
class ChainRecord(Record):
    """Summary of a chain: its name, type, entry count and state roots."""
    name: Optional[str] = None          # Optional to satisfy dataclass field ordering
    type: Optional[str] = None          # Optional to satisfy dataclass field ordering
    count: Optional[int] = None         # Optional to satisfy dataclass field ordering
    state: List[bytes] = field(default_factory=list)  # Defaults to empty list
    last_block_time: Optional[datetime] = None        # Optional timestamp

    def to_dict(self) -> dict:
        """Serialize to a plain dictionary; the timestamp becomes ISO-8601."""
        ts = self.last_block_time
        return {
            "name": self.name,
            "type": self.type,
            "count": self.count,
            "state": self.state,
            "last_block_time": ts.isoformat() if ts else None,
        }

    @classmethod
    def from_dict(cls, data: dict) -> "ChainRecord":
        """Build a ChainRecord from a decoded JSON dictionary."""
        raw_time = data.get("last_block_time")
        return cls(
            name=data.get("name"),
            type=data.get("type"),
            count=data.get("count"),
            state=data.get("state", []),
            last_block_time=datetime.fromisoformat(raw_time) if raw_time else None,
        )

from_dict(data) classmethod

Creates a ChainRecord instance from a dictionary.

Source code in accumulate\models\records.py
155
156
157
158
159
160
161
162
163
164
@classmethod
def from_dict(cls, data: dict) -> "ChainRecord":
    """Creates a ChainRecord instance from a dictionary."""
    # Missing keys fall back to None; the timestamp arrives as an ISO-8601 string.
    return cls(
        name=data.get("name"),
        type=data.get("type"),
        count=data.get("count"),
        state=data.get("state", []),  # Defaults to an empty list if not provided
        last_block_time=datetime.fromisoformat(data["last_block_time"]) if data.get("last_block_time") else None,
    )

to_dict()

Converts the ChainRecord to a dictionary.

Source code in accumulate\models\records.py
145
146
147
148
149
150
151
152
153
def to_dict(self) -> dict:
    """Converts the ChainRecord to a dictionary."""
    # `state` is emitted as-is (list of raw values), mirroring from_dict,
    # and the timestamp is rendered as an ISO-8601 string.
    return {
        "name": self.name,
        "type": self.type,
        "count": self.count,
        "state": self.state,
        "last_block_time": self.last_block_time.isoformat() if self.last_block_time else None,
    }

KeyRecord dataclass

Bases: Record

Represents a key record.

Source code in accumulate\models\records.py
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
@dataclass
class KeyRecord(Record):
    """Represents a key record."""
    authority: Optional[str] = None
    signer: Optional[str] = None
    version: Optional[int] = None
    index: Optional[int] = None
    entry: Optional[Dict[str, Any]] = field(default_factory=dict)

    # Field names serialized by to_dict/from_dict, in output order.
    def to_dict(self) -> dict:
        """Serialize this KeyRecord into a plain dictionary."""
        return {
            name: getattr(self, name)
            for name in ("authority", "signer", "version", "index", "entry")
        }

    @classmethod
    def from_dict(cls, data: dict) -> "KeyRecord":
        """Build a KeyRecord from a plain dictionary; missing keys become None."""
        return cls(
            authority=data.get("authority"),
            signer=data.get("signer"),
            version=data.get("version"),
            index=data.get("index"),
            entry=data.get("entry"),
        )

MessageRecord dataclass

Bases: Record

Represents a message record.

Source code in accumulate\models\records.py
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
@dataclass
class MessageRecord(Record):
    """Represents a message record."""
    id: Optional[str] = None
    message: Optional[dict] = field(default_factory=dict)
    status: Optional[str] = None
    result: Optional[dict] = field(default_factory=dict)
    received: Optional[int] = None
    produced: Optional[RecordRange[TxIDRecord]] = field(default=None)
    cause: Optional[RecordRange[TxIDRecord]] = field(default=None)
    signatures: Optional[RecordRange["SignatureSetRecord"]] = field(default=None)
    historical: Optional[bool] = None
    last_block_time: Optional[datetime] = None

    def to_dict(self) -> dict:
        """Serialize to a dictionary, omitting keys whose value is None."""
        serialized = {
            "id": self.id,
            "message": self.message or {},
            "status": self.status,
            "result": self.result or {},
            "received": self.received,
            "produced": self.produced.to_dict() if self.produced else None,
            "cause": self.cause.to_dict() if self.cause else None,
            "signatures": self.signatures.to_dict() if self.signatures else None,
            "historical": self.historical,
            "last_block_time": self.last_block_time.isoformat() if self.last_block_time else None,
        }
        # Drop None entries so absent fields never appear in the output.
        return {k: v for k, v in serialized.items() if v is not None}

    @classmethod
    def from_dict(cls, data: dict) -> "MessageRecord":
        """Build a MessageRecord from a plain dictionary."""
        def _range(key, item_cls):
            # Presence check (not truthiness): a present-but-None value is
            # still handed to RecordRange.from_dict, which tolerates None.
            return RecordRange.from_dict(data[key], item_cls) if key in data else None

        raw_time = data.get("last_block_time")
        return cls(
            id=data.get("id"),
            message=data.get("message", {}),
            status=data.get("status"),
            result=data.get("result", {}),
            received=data.get("received"),
            produced=_range("produced", TxIDRecord),
            cause=_range("cause", TxIDRecord),
            signatures=_range("signatures", SignatureSetRecord),
            historical=data.get("historical"),
            last_block_time=datetime.fromisoformat(raw_time) if raw_time else None,
        )

Record dataclass

Base class for records in the Accumulate blockchain.

Source code in accumulate\models\records.py
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
@dataclass
class Record:
    """Base class for records in the Accumulate blockchain."""
    record_type: str = "UNKNOWN"  # discriminator string for the concrete record kind
    data: Dict[str, Any] = field(default_factory=dict)  # raw record payload

    def to_dict(self) -> Dict[str, Any]:
        """Serialize this record into a plain dictionary."""
        return {
            "record_type": self.record_type,
            "data": self.data,
        }

    @classmethod
    def from_dict(cls, data: Dict[str, Any]) -> "Record":
        """Build a Record from a plain dictionary.

        Missing keys fall back to the dataclass defaults.
        """
        # A previous revision lazily imported accumulate.models.enums.RecordType
        # here (to dodge a circular import) but never used it; the dead import
        # has been removed.
        return cls(
            record_type=data.get("record_type", "UNKNOWN"),
            data=data.get("data", {}),
        )

RecordRange dataclass

Bases: Generic[T]

Represents a range of records.

Source code in accumulate\models\records.py
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
@dataclass
class RecordRange(Generic[T]):
    """Represents a range of records."""
    records: List[T] = field(default_factory=list)
    start: Optional[int] = None
    total: Optional[int] = None
    last_block_time: Optional[datetime] = None
    # Explicit element type of `records`; defaults to the generic Record base.
    item_type: Type[T] = Record

    def to_dict(self) -> dict:
        """Serialize the range; each contained record serializes itself."""
        timestamp = self.last_block_time.isoformat() if self.last_block_time else None
        return {
            "records": [entry.to_dict() for entry in self.records],
            "start": self.start,
            "total": self.total,
            "last_block_time": timestamp,
        }

    @classmethod
    def from_dict(cls, data: Optional[dict], record_cls: Type[T]) -> "RecordRange[T]":
        """Build a typed RecordRange; a None payload yields an empty range."""
        if data is None:
            return cls(records=[], start=None, total=None, last_block_time=None, item_type=record_cls)
        raw_time = data.get("last_block_time")
        entries = [record_cls.from_dict(item) for item in data.get("records", [])]
        return cls(
            records=entries,
            start=data.get("start"),
            total=data.get("total"),
            last_block_time=datetime.fromisoformat(raw_time) if raw_time else None,
            item_type=record_cls,
        )

SignatureSetRecord dataclass

Bases: Record

Represents a signature set record.

Source code in accumulate\models\records.py
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
@dataclass
class SignatureSetRecord(Record):
    """Represents a signature set record."""
    account: Optional[dict] = field(default_factory=dict)
    signatures: Optional[RecordRange[MessageRecord]] = field(default=None)

    def to_dict(self) -> dict:
        """Serialize this signature set into a plain dictionary."""
        sigs = self.signatures.to_dict() if self.signatures else None
        return {"account": self.account, "signatures": sigs}

    @classmethod
    def from_dict(cls, data: dict) -> "SignatureSetRecord":
        """Build a SignatureSetRecord from a plain dictionary."""
        # Presence check: a present-but-None value is delegated to
        # RecordRange.from_dict, which handles None gracefully.
        sigs = RecordRange.from_dict(data["signatures"], MessageRecord) if "signatures" in data else None
        return cls(account=data.get("account", {}), signatures=sigs)

TxIDRecord dataclass

Bases: Record

Represents a TxID record.

Source code in accumulate\models\records.py
59
60
61
62
63
64
65
66
67
68
69
@dataclass
class TxIDRecord(Record):
    """Represents a TxID record."""
    value: Optional[str] = None  # transaction ID string

    def to_dict(self) -> dict:
        """Serialize this record as a single-key dictionary."""
        return {"value": self.value}

    @classmethod
    def from_dict(cls, data: dict) -> "TxIDRecord":
        """Build a TxIDRecord; a missing "value" key becomes None."""
        raw = data.get("value")
        return cls(value=raw)

UrlRecord dataclass

Bases: Record

Represents a URL record.

Source code in accumulate\models\records.py
46
47
48
49
50
51
52
53
54
55
56
@dataclass
class UrlRecord(Record):
    """Represents a URL record."""
    value: Optional[str] = None  # account URL string

    def to_dict(self) -> dict:
        """Serialize this record as a single-key dictionary."""
        return {"value": self.value}

    @classmethod
    def from_dict(cls, data: dict) -> "UrlRecord":
        """Build a UrlRecord; a missing "value" key becomes None."""
        raw = data.get("value")
        return cls(value=raw)

range_of(record_range, item_type)

Validate and cast a RecordRange to a specific item type.

Source code in accumulate\models\records.py
14
15
16
17
18
19
20
21
def range_of(record_range: "RecordRange", item_type: Type[T]) -> "RecordRange[T]":
    """Validate and cast a RecordRange to a specific item type."""
    # Pure runtime check — no conversion is performed; the same range object
    # is returned when every element already has the requested type.
    for entry in record_range.records:
        if isinstance(entry, item_type):
            continue
        raise AccumulateError(
            f"RecordRange contains items of an incorrect type. Expected {item_type}, "
            f"but got {[type(record) for record in record_range.records]}"
        )
    return record_range

accounts

Account

Bases: UnionValue

Base class for all account types.

Source code in accumulate\models\accounts.py
12
13
14
15
16
17
18
19
20
21
22
class Account(UnionValue):
    """Base class for all account types."""

    def type(self) -> str:
        # Abstract: concrete accounts return their account-type identifier.
        raise NotImplementedError("Account type not implemented")

    def get_url(self) -> Any:
        # Abstract: concrete accounts return their account URL.
        raise NotImplementedError("get_url() not implemented")

    def strip_url(self) -> None:
        # Abstract: concrete accounts normalize their URL in place.
        raise NotImplementedError("strip_url() not implemented")

FullAccount

Bases: Account

Base class for accounts with authentication.

Source code in accumulate\models\accounts.py
25
26
27
28
29
30
31
32
class FullAccount(Account):
    """Base class for accounts with authentication."""

    def __init__(self, account_auth: Optional['AccountAuth'] = None):
        # Fall back to a fresh AccountAuth so the attribute is never None.
        self.account_auth = account_auth or AccountAuth()

    def get_auth(self) -> 'AccountAuth':
        """Return this account's authentication settings."""
        return self.account_auth

KeyBook

Bases: FullAccount

Source code in accumulate\models\accounts.py
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
class KeyBook(FullAccount):
    """Key book account: a validated URL plus a count of key pages.

    `get_signers()` derives one key-page URL per page beneath the book URL.
    """

    def __init__(self, url: Any, account_auth: Optional['AccountAuth'] = None, page_count: int = 0, book_type: str = ''):
        print(f"DEBUG: Initializing KeyBook with URL: {url}, page_count: {page_count}, book_type: {book_type}")
        super().__init__(account_auth)
        self.url = self._ensure_url(url)

        # Enforce additional validation specific to KeyBook
        print(f"DEBUG: Ensured URL in KeyBook: {self.url}")
        self._validate_key_book_url()

        self.page_count = page_count  # number of key pages; drives get_signers()
        self.book_type = book_type
        print(f"DEBUG: KeyBook initialized successfully with URL: {self.url}")

    def _ensure_url(self, url: Any) -> Any:
        """Parse strings into URL objects and sanity-check existing URLs.

        Raises:
            ValueError: if an existing URL's authority contains '@' or a
                redundant 'acc://' prefix.
        """
        from accumulate.utils.url import URL
        if isinstance(url, str):
            print(f"DEBUG: Parsing URL from string in _ensure_url: {url}")  # strings are trimmed then parsed
            return URL.parse(url.strip())

        if isinstance(url, URL):
            # Normalize existing URL objects
            if "@" in url.authority or url.authority.endswith("@"):
                raise ValueError(f"Invalid URL: '@' not allowed in authority: {url.authority}")
            if url.authority.startswith("acc://"):
                raise ValueError(f"Invalid URL: Redundant 'acc://' in authority: {url.authority}")

        return url

    def _validate_key_book_url(self):
        """Validation specific to KeyBook URLs."""
        # Ensure the URL path is not empty or just "/"
        if not self.url.path or self.url.path == "/":
            print(f"ERROR: Invalid KeyBook URL - Missing book name in path: {self.url}")
            raise ValueError(f"Invalid KeyBook URL: {self.url} must include a book name in the path.")

        # Ensure the path does not contain invalid characters
        if "@" in self.url.path or " " in self.url.path:
            print(f"ERROR: Invalid KeyBook URL - Invalid characters in path: {self.url}")
            raise ValueError(f"Invalid KeyBook URL: {self.url} contains invalid characters in the path.")

        # Ensure authority is not empty or invalid
        if not self.url.authority or not self.url.authority.strip():
            print(f"ERROR: Invalid KeyBook URL - Missing or empty authority: {self.url}")
            raise ValueError(f"Invalid KeyBook URL: Authority must not be empty in {self.url}")

        # Check for invalid domain names
        # NOTE(review): rejecting every authority that ends in ".com" looks
        # overly broad — confirm this is intentional.
        if self.url.authority.startswith(".") or self.url.authority.endswith(".com"):
            print(f"ERROR: Invalid KeyBook URL - Invalid domain in authority: {self.url}")
            raise ValueError(f"Invalid KeyBook URL: {self.url} contains invalid domain in authority.")

    def get_url(self) -> Any:
        """Return the validated KeyBook URL."""
        print(f"DEBUG: Retrieving URL in KeyBook: {self.url}")
        return self.url

    def strip_url(self) -> None:
        """Replace the URL with its stripped form (extras removed)."""
        print(f"DEBUG: Stripping extras from URL in KeyBook: {self.url}")
        self.url = self.url.strip_extras()
        print(f"DEBUG: URL after stripping: {self.url}")

    def get_signers(self) -> List[Any]:
        """Return one key-page URL per page: <book>/0 .. <book>/<page_count-1>."""
        print(f"DEBUG: Generating signers for KeyBook with page_count: {self.page_count}")
        signers = [self._format_key_page_url(self.url, i) for i in range(self.page_count)]
        print(f"DEBUG: Generated signers in KeyBook: {[str(signer) for signer in signers]}")
        return signers

    def _format_key_page_url(self, book_url: URL, index: int) -> URL:
        """Build the URL of key page `index` beneath `book_url`."""
        if not book_url.authority or not book_url.path:
            raise ValueError(f"Invalid KeyBook URL: {book_url}")

        normalized_path = f"{book_url.path.rstrip('/')}/{index}"
        return URL(authority=book_url.authority, path=normalized_path)

_validate_key_book_url()

Validation specific to KeyBook URLs.

Source code in accumulate\models\accounts.py
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
def _validate_key_book_url(self):
    """Validation specific to KeyBook URLs."""
    # Ensure the URL path is not empty or just "/"
    if not self.url.path or self.url.path == "/":
        print(f"ERROR: Invalid KeyBook URL - Missing book name in path: {self.url}")
        raise ValueError(f"Invalid KeyBook URL: {self.url} must include a book name in the path.")

    # Ensure the path does not contain invalid characters
    if "@" in self.url.path or " " in self.url.path:
        print(f"ERROR: Invalid KeyBook URL - Invalid characters in path: {self.url}")
        raise ValueError(f"Invalid KeyBook URL: {self.url} contains invalid characters in the path.")

    # Ensure authority is not empty or invalid
    if not self.url.authority or not self.url.authority.strip():
        print(f"ERROR: Invalid KeyBook URL - Missing or empty authority: {self.url}")
        raise ValueError(f"Invalid KeyBook URL: Authority must not be empty in {self.url}")

    # Check for invalid domain names
    # NOTE(review): rejecting every authority ending in ".com" looks overly
    # broad — confirm this is intentional.
    if self.url.authority.startswith(".") or self.url.authority.endswith(".com"):
        print(f"ERROR: Invalid KeyBook URL - Invalid domain in authority: {self.url}")
        raise ValueError(f"Invalid KeyBook URL: {self.url} contains invalid domain in authority.")

LiteIdentity

Bases: Account

Source code in accumulate\models\accounts.py
 71
 72
 73
 74
 75
 76
 77
 78
 79
 80
 81
 82
 83
 84
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
class LiteIdentity(Account):
    """Lite identity account: a key-derived identity with a credit balance."""

    def __init__(self, url: Any, credit_balance: int = 0, last_used_on: Optional[int] = None):
        if url is None:
            raise ValueError("URL cannot be None.")
        if credit_balance < 0:
            raise ValueError("Credit balance cannot be negative.")
        self.url = self._ensure_url(url)
        self.credit_balance = credit_balance
        self.last_used_on = last_used_on  # presumably a last-use timestamp — TODO confirm units

    def _ensure_url(self, url: Any) -> Any:
        """Parse a string into a URL; pass other values through unchanged."""
        if isinstance(url, str):
            from accumulate.utils.url import URL
            return URL.parse(url)
        return url

    def get_url(self) -> Any:
        """Return the identity URL."""
        return self.url

    def strip_url(self) -> None:
        """Replace the URL with its stripped form."""
        self.url = self.url.strip_extras()

    def get_credit_balance(self) -> int:
        """Return the current credit balance."""
        return self.credit_balance

    def get_signature_threshold(self) -> int:
        """A lite identity always requires exactly one signature."""
        return 1

    def entry_by_key(self, key: bytes) -> Tuple[int, Optional['LiteIdentity'], bool]:
        """Match `key` against this identity.

        The identity key is the first 20 bytes of SHA-256(url.authority); a
        key matches when the first 20 bytes of SHA-256(key) equal it.

        Returns:
            (0, self, True) on a match, (-1, None, False) otherwise.
        """
        key_hash = sha256(key).digest()
        lite_key = self._parse_lite_identity(self.url)

        print(f"[DEBUG] Calculated key_hash[:20]: {key_hash[:20].hex()}")
        print(f"[DEBUG] Derived lite_key: {lite_key.hex()}")

        if lite_key == key_hash[:20]:
            print(f"[DEBUG] Key match successful.")
            return 0, self, True

        print(f"[DEBUG] Key match failed.")
        return -1, None, False


    @staticmethod
    def _parse_lite_identity(url: Any) -> bytes:
        """Derive the 20-byte lite-identity key from the URL authority."""
        return sha256(url.authority.encode()).digest()[:20]

    def __repr__(self):
        """Custom representation for LiteIdentity."""
        return (
            f"<LiteIdentity url={self.url}, "
            f"credit_balance={self.credit_balance}, "
            f"last_used_on={self.last_used_on}>"
        )

__repr__()

Custom representation for LiteIdentity.

Source code in accumulate\models\accounts.py
118
119
120
121
122
123
124
def __repr__(self):
    """Custom representation for LiteIdentity."""
    # Debug-oriented: shows URL, credit balance, and last-used marker.
    return (
        f"<LiteIdentity url={self.url}, "
        f"credit_balance={self.credit_balance}, "
        f"last_used_on={self.last_used_on}>"
    )

LiteTokenAccount

Bases: Account

Source code in accumulate\models\accounts.py
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
class LiteTokenAccount(Account):
    """Lite token account: a URL, a token URL, and a Decimal balance."""

    def __init__(self, url: Any, token_url: Any, balance: Decimal = Decimal("0.00")):
        if url is None or token_url is None:
            raise ValueError("URL and Token URL cannot be None.")
        if balance < 0:
            raise ValueError("Balance cannot be negative.")

        self.url = self._ensure_url(url)
        self.token_url = self._ensure_url(token_url)
        # The token URL must carry a path component identifying the token.
        if not self.token_url.path:
            raise ValueError(f"Invalid lite token account URL: {self.token_url}")

        self.balance = balance

    def _ensure_url(self, url: Any) -> Any:
        """Ensure the URL is valid or parse it."""
        if not isinstance(url, str):
            return url
        from accumulate.utils.url import URL
        return URL.parse(url)

    def get_url(self) -> Any:
        """Return the account URL."""
        return self.url

    def strip_url(self) -> None:
        """Replace the URL with its stripped form."""
        self.url = self.url.strip_extras()

    def token_balance(self) -> Decimal:
        """Return the current token balance."""
        return self.balance

    def credit_tokens(self, amount: Decimal) -> bool:
        """Add `amount` to the balance; non-positive amounts are rejected."""
        if amount <= 0:
            return False
        self.balance += amount
        return True

    def can_debit_tokens(self, amount: Decimal) -> bool:
        """True when `amount` is positive and covered by the balance."""
        return amount > 0 and self.balance >= amount

    def debit_tokens(self, amount: Decimal) -> bool:
        """Subtract `amount` when permitted; report whether it happened."""
        if not self.can_debit_tokens(amount):
            return False
        self.balance -= amount
        return True

_ensure_url(url)

Ensure the URL is valid or parse it.

Source code in accumulate\models\accounts.py
141
142
143
144
145
146
def _ensure_url(self, url: Any) -> Any:
    """Ensure the URL is valid or parse it."""
    # Lazy import avoids a circular dependency with accumulate.utils.url.
    if isinstance(url, str):
        from accumulate.utils.url import URL
        return URL.parse(url)
    return url

UnknownAccount

Bases: Account

Source code in accumulate\models\accounts.py
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
class UnknownAccount(Account):
    """Account of an unrecognized type; only its URL is tracked."""

    def __init__(self, url: Any):
        self.url = self._ensure_url(url)

    def _ensure_url(self, url: Any) -> Any:
        """Ensure the URL is a valid instance or parse it."""
        if not isinstance(url, str):
            return url
        from accumulate.utils.url import URL
        return URL.parse(url)

    def get_url(self) -> Any:
        """Return the account URL."""
        return self.url

    def strip_url(self) -> None:
        """Replace the URL with its stripped form."""
        self.url = self.url.strip_extras()

_ensure_url(url)

Ensure the URL is a valid instance or parse it.

Source code in accumulate\models\accounts.py
40
41
42
43
44
45
def _ensure_url(self, url: Any) -> Any:
    """Ensure the URL is a valid instance or parse it."""
    # Lazy import avoids a circular dependency with accumulate.utils.url.
    if isinstance(url, str):
        from accumulate.utils.url import URL
        return URL.parse(url)
    return url

submission

Submission dataclass

Represents a transaction submission in the Accumulate blockchain.

Source code in accumulate\models\submission.py
 7
 8
 9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
@dataclass
class Submission:
    """
    Represents a transaction submission in the Accumulate blockchain.
    """
    txid: Optional[str] = None  # transaction ID reported for the submission
    status: Optional[Dict[str, Any]] = None  # protocol.TransactionStatus payload
    success: bool = False  # whether the envelope was successfully submitted
    message: Optional[str] = None  # message returned by the consensus engine

    def __post_init__(self):
        # Reject non-dict status payloads early rather than at first use.
        if self.status is not None and not isinstance(self.status, dict):
            raise TypeError("The 'status' field must be a dictionary or None.")

    def to_dict(self) -> dict:
        """Serialize this Submission into a plain dictionary."""
        return {
            name: getattr(self, name)
            for name in ("txid", "status", "success", "message")
        }

    @classmethod
    def from_dict(cls, data: dict) -> "Submission":
        """Build a Submission from a plain dictionary."""
        return cls(
            txid=data.get("txid"),
            status=data.get("status"),
            success=data.get("success", False),
            message=data.get("message"),
        )

from_dict(data) classmethod

Creates a Submission instance from a dictionary.

Source code in accumulate\models\submission.py
32
33
34
35
36
37
38
39
40
41
42
@classmethod
def from_dict(cls, data: dict) -> "Submission":
    """
    Creates a Submission instance from a dictionary.
    """
    # Missing keys fall back to the dataclass defaults (None / False).
    return cls(
        txid=data.get("txid"),  # extract the `txid` field
        status=data.get("status"),
        success=data.get("success", False),
        message=data.get("message"),
    )

to_dict()

Converts the Submission instance to a dictionary.

Source code in accumulate\models\submission.py
21
22
23
24
25
26
27
28
29
30
def to_dict(self) -> dict:
    """
    Converts the Submission instance to a dictionary.
    """
    # All four fields are emitted, including those that are None.
    return {
        "txid": self.txid,
        "status": self.status,
        "success": self.success,
        "message": self.message,
    }

signature_types

SignatureType

Bases: Enum

Cryptographic signature algorithms using string identifiers.

Source code in accumulate\models\signature_types.py
 5
 6
 7
 8
 9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
class SignatureType(Enum):
    """Cryptographic signature algorithms using string identifiers."""
    UNKNOWN = 0x00
    LEGACY_ED25519 = 0x01
    ED25519 = 0x02
    RCD1 = 0x03
    RECEIPT = 0x04
    PARTITION = 0x05
    SET = 0x06
    REMOTE = 0x07
    BTC = 0x08
    BTC_LEGACY = 0x09
    ETH = 0x0A
    DELEGATED = 0x0B
    INTERNAL = 0x0C
    AUTHORITY = 0x0D
    RSA_SHA256 = 0x0E
    ECDSA_SHA256 = 0x0F
    TYPED_DATA = 0x10

    @classmethod
    def from_value(cls, value):
        """Retrieve an enum instance by its value.

        Raises:
            ValueError: if `value` does not match any member.
        """
        # Enum already maintains a value->member lookup; calling cls(value)
        # uses it directly instead of the previous linear scan over members.
        try:
            return cls(value)
        except ValueError:
            raise ValueError(f"Invalid SignatureType value: {value}") from None

    def to_rpc_format(self) -> str:
        """Convert SignatureType to the expected string format for JSON-RPC."""
        mapping = {
            SignatureType.UNKNOWN: "unknown",
            SignatureType.LEGACY_ED25519: "legacyEd25519",
            SignatureType.ED25519: "ed25519",
            SignatureType.RCD1: "rcd1",
            SignatureType.RECEIPT: "receipt",
            SignatureType.PARTITION: "partition",
            SignatureType.SET: "set",
            SignatureType.REMOTE: "remote",
            SignatureType.BTC: "btc",
            SignatureType.BTC_LEGACY: "btcLegacy",
            SignatureType.ETH: "eth",
            SignatureType.DELEGATED: "delegated",
            SignatureType.INTERNAL: "internal",
            SignatureType.AUTHORITY: "authority",
            SignatureType.RSA_SHA256: "rsaSha256",
            SignatureType.ECDSA_SHA256: "ecdsaSha256",
            SignatureType.TYPED_DATA: "typedData",
        }
        return mapping[self]

from_value(value) classmethod

Retrieve an enum instance by its value.

Source code in accumulate\models\signature_types.py
25
26
27
28
29
30
31
@classmethod
def from_value(cls, value):
    """Retrieve an enum instance by its value."""
    # Linear scan over members; raises ValueError for unknown values.
    for item in cls:
        if item.value == value:
            return item
    raise ValueError(f"Invalid SignatureType value: {value}")

to_rpc_format()

Convert SignatureType to the expected string format for JSON-RPC.

Source code in accumulate\models\signature_types.py
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
def to_rpc_format(self) -> str:
    """Convert SignatureType to the expected string format for JSON-RPC."""
    # Maps each member to the camelCase identifier the RPC API expects.
    mapping = {
        SignatureType.UNKNOWN: "unknown",
        SignatureType.LEGACY_ED25519: "legacyEd25519",
        SignatureType.ED25519: "ed25519",
        SignatureType.RCD1: "rcd1",
        SignatureType.RECEIPT: "receipt",
        SignatureType.PARTITION: "partition",
        SignatureType.SET: "set",
        SignatureType.REMOTE: "remote",
        SignatureType.BTC: "btc",
        SignatureType.BTC_LEGACY: "btcLegacy",
        SignatureType.ETH: "eth",
        SignatureType.DELEGATED: "delegated",
        SignatureType.INTERNAL: "internal",
        SignatureType.AUTHORITY: "authority",
        SignatureType.RSA_SHA256: "rsaSha256",
        SignatureType.ECDSA_SHA256: "ecdsaSha256",
        SignatureType.TYPED_DATA: "typedData",
    }
    return mapping[self]

Additional Models & Schemas

AccountAuthOperations

AccountAuthOperation

Base class for account authentication operations.

Source code in accumulate\models\AccountAuthOperations.py
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
class AccountAuthOperation:
    """
    Base class for account authentication operations.
    """

    def __init__(self, authority: URL):
        self.authority = authority

    def type(self) -> AccountAuthOperationType:
        """
        Return the operation type. Must be implemented by subclasses.
        """
        raise NotImplementedError("Subclasses must implement the `type` method.")

    def hash(self) -> str:
        """
        Generate a unique hash for this operation based on its attributes.
        """
        # Digest of "<TYPE_NAME>:<authority>" — equal operations hash equal.
        payload = f"{self.type().name}:{self.authority}"
        return sha256(payload.encode()).hexdigest()

hash()

Generate a unique hash for this operation based on its attributes.

Source code in accumulate\models\AccountAuthOperations.py
25
26
27
28
29
30
def hash(self) -> str:
    """
    Generate a unique hash for this operation based on its attributes.
    """
    # SHA-256 over "<TYPE_NAME>:<authority>" — equal operations hash equal.
    serialized = f"{self.type().name}:{self.authority}"
    return sha256(serialized.encode()).hexdigest()

type()

Return the operation type. Must be implemented by subclasses.

Source code in accumulate\models\AccountAuthOperations.py
19
20
21
22
23
def type(self) -> AccountAuthOperationType:
    """
    Return the operation type. Must be implemented by subclasses.
    """
    # Abstract by convention: the base class has no operation type of its own.
    raise NotImplementedError("Subclasses must implement the `type` method.")

AddAccountAuthorityOperation

Bases: AccountAuthOperation

Represents an operation to add an authority to an account's authorization list.

:param authority: The URL of the authority to add.

Source code in accumulate\models\AccountAuthOperations.py
61
62
63
64
65
66
67
68
69
70
71
72
class AddAccountAuthorityOperation(AccountAuthOperation):
    """
    Represents an operation to add an authority to an account's authorization list.

    :param authority: The URL of the authority to add.
    """
    # The base-class __init__(authority) is inherited unchanged; the previous
    # override only delegated to super() and has been removed.

    def type(self) -> AccountAuthOperationType:
        """Return the ADD_AUTHORITY operation type."""
        return AccountAuthOperationType.ADD_AUTHORITY

DisableAccountAuthOperation

Bases: AccountAuthOperation

Represents an operation to disable authorization for a specific authority.

:param authority: The URL of the authority to disable.

Source code in accumulate\models\AccountAuthOperations.py
47
48
49
50
51
52
53
54
55
56
57
58
class DisableAccountAuthOperation(AccountAuthOperation):
    """
    Represents an operation to disable authorization for a specific authority.

    :param authority: The URL of the authority to disable.
    """
    # The base-class __init__(authority) is inherited unchanged; the previous
    # override only delegated to super() and has been removed.

    def type(self) -> AccountAuthOperationType:
        """Return the DISABLE operation type."""
        return AccountAuthOperationType.DISABLE

EnableAccountAuthOperation

Bases: AccountAuthOperation

Represents an operation to enable authorization for a specific authority.

:param authority: The URL of the authority to enable.

Source code in accumulate\models\AccountAuthOperations.py
33
34
35
36
37
38
39
40
41
42
43
44
class EnableAccountAuthOperation(AccountAuthOperation):
    """
    Represents an operation to enable authorization for a specific authority.

    :param authority: The URL of the authority to enable.
    """
    # The base-class __init__(authority) is inherited unchanged; the previous
    # override only delegated to super() and has been removed.

    def type(self) -> AccountAuthOperationType:
        """Return the ENABLE operation type."""
        return AccountAuthOperationType.ENABLE

RemoveAccountAuthorityOperation

Bases: AccountAuthOperation

Represents an operation to remove an authority from an account's authorization list.

:param authority: The URL of the authority to remove.

Source code in accumulate\models\AccountAuthOperations.py
75
76
77
78
79
80
81
82
83
84
85
86
class RemoveAccountAuthorityOperation(AccountAuthOperation):
    """
    Represents an operation to remove an authority from an account's authorization list.

    :param authority: The URL of the authority to remove.
    """
    # The base-class __init__(authority) is inherited unchanged; the previous
    # override only delegated to super() and has been removed.

    def type(self) -> AccountAuthOperationType:
        """Return the REMOVE_AUTHORITY operation type."""
        return AccountAuthOperationType.REMOVE_AUTHORITY

address

Address

Abstract base class for addresses.

Source code in accumulate\models\address.py
 9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
class Address:
    """Abstract base class for addresses."""

    def get_type(self) -> str:
        """Get the type of the address."""
        # Abstract: concrete address types override.
        raise NotImplementedError

    def get_public_key_hash(self) -> Tuple[Optional[bytes], bool]:
        """Get the public key hash."""
        # Returns (hash, available) — abstract here.
        raise NotImplementedError

    def get_public_key(self) -> Tuple[Optional[bytes], bool]:
        """Get the public key."""
        # Returns (key, available) — abstract here.
        raise NotImplementedError

    def get_private_key(self) -> Tuple[Optional[bytes], bool]:
        """Get the private key."""
        # Returns (key, available) — abstract here.
        raise NotImplementedError

    def __str__(self) -> str:
        """Return the string representation of the address."""
        # Abstract: concrete address types provide the representation.
        raise NotImplementedError

__str__()

Return the string representation of the address.

Source code in accumulate\models\address.py
28
29
30
def __str__(self) -> str:
    """Return the string representation of the address."""
    # Abstract: concrete address types provide the representation.
    raise NotImplementedError

get_private_key()

Get the private key.

Source code in accumulate\models\address.py
24
25
26
def get_private_key(self) -> Tuple[Optional[bytes], bool]:
    """Get the private key."""
    # Abstract: concrete Address subclasses must override this.
    raise NotImplementedError

get_public_key()

Get the public key.

Source code in accumulate\models\address.py
20
21
22
def get_public_key(self) -> Tuple[Optional[bytes], bool]:
    """Get the public key."""
    # Abstract: concrete Address subclasses must override this.
    raise NotImplementedError

get_public_key_hash()

Get the public key hash.

Source code in accumulate\models\address.py
16
17
18
def get_public_key_hash(self) -> Tuple[Optional[bytes], bool]:
    """Get the public key hash."""
    # Abstract: concrete Address subclasses must override this.
    raise NotImplementedError

get_type()

Get the type of the address.

Source code in accumulate\models\address.py
12
13
14
def get_type(self) -> str:
    """Get the type of the address."""
    # Abstract: concrete Address subclasses must override this.
    raise NotImplementedError

Lite

Bases: Address

Represents a lightweight address.

Source code in accumulate\models\address.py
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
class Lite(Address):
    """Represents a lightweight address."""

    def __init__(self, url: str, address_bytes: bytes):
        # A lite address is identified by its URL plus raw address bytes.
        self.url = url
        self.address_bytes = address_bytes

    def get_type(self) -> str:
        """Lite addresses carry no concrete signature type."""
        return "Unknown"

    def get_public_key_hash(self) -> Tuple[Optional[bytes], bool]:
        """No public-key hash is exposed for a lite address."""
        return None, False

    def get_public_key(self) -> Tuple[Optional[bytes], bool]:
        """No public key is exposed for a lite address."""
        return None, False

    def get_private_key(self) -> Tuple[Optional[bytes], bool]:
        """No private key is exposed for a lite address."""
        return None, False

    def __str__(self) -> str:
        """The URL itself is the string form of a lite address."""
        return self.url

PrivateKey

Bases: PublicKey

Represents an address based on a private key.

Source code in accumulate\models\address.py
107
108
109
110
111
112
113
114
115
116
117
118
class PrivateKey(PublicKey):
    """Represents an address based on a private key."""

    def __init__(self, signature_type: str, public_key: bytes, private_key: bytes):
        # Reuse the PublicKey initializer for the shared fields.
        super().__init__(signature_type, public_key)
        self.private_key = private_key

    def get_private_key(self) -> Tuple[Optional[bytes], bool]:
        """The private key is always available for this address kind."""
        return self.private_key, True

    def __str__(self) -> str:
        """Hex-encode the raw private-key bytes."""
        return self.private_key.hex()

PublicKey

Bases: Address

Represents an address based on a public key.

Source code in accumulate\models\address.py
 80
 81
 82
 83
 84
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
class PublicKey(Address):
    """Represents an address based on a public key."""

    def __init__(self, signature_type: str, public_key: bytes):
        self.signature_type = signature_type
        self.public_key = public_key

    def get_type(self) -> str:
        """The address type is the signature type it was built with."""
        return self.signature_type

    def get_public_key(self) -> Tuple[Optional[bytes], bool]:
        """The public key is always available here."""
        return self.public_key, True

    def get_public_key_hash(self) -> Tuple[Optional[bytes], bool]:
        """Derive the hash from the key via the module-level helper."""
        return hash_public_key(self.public_key, self.signature_type)

    def get_private_key(self) -> Tuple[Optional[bytes], bool]:
        """A bare public-key address carries no private key."""
        return None, False

    def __str__(self) -> str:
        """Format the hashed key, or flag the address as invalid."""
        digest, ok = self.get_public_key_hash()
        return format_address(self.signature_type, digest) if ok else "<invalid address>"

PublicKeyHashAddress

Bases: Address

Represents an address based on a public key hash.

Source code in accumulate\models\address.py
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
class PublicKeyHashAddress(Address):
    """Represents an address based on a public key hash."""

    def __init__(self, signature_type: str, hash_value: bytes):
        self.signature_type = signature_type
        self.hash_value = hash_value

    def get_type(self) -> str:
        """The address type mirrors its signature type."""
        return self.signature_type

    def get_public_key_hash(self) -> Tuple[Optional[bytes], bool]:
        """The stored hash is always available."""
        return self.hash_value, True

    def get_public_key(self) -> Tuple[Optional[bytes], bool]:
        """Only the hash is kept; the key itself is unavailable."""
        return None, False

    def get_private_key(self) -> Tuple[Optional[bytes], bool]:
        """Hash-only addresses never expose a private key."""
        return None, False

    def __str__(self) -> str:
        """Render via the shared address-formatting helper."""
        return format_address(self.signature_type, self.hash_value)

Unknown

Bases: Address

Represents an unknown address.

Source code in accumulate\models\address.py
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
class Unknown(Address):
    """Represents an unknown address."""

    def __init__(self, value: bytes, encoding: str = "hex"):
        # Raw bytes plus the encoding used when rendering as a string.
        self.value = value
        self.encoding = encoding

    def get_type(self) -> str:
        """Unknown addresses have no concrete signature type."""
        return "Unknown"

    def get_public_key_hash(self) -> Tuple[Optional[bytes], bool]:
        """No public-key hash can be derived from an unknown address."""
        return None, False

    def get_public_key(self) -> Tuple[Optional[bytes], bool]:
        """No public key is available."""
        return None, False

    def get_private_key(self) -> Tuple[Optional[bytes], bool]:
        """No private key is available."""
        return None, False

    def __str__(self) -> str:
        """Render the raw value as base58 or hex, per the configured encoding."""
        return (
            base58.b58encode(self.value).decode()
            if self.encoding == "base58"
            else self.value.hex()
        )

format_address(signature_type, hash_value)

Format an address based on its type and hash value.

Source code in accumulate\models\address.py
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
def format_address(signature_type: str, hash_value: bytes) -> str:
    """Format an address based on its type and hash value.

    :param signature_type: Signature-type name selecting the address prefix.
    :param hash_value: Raw hash bytes; hex-encoded into the result.
    :return: ``"<prefix>-<hex>"``; unrecognized types fall back to ``"MH"``.
    """
    prefix_by_type = {
        "ED25519": "AC1",
        "LegacyED25519": "AC1",
        "RCD1": "FA",
        "BTC": "BTC",
        "BTCLegacy": "BTC",
        "ETH": "ETH",
        "EcdsaSha256": "AC2",
        "RsaSha256": "AC3",
    }
    prefix = prefix_by_type.get(signature_type, "MH")
    return f"{prefix}-{hash_value.hex()}"

hash_public_key(public_key, signature_type)

Hash a public key based on its signature type.

Source code in accumulate\models\address.py
161
162
163
164
165
166
167
168
169
170
def hash_public_key(public_key: bytes, signature_type: str) -> Tuple[Optional[bytes], bool]:
    """Hash a public key based on its signature type.

    ED25519-family and BTC keys are SHA-256 hashed; any other type — or any
    failure while hashing — yields ``(None, False)``.
    """
    try:
        # ED25519, LegacyED25519 and BTC all use a plain SHA-256 digest.
        if signature_type in {"ED25519", "LegacyED25519", "BTC"}:
            return hashlib.sha256(public_key).digest(), True
    except Exception:
        pass
    return None, False

auth

AccountAuth

Manages account authorities for access control.

Source code in accumulate\models\auth.py
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
class AccountAuth:
    """Manages account authorities for access control."""

    def __init__(self, authorities: Optional[List[AuthorityEntry]] = None):
        """
        Initialize AccountAuth.

        :param authorities: Optional list of AuthorityEntry objects.
        """
        # A falsy argument (None or empty) gets a fresh, private list.
        self.authorities: List[AuthorityEntry] = authorities or []

    def key_book(self) -> Optional[URL]:
        """
        Get the primary authority's URL.

        :return: URL of the primary authority or None if not available.
        """
        if not self.authorities:
            return None
        return self.authorities[0].url

    def manager_key_book(self) -> Optional[URL]:
        """
        Get the secondary authority's URL.

        :return: URL of the secondary authority or None if not available.
        """
        if len(self.authorities) < 2:
            return None
        return self.authorities[1].url

    def all_authorities_are_disabled(self) -> bool:
        """
        Check if all authorities are disabled.

        :return: True if all authorities are disabled, False otherwise.
                 Vacuously True when the list is empty.
        """
        return all(entry.disabled for entry in self.authorities)

    def get_authority(self, entry_url: URL) -> Tuple[Optional[AuthorityEntry], bool]:
        """
        Get an authority entry by its URL.

        :param entry_url: The URL of the authority to find.
        :return: A tuple of (AuthorityEntry or None, found flag).
        """
        match = next((entry for entry in self.authorities if entry.url == entry_url), None)
        if match is None:
            return None, False
        return match, True

    def add_authority(self, entry_url: URL) -> Tuple[AuthorityEntry, bool]:
        """
        Add a new authority entry, keeping the list sorted by URL.

        :param entry_url: The URL of the new authority.
        :return: A tuple of (new or existing AuthorityEntry, newly-added flag).
        """
        existing, found = self.get_authority(entry_url)
        if found:
            return existing, False
        entry = AuthorityEntry(url=entry_url)
        self.authorities.append(entry)
        self.authorities.sort(key=lambda e: e.url)  # Keep the list sorted
        return entry, True

    def remove_authority(self, entry_url: URL) -> bool:
        """
        Remove an authority entry by its URL (first match only).

        :param entry_url: The URL of the authority to remove.
        :return: True if the authority was removed, False otherwise.
        """
        for index, entry in enumerate(self.authorities):
            if entry.url == entry_url:
                del self.authorities[index]
                return True
        return False

__init__(authorities=None)

Initialize AccountAuth.

:param authorities: Optional list of AuthorityEntry objects.

Source code in accumulate\models\auth.py
23
24
25
26
27
28
29
def __init__(self, authorities: Optional[List[AuthorityEntry]] = None):
    """
    Initialize AccountAuth.

    :param authorities: Optional list of AuthorityEntry objects.
    """
    # A falsy argument (None or empty list) gets a fresh, private list.
    self.authorities: List[AuthorityEntry] = authorities or []

add_authority(entry_url)

Add a new authority entry.

:param entry_url: The URL of the new authority. :return: A tuple containing the new or existing AuthorityEntry and a boolean indicating if it was newly added.

Source code in accumulate\models\auth.py
67
68
69
70
71
72
73
74
75
76
77
78
79
80
def add_authority(self, entry_url: URL) -> Tuple[AuthorityEntry, bool]:
    """
    Add a new authority entry.

    :param entry_url: The URL of the new authority.
    :return: A tuple containing the new or existing AuthorityEntry and a boolean indicating if it was newly added.
    """
    # Idempotent: an existing entry is returned with added=False.
    existing_authority, found = self.get_authority(entry_url)
    if found:
        return existing_authority, False
    new_authority = AuthorityEntry(url=entry_url)
    self.authorities.append(new_authority)
    # NOTE: sorting by URL requires the URL type to support ordering.
    self.authorities.sort(key=lambda auth: auth.url)  # Keep the list sorted
    return new_authority, True

all_authorities_are_disabled()

Check if all authorities are disabled.

:return: True if all authorities are disabled, False otherwise.

Source code in accumulate\models\auth.py
47
48
49
50
51
52
53
def all_authorities_are_disabled(self) -> bool:
    """
    Check if all authorities are disabled.

    :return: True if all authorities are disabled, False otherwise.
    """
    # all() is vacuously True when the authority list is empty.
    return all(authority.disabled for authority in self.authorities)

get_authority(entry_url)

Get an authority entry by its URL.

:param entry_url: The URL of the authority to find. :return: A tuple containing the AuthorityEntry and a boolean indicating if it was found.

Source code in accumulate\models\auth.py
55
56
57
58
59
60
61
62
63
64
65
def get_authority(self, entry_url: URL) -> Tuple[Optional[AuthorityEntry], bool]:
    """
    Get an authority entry by its URL.

    :param entry_url: The URL of the authority to find.
    :return: A tuple containing the AuthorityEntry and a boolean indicating if it was found.
    """
    # Linear scan; returns the first entry whose URL matches.
    for authority in self.authorities:
        if authority.url == entry_url:
            return authority, True
    return None, False

key_book()

Get the primary authority's URL.

:return: URL of the primary authority or None if not available.

Source code in accumulate\models\auth.py
31
32
33
34
35
36
37
def key_book(self) -> Optional[URL]:
    """
    Get the primary authority's URL.

    :return: URL of the primary authority or None if not available.
    """
    # The primary authority is the first entry in the list.
    return self.authorities[0].url if self.authorities else None

manager_key_book()

Get the secondary authority's URL.

:return: URL of the secondary authority or None if not available.

Source code in accumulate\models\auth.py
39
40
41
42
43
44
45
def manager_key_book(self) -> Optional[URL]:
    """
    Get the secondary authority's URL.

    :return: URL of the secondary authority or None if not available.
    """
    # The secondary authority is the second entry, when present.
    return self.authorities[1].url if len(self.authorities) > 1 else None

remove_authority(entry_url)

Remove an authority entry by its URL.

:param entry_url: The URL of the authority to remove. :return: True if the authority was removed, False otherwise.

Source code in accumulate\models\auth.py
82
83
84
85
86
87
88
89
90
91
92
93
def remove_authority(self, entry_url: URL) -> bool:
    """
    Remove an authority entry by its URL.

    :param entry_url: The URL of the authority to remove.
    :return: True if the authority was removed, False otherwise.
    """
    # Deletes only the first match, then returns immediately.
    for i, authority in enumerate(self.authorities):
        if authority.url == entry_url:
            del self.authorities[i]
            return True
    return False

AuthorityEntry

Represents an authority entry with a URL and a disabled flag.

Source code in accumulate\models\auth.py
 6
 7
 8
 9
10
11
12
13
14
15
16
17
class AuthorityEntry:
    """A single authority record: a URL plus an enabled/disabled flag."""

    def __init__(self, url: URL, disabled: bool = False):
        """
        Initialize an AuthorityEntry.

        :param url: The URL of the authority.
        :param disabled: Boolean flag indicating if the authority is disabled.
        """
        # Authorities start enabled unless explicitly disabled.
        self.url = url
        self.disabled = disabled

__init__(url, disabled=False)

Initialize an AuthorityEntry.

:param url: The URL of the authority. :param disabled: Boolean flag indicating if the authority is disabled.

Source code in accumulate\models\auth.py
 9
10
11
12
13
14
15
16
17
def __init__(self, url: URL, disabled: bool = False):
    """
    Initialize an AuthorityEntry.

    :param url: The URL of the authority.
    :param disabled: Boolean flag indicating if the authority is disabled.
    """
    # Authorities start enabled unless 'disabled' is passed as True.
    self.url = url
    self.disabled = disabled

base_transactions

ExpireOptions

Represents expiration options for a transaction.

Source code in accumulate\models\base_transactions.py
138
139
140
141
142
143
144
145
146
147
class ExpireOptions:
    """
    Represents expiration options for a transaction.
    """

    def __init__(self, at_time: Optional[int] = None):
        """
        :param at_time: The expiration time as a Unix timestamp.
        """
        self.at_time = at_time

    def __repr__(self) -> str:
        """Debug-friendly representation showing the configured timestamp."""
        return f"{type(self).__name__}(at_time={self.at_time!r})"

__init__(at_time=None)

:param at_time: The expiration time as a Unix timestamp.

Source code in accumulate\models\base_transactions.py
143
144
145
146
147
def __init__(self, at_time: Optional[int] = None):
    """
    :param at_time: The expiration time as a Unix timestamp.
    """
    # None means the transaction carries no expiration time.
    self.at_time = at_time

HoldUntilOptions

Represents hold-until options for a transaction.

Source code in accumulate\models\base_transactions.py
150
151
152
153
154
155
156
157
158
159
class HoldUntilOptions:
    """
    Represents hold-until options for a transaction.
    """

    def __init__(self, minor_block: Optional[int] = None):
        """
        :param minor_block: The minor block at which the transaction is held until.
        """
        self.minor_block = minor_block

    def __repr__(self) -> str:
        """Debug-friendly representation showing the configured block."""
        return f"{type(self).__name__}(minor_block={self.minor_block!r})"

__init__(minor_block=None)

:param minor_block: The minor block at which the transaction is held until.

Source code in accumulate\models\base_transactions.py
155
156
157
158
159
def __init__(self, minor_block: Optional[int] = None):
    """
    :param minor_block: The minor block at which the transaction is held until.
    """
    # None means the transaction is not held to any minor block.
    self.minor_block = minor_block

TransactionBodyBase

Bases: ABC

Base class for all transaction bodies, providing standardized marshaling/unmarshaling.

Source code in accumulate\models\base_transactions.py
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
class TransactionBodyBase(ABC):
    """Base class for all transaction bodies, providing standardized marshaling/unmarshaling."""

    @abstractmethod
    def type(self) -> TransactionType:
        """Return the transaction type."""

    @abstractmethod
    def fields_to_encode(self):
        """Return the fields to encode as a list of (field_id, value, encoding_function)."""

    def marshal(self) -> bytes:
        """Generic marshaling for all transactions using structured encoding.

        Each (field_num, value, encode_func) triple from fields_to_encode()
        is encoded, wrapped with field_marshal_binary, and concatenated in order.

        :return: The serialized transaction body bytes.
        """
        # Lazy %-style args avoid formatting cost when DEBUG is off;
        # the old `assert True` debug no-op has been removed.
        logger.debug(" START Marshaling %s", self.__class__.__name__)

        parts = []
        for field_num, value, encode_func in self.fields_to_encode():
            encoded_value = encode_func(value)
            logger.debug(
                " Encoding Field %s: %s",
                field_num,
                encoded_value.hex() if isinstance(encoded_value, bytes) else encoded_value,
            )
            parts.append(field_marshal_binary(field_num, encoded_value))

        # Single join instead of repeated bytes += (quadratic in the worst case).
        serialized = b"".join(parts)
        logger.debug(" FINAL Marshaled %s (HEX): %s", self.__class__.__name__, serialized.hex())
        return serialized

    @classmethod
    @abstractmethod
    def unmarshal(cls, data: bytes):
        """Generic unmarshaling method to be implemented per transaction type."""

    def to_dict(self) -> dict:
        """Convert transaction to a dictionary with correct type formatting."""
        return {"type": self._format_transaction_type(self.type().name)}

    @staticmethod
    def _format_transaction_type(transaction_type: str) -> str:
        """Convert ENUM transaction type to lowerCamelCase for JSON compatibility."""
        first, *rest = transaction_type.lower().split("_")
        return first + "".join(word.capitalize() for word in rest)

_format_transaction_type(transaction_type) staticmethod

Convert ENUM transaction type to lowerCamelCase for JSON compatibility.

Source code in accumulate\models\base_transactions.py
78
79
80
81
82
@staticmethod
def _format_transaction_type(transaction_type: str) -> str:
    """Convert ENUM transaction type to lowerCamelCase for JSON compatibility."""
    # e.g. "SEND_TOKENS" -> "sendTokens"
    words = transaction_type.lower().split("_")
    return words[0] + "".join(word.capitalize() for word in words[1:])

fields_to_encode() abstractmethod

Return the fields to encode as a list of (field_id, value, encoding_function).

Source code in accumulate\models\base_transactions.py
40
41
42
43
@abstractmethod
def fields_to_encode(self):
    """Return the fields to encode as a list of (field_id, value, encoding_function)."""
    # Abstract: each transaction body declares its own wire fields.
    pass

marshal()

Generic marshaling for all transactions using structured encoding.

Source code in accumulate\models\base_transactions.py
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
def marshal(self) -> bytes:
    """Generic marshaling for all transactions using structured encoding."""
    logger.debug(f" START Marshaling {self.__class__.__name__}")

    serialized = b""

    # NOTE(review): `assert True` is a no-op left over from debugging.
    assert True, " DEBUG: `marshal()` method is being executed!"

    # Encode each (field_num, value, encode_func) triple in order.
    for field_num, value, encode_func in self.fields_to_encode():
        encoded_value = encode_func(value)

        # Log each field's encoded form for debugging.
        logger.debug(f" Encoding Field {field_num}: {encoded_value.hex() if isinstance(encoded_value, bytes) else encoded_value}")

        serialized += field_marshal_binary(field_num, encoded_value)

    logger.debug(f" FINAL Marshaled {self.__class__.__name__} (HEX): {serialized.hex()}")

    return serialized

to_dict()

Convert transaction to a dictionary with correct type formatting.

Source code in accumulate\models\base_transactions.py
74
75
76
def to_dict(self) -> dict:
    """Convert transaction to a dictionary with correct type formatting."""
    # type().name is the enum member name, e.g. "SEND_TOKENS".
    return {"type": self._format_transaction_type(self.type().name)}

type() abstractmethod

Return the transaction type.

Source code in accumulate\models\base_transactions.py
35
36
37
38
@abstractmethod
def type(self) -> TransactionType:
    """Return the transaction type."""
    # Abstract: each transaction body identifies its own type.
    pass

unmarshal(data) abstractmethod classmethod

Generic unmarshaling method to be implemented per transaction type.

Source code in accumulate\models\base_transactions.py
68
69
70
71
72
@classmethod
@abstractmethod
def unmarshal(cls, data: bytes):
    """Generic unmarshaling method to be implemented per transaction type."""
    # Abstract: the inverse of marshal() for each concrete body type.
    pass

TransactionBodyFactory

Factory for creating transaction body instances based on transaction type.

Source code in accumulate\models\base_transactions.py
 84
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
class TransactionBodyFactory:
    """
    Factory for creating transaction body instances based on transaction type.
    """

    @classmethod
    async def create(cls, client, transaction_type: TransactionType, *args, **kwargs) -> Optional[TransactionBodyBase]:
        """
        Dynamically create a transaction body instance of the specified type.

        :param client: AccumulateClient instance (optional, for API interactions).
        :param transaction_type: Enum specifying the transaction type.
        :param args: Positional arguments for the transaction body constructor.
        :param kwargs: Keyword arguments for the transaction body constructor.
        :return: A fully initialized TransactionBodyBase subclass instance, or None if unsupported.
        """
        # Imported here rather than at module level — presumably to avoid a
        # circular import with accumulate.models.transactions.
        from accumulate.models.transactions import (
            AddCredits, CreateIdentity, SendTokens, CreateDataAccount, CreateTokenAccount,
            WriteData, IssueTokens, BurnTokens, TransferCredits, RemoteTransaction, 
            UpdateKeyPage, UpdateAccountAuth, CreateToken
        )

        type_map: Dict[TransactionType, Type[TransactionBodyBase]] = {
            TransactionType.ADD_CREDITS: AddCredits,
            TransactionType.CREATE_IDENTITY: CreateIdentity,
            TransactionType.SEND_TOKENS: SendTokens,
            TransactionType.CREATE_DATA_ACCOUNT: CreateDataAccount,
            TransactionType.CREATE_TOKEN_ACCOUNT: CreateTokenAccount,
            TransactionType.WRITE_DATA: WriteData,
            TransactionType.ISSUE_TOKENS: IssueTokens,
            TransactionType.BURN_TOKENS: BurnTokens,
            TransactionType.TRANSFER_CREDITS: TransferCredits,
            TransactionType.REMOTE: RemoteTransaction,
            TransactionType.UPDATE_KEY_PAGE: UpdateKeyPage,
            TransactionType.UPDATE_ACCOUNT_AUTH: UpdateAccountAuth,
            TransactionType.CREATE_TOKEN: CreateToken,
        }

        transaction_class = type_map.get(transaction_type)
        if transaction_class is None:
            logger.error(f" Unsupported transaction type: {transaction_type}")
            return None  # Or raise an exception if you prefer

        # Build the body instance for the resolved class.
        instance = transaction_class(client, *args, **kwargs)

        # Run the optional async initializer (e.g. fetching oracle price).
        initialize = getattr(instance, "initialize", None)
        if callable(initialize):
            await initialize(client)

        return instance

create(client, transaction_type, *args, **kwargs) async classmethod

Dynamically create a transaction body instance of the specified type.

:param client: AccumulateClient instance (optional, for API interactions). :param transaction_type: Enum specifying the transaction type. :param args: Positional arguments for the transaction body constructor. :param kwargs: Keyword arguments for the transaction body constructor. :return: A fully initialized TransactionBodyBase subclass instance, or None if unsupported.

Source code in accumulate\models\base_transactions.py
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
@classmethod
async def create(cls, client, transaction_type: TransactionType, *args, **kwargs) -> Optional[TransactionBodyBase]:
    """
    Dynamically create a transaction body instance of the specified type.

    :param client: AccumulateClient instance (optional, for API interactions).
    :param transaction_type: Enum specifying the transaction type.
    :param args: Positional arguments for the transaction body constructor.
    :param kwargs: Keyword arguments for the transaction body constructor.
    :return: A fully initialized TransactionBodyBase subclass instance, or None if unsupported.
    """
    # NOTE(review): imported locally — presumably to avoid a circular
    # import with accumulate.models.transactions; confirm.
    from accumulate.models.transactions import (
        AddCredits, CreateIdentity, SendTokens, CreateDataAccount, CreateTokenAccount,
        WriteData, IssueTokens, BurnTokens, TransferCredits, RemoteTransaction, 
        UpdateKeyPage, UpdateAccountAuth, CreateToken
    )

    TRANSACTION_TYPE_MAP: Dict[TransactionType, Type[TransactionBodyBase]] = {
        TransactionType.ADD_CREDITS: AddCredits,
        TransactionType.CREATE_IDENTITY: CreateIdentity,
        TransactionType.SEND_TOKENS: SendTokens,
        TransactionType.CREATE_DATA_ACCOUNT: CreateDataAccount,
        TransactionType.CREATE_TOKEN_ACCOUNT: CreateTokenAccount,
        TransactionType.WRITE_DATA: WriteData,
        TransactionType.ISSUE_TOKENS: IssueTokens,
        TransactionType.BURN_TOKENS: BurnTokens,
        TransactionType.TRANSFER_CREDITS: TransferCredits,
        TransactionType.REMOTE: RemoteTransaction,
        TransactionType.UPDATE_KEY_PAGE: UpdateKeyPage,
        TransactionType.UPDATE_ACCOUNT_AUTH: UpdateAccountAuth,
        TransactionType.CREATE_TOKEN: CreateToken,
    }

    # Unsupported types are logged and reported as None, not raised.
    if transaction_type not in TRANSACTION_TYPE_MAP:
        logger.error(f" Unsupported transaction type: {transaction_type}")
        return None  # Or raise an exception if you prefer

    transaction_class = TRANSACTION_TYPE_MAP[transaction_type]

    #  Create the transaction body instance dynamically
    instance = transaction_class(client, *args, **kwargs)

    #  If it has an initialize method, call it asynchronously (e.g., fetching oracle price)
    if hasattr(instance, "initialize") and callable(instance.initialize):
        await instance.initialize(client)

    return instance

TransactionHeader

Represents the header of a transaction, containing metadata and conditions.

Source code in accumulate\models\base_transactions.py
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
class TransactionHeader:
    """
    Represents the header of a transaction, containing metadata and conditions.

    Holds the principal account URL, the initiator hash, and the optional
    memo/metadata/expire/hold-until/authorities fields, plus the timestamp and
    signature type captured while signing (both may be None, e.g. for headers
    reconstructed via unmarshal() or for RemoteTransaction headers).
    """

    def __init__(
        self,
        principal: str,
        initiator: bytes,
        timestamp: Optional[int] = None,
        signature_type: Optional["SignatureType"] = None,
        memo: Optional[str] = None,
        metadata: Optional[bytes] = None,
        expire: Optional["ExpireOptions"] = None,
        hold_until: Optional["HoldUntilOptions"] = None,
        authorities: Optional[List[str]] = None,
    ):
        """
        :param principal: URL of the account the transaction acts upon.
        :param initiator: Hash identifying the transaction initiator.
        :param timestamp: Signing timestamp; None when not applicable.
        :param signature_type: Signature scheme used; None when not applicable.
        :param memo: Optional human-readable memo.
        :param metadata: Optional opaque metadata bytes.
        :param expire: Optional expiration condition.
        :param hold_until: Optional hold-until condition.
        :param authorities: Optional list of additional authority URLs.
        """
        # FIX: timestamp and signature_type now default to None so that
        # unmarshal() — which cannot recover them from the wire format —
        # can construct a header; existing positional callers are unaffected.
        self.timestamp = timestamp
        self.principal = principal
        self.initiator = initiator
        self.signature_type = signature_type
        self.memo = memo
        self.metadata = metadata
        self.expire = expire
        self.hold_until = hold_until
        self.authorities = authorities or []

    @classmethod
    async def create(
        cls,
        principal: str,
        public_key: bytes,
        signer: "Signer",
        timestamp: Optional[int] = None,
        transaction_body=None,  # Used to detect RemoteTransaction bodies
    ) -> "TransactionHeader":
        """Automatically compute the initiator hash and return a fully constructed TransactionHeader.

        :param principal: URL of the account the transaction acts upon.
        :param public_key: Signer's public key, folded into the metadata hash.
        :param signer: Signer supplying version and signature type asynchronously.
        :param timestamp: Explicit timestamp; generated when omitted, and forced
            to None for RemoteTransaction bodies.
        :param transaction_body: Optional body; a RemoteTransaction switches the
            initiator to the referenced transaction's hash.
        """
        from accumulate.signing.signer import Signer
        from accumulate.signing.timestamp import TimestampFromVariable
        from accumulate.models.transactions import RemoteTransaction

        # A RemoteTransaction reuses the referenced transaction's hash as the
        # initiator and carries no timestamp of its own.
        if isinstance(transaction_body, RemoteTransaction):
            logger.info(" RemoteTransaction detected! Adjusting header...")

            initiator_hash = transaction_body.hash  # original signed txn hash
            timestamp = None

            logger.info(f" Using referenced transaction hash: {initiator_hash.hex()}")

        else:
            # Generate a timestamp only if one was not provided.
            timestamp = timestamp or TimestampFromVariable().get()

            # Fetch signer version and signature type dynamically.
            signer_version = await signer.get_signer_version()
            signature_type = await signer.get_signature_type()

            # Use the signer's own URL (not a token sub-account) in the hash.
            signer_url = str(signer.url)

            logger.info(f" Correcting Signer URL (used in metadata hash): {signer_url}")

            # Must match the hash computed by Signer.sign_transaction().
            initiator_hash = Signer.calculate_metadata_hash(
                public_key, timestamp, signer_url, signer_version, signature_type.value
            )

            logger.info(f" Computed Initiator Hash Header (public key): {public_key.hex()}")
            logger.info(f" Computed Initiator Hash Header (timestamp): {timestamp}")
            logger.info(f" Computed Initiator Hash Header (signer): {principal}")
            logger.info(f" Computed Initiator Hash Header (signer_version): {signer_version}")
            logger.info(f" Computed Initiator Hash Header (signature_type.value): {signature_type.value}")
            logger.info(f" Computed Initiator Hash (from TransactionHeader.create()): {initiator_hash.hex()}")

        # timestamp/signature_type are None for Remote Transactions.
        return cls(
            principal=principal,
            initiator=initiator_hash,
            timestamp=timestamp,
            signature_type=signature_type if not isinstance(transaction_body, RemoteTransaction) else None,
        )

    def to_dict(self) -> dict:
        """Convert the transaction header to a dictionary while conditionally including optional fields."""
        txn_dict = {
            "principal": self.principal,
            "initiator": self.initiator.hex(),
        }
        if self.memo:
            txn_dict["memo"] = self.memo
        if self.metadata:
            txn_dict["metadata"] = base64.b64encode(self.metadata).decode()
        if self.expire:
            txn_dict["expire"] = self.expire.at_time
        if self.hold_until:
            txn_dict["hold_until"] = self.hold_until.minor_block
        if self.authorities:
            txn_dict["authorities"] = self.authorities
        return txn_dict

    def marshal_binary(self) -> bytes:
        """Serialize the transaction header to bytes using the updated field‐based encoding."""
        print("\n DEBUG: Marshaling Transaction Header")
        result = b""

        # Field 1: Principal – uvarint(length) + UTF-8 bytes
        principal_bytes = self.principal.encode("utf-8")
        field1 = field_marshal_binary(1, encode_uvarint(len(principal_bytes)) + principal_bytes)
        result += field1
        print(f"   Field 1 (Principal): {field1.hex()}")

        # Field 2: Initiator – raw bytes (assumed fixed length, e.g. 32 bytes)
        field2 = field_marshal_binary(2, self.initiator)
        result += field2
        print(f"   Field 2 (Initiator): {field2.hex()}")

        # Optional Field 4: Memo – uvarint(length) + UTF-8 bytes
        if self.memo:
            memo_bytes = self.memo.encode("utf-8")
            field4 = field_marshal_binary(4, encode_uvarint(len(memo_bytes)) + memo_bytes)
            result += field4
            print(f"   Field 4 (Memo): {field4.hex()}")

        # Optional Field 5: Metadata – uvarint(length) + raw bytes
        if self.metadata:
            field5 = field_marshal_binary(5, encode_uvarint(len(self.metadata)) + self.metadata)
            result += field5
            print(f"   Field 5 (Metadata): {field5.hex()}")

        # Optional Field 6: Expire – fixed 8 bytes, big-endian
        if self.expire:
            expire_bytes = struct.pack(">Q", self.expire.at_time)
            field6 = field_marshal_binary(6, expire_bytes)
            result += field6
            print(f"   Field 6 (Expire): {field6.hex()}")

        # Optional Field 7: Hold Until – fixed 8 bytes, big-endian
        if self.hold_until:
            hold_until_bytes = struct.pack(">Q", self.hold_until.minor_block)
            field7 = field_marshal_binary(7, hold_until_bytes)
            result += field7
            print(f"   Field 7 (Hold Until): {field7.hex()}")

        # Optional Field 8: Authorities – comma-joined, uvarint(length)-prefixed
        if self.authorities:
            auth_str = ",".join(self.authorities)
            auth_bytes = auth_str.encode("utf-8")
            field8 = field_marshal_binary(8, encode_uvarint(len(auth_bytes)) + auth_bytes)
            result += field8
            print(f"   Field 8 (Authorities): {field8.hex()}")

        print(f"   Final Header Encoding: {result.hex()}")
        return result

    @staticmethod
    def unmarshal(data: bytes) -> "TransactionHeader":
        """Deserialize the transaction header from bytes using the updated encoding scheme.

        The timestamp and signature type are not part of the wire format, so
        the reconstructed header carries None for both.
        """
        print("\n DEBUG: Unmarshaling Transaction Header")
        reader = io.BytesIO(data)

        principal = None
        initiator = None
        memo = None
        metadata = None
        expire = None
        hold_until = None
        authorities = None

        # Each field starts with a 1-byte field id.
        while True:
            field_id_byte = reader.read(1)
            if not field_id_byte:
                break  # End of header data
            field_id = field_id_byte[0]
            if field_id == 1:
                # Principal: uvarint length then UTF-8 string
                plen = read_uvarint(reader)
                principal = reader.read(plen).decode("utf-8")
                print(f"   Unmarshaled Field 1 (Principal): {principal}")
            elif field_id == 2:
                # Initiator: fixed length (assume 32 bytes)
                initiator = reader.read(32)
                print(f"   Unmarshaled Field 2 (Initiator): {initiator.hex()}")
            elif field_id == 4:
                # Memo: length-prefixed string
                mlen = read_uvarint(reader)
                memo = reader.read(mlen).decode("utf-8")
                print(f"   Unmarshaled Field 4 (Memo): {memo}")
            elif field_id == 5:
                # Metadata: length-prefixed bytes
                mlen = read_uvarint(reader)
                metadata = reader.read(mlen)
                print(f"   Unmarshaled Field 5 (Metadata): {metadata.hex()}")
            elif field_id == 6:
                # Expire: fixed 8 bytes; zero means "no expiration"
                expire_val = struct.unpack(">Q", reader.read(8))[0]
                if expire_val > 0:
                    expire = ExpireOptions(expire_val)
                print(f"   Unmarshaled Field 6 (Expire): {expire_val}")
            elif field_id == 7:
                # Hold Until: fixed 8 bytes; zero means "no hold"
                hold_val = struct.unpack(">Q", reader.read(8))[0]
                if hold_val > 0:
                    hold_until = HoldUntilOptions(hold_val)
                print(f"   Unmarshaled Field 7 (Hold Until): {hold_val}")
            elif field_id == 8:
                # Authorities: length-prefixed comma-separated string
                alen = read_uvarint(reader)
                auth_data = reader.read(alen).decode("utf-8")
                authorities = auth_data.split(",")
                print(f"   Unmarshaled Field 8 (Authorities): {authorities}")
            else:
                # Unknown field – stop parsing rather than misread the stream.
                print(f"   Unknown field id {field_id} encountered. Skipping.")
                break

        # FIX: pass timestamp/signature_type explicitly — they are not encoded
        # in the binary header, and __init__ previously required them, making
        # this call raise TypeError.
        return TransactionHeader(
            principal=principal,
            initiator=initiator,
            timestamp=None,
            signature_type=None,
            memo=memo,
            metadata=metadata,
            expire=expire,
            hold_until=hold_until,
            authorities=authorities,
        )

    def build_transaction(self, txn):
        """
        Build transaction JSON while conditionally including optional fields.
        Ensures transactionHash matches header['initiator'] for validation.
        Automatically wraps the transaction inside a list.

        :param txn: Transaction exposing get_hash(), header, and body.
        :raises ValueError: if the computed hash differs from the initiator hash.
        :return: {"transaction": [ {header, body} ]} ready for submission.
        """
        # FIX: removed the unused local import of
        # accumulate.models.transactions.Transaction.
        txn_hash = txn.get_hash()

        expected_hash = txn.header.initiator.hex() if txn.header.initiator else None

        logger.info(f" Verifying Transaction Hash Before Sending")
        logger.info(f" Computed Transaction Hash: {txn_hash.hex()}")
        logger.info(f"vs Expected Hash from Initiator: {expected_hash}")

        if expected_hash and txn_hash.hex() != expected_hash:
            logger.error(" Transaction hash mismatch! Computed hash does not match the expected initiator hash.")
            raise ValueError("Transaction hash mismatch! Computed hash does not match the expected initiator hash.")

        txn_data = {
            "header": txn.header.to_dict(),
            "body": txn.body.to_dict() if txn.body else {}
        }

        # Wrap the single transaction in a list, as the API expects.
        return {"transaction": [txn_data]}

build_transaction(txn)

Build transaction JSON while conditionally including optional fields. Ensures transactionHash matches header['initiator'] for validation. Automatically wraps the transaction inside a list.

Source code in accumulate\models\base_transactions.py
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
def build_transaction(self, txn):
    """
    Build transaction JSON while conditionally including optional fields.
    Ensures transactionHash matches header['initiator'] for validation.
    Automatically wraps the transaction inside a list.

    :param txn: Transaction exposing get_hash(), header, and body.
    :raises ValueError: if the computed transaction hash does not match the
        header's initiator hash.
    :return: {"transaction": [ {header, body} ]} ready for submission.
    """
    # FIX: removed the unused local import of
    # accumulate.models.transactions.Transaction.
    txn_hash = txn.get_hash()

    expected_hash = txn.header.initiator.hex() if txn.header.initiator else None

    logger.info(f" Verifying Transaction Hash Before Sending")
    logger.info(f" Computed Transaction Hash: {txn_hash.hex()}")
    logger.info(f"vs Expected Hash from Initiator: {expected_hash}")

    if expected_hash and txn_hash.hex() != expected_hash:
        logger.error(" Transaction hash mismatch! Computed hash does not match the expected initiator hash.")
        raise ValueError("Transaction hash mismatch! Computed hash does not match the expected initiator hash.")

    txn_data = {
        "header": txn.header.to_dict(),
        "body": txn.body.to_dict() if txn.body else {}
    }

    # Wrap the single transaction in a list, as the API expects.
    return {"transaction": [txn_data]}

create(principal, public_key, signer, timestamp=None, transaction_body=None) async classmethod

Automatically compute the initiator hash and return a fully constructed TransactionHeader.

Source code in accumulate\models\base_transactions.py
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
@classmethod
async def create(
    cls,
    principal: str,
    public_key: bytes,
    signer: "Signer",
    timestamp: Optional[int] = None,
    transaction_body=None,  # Used to detect RemoteTransaction bodies
) -> "TransactionHeader":
    """Automatically compute the initiator hash and return a fully constructed TransactionHeader.

    :param principal: URL of the account the transaction acts upon.
    :param public_key: Signer's public key, folded into the metadata hash.
    :param signer: Signer supplying version and signature type asynchronously.
    :param timestamp: Explicit timestamp; generated when omitted, and forced
        to None for RemoteTransaction bodies.
    :param transaction_body: Optional body; a RemoteTransaction switches the
        initiator to the referenced transaction's hash.
    """

    from accumulate.signing.signer import Signer
    from accumulate.signing.timestamp import TimestampFromVariable
    from accumulate.models.transactions import RemoteTransaction  # needed for the isinstance check below

    # A RemoteTransaction reuses the referenced transaction's hash as the
    # initiator and carries no timestamp of its own.
    if isinstance(transaction_body, RemoteTransaction):
        logger.info(" RemoteTransaction detected! Adjusting header...")

        # Use the referenced (original signed) transaction's hash as the initiator.
        initiator_hash = transaction_body.hash  # This should be the original signed transaction hash

        # Remote Transactions do not need a new timestamp.
        timestamp = None  

        logger.info(f" Using referenced transaction hash: {initiator_hash.hex()}")

    else:

        # Generate a timestamp only if one was not provided.
        timestamp = timestamp or TimestampFromVariable().get()

        # Fetch the signer version dynamically.
        signer_version = await signer.get_signer_version()

        # Fetch the correct signature type dynamically.
        signature_type = await signer.get_signature_type()

        # Use the signer's own URL (not a token sub-account) in the hash.
        signer_url = str(signer.url)  # Extract URL from Signer object

        logger.info(f" Correcting Signer URL (used in metadata hash): {signer_url}")

        # Must match the hash computed by `Signer.sign_transaction()`.
        initiator_hash = Signer.calculate_metadata_hash(
            public_key, timestamp, signer_url, signer_version, signature_type.value
        )

        logger.info(f" Computed Initiator Hash Header (public key): {public_key.hex()}")
        logger.info(f" Computed Initiator Hash Header (timestamp): {timestamp}")
        logger.info(f" Computed Initiator Hash Header (signer): {principal}")
        logger.info(f" Computed Initiator Hash Header (signer_version): {signer_version}")
        logger.info(f" Computed Initiator Hash Header (signature_type.value): {signature_type.value}")
        logger.info(f" Computed Initiator Hash (from TransactionHeader.create()): {initiator_hash.hex()}")

    # timestamp and signature_type will be None for Remote Transactions.
    return cls(
        principal=principal,
        initiator=initiator_hash,
        timestamp=timestamp,  # None for Remote Transactions
        signature_type=signature_type if not isinstance(transaction_body, RemoteTransaction) else None,
    )

marshal_binary()

Serialize the transaction header to bytes using the updated field‐based encoding.

Source code in accumulate\models\base_transactions.py
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
def marshal_binary(self) -> bytes:
    """Serialize the transaction header to bytes using the updated field‐based encoding."""
    print("\n DEBUG: Marshaling Transaction Header")
    pieces = []

    # Field 1: principal encoded as uvarint(length) + UTF-8 bytes.
    raw_principal = self.principal.encode("utf-8")
    encoded = field_marshal_binary(1, encode_uvarint(len(raw_principal)) + raw_principal)
    pieces.append(encoded)
    print(f"   Field 1 (Principal): {encoded.hex()}")

    # Field 2: initiator hash as raw bytes (assumed fixed length, e.g. 32).
    encoded = field_marshal_binary(2, self.initiator)
    pieces.append(encoded)
    print(f"   Field 2 (Initiator): {encoded.hex()}")

    # Field 4 (optional): memo as uvarint(length) + UTF-8 bytes.
    if self.memo:
        raw_memo = self.memo.encode("utf-8")
        encoded = field_marshal_binary(4, encode_uvarint(len(raw_memo)) + raw_memo)
        pieces.append(encoded)
        print(f"   Field 4 (Memo): {encoded.hex()}")

    # Field 5 (optional): metadata as uvarint(length) + raw bytes.
    if self.metadata:
        encoded = field_marshal_binary(5, encode_uvarint(len(self.metadata)) + self.metadata)
        pieces.append(encoded)
        print(f"   Field 5 (Metadata): {encoded.hex()}")

    # Field 6 (optional): expire time as a fixed big-endian 8-byte value.
    if self.expire:
        encoded = field_marshal_binary(6, struct.pack(">Q", self.expire.at_time))
        pieces.append(encoded)
        print(f"   Field 6 (Expire): {encoded.hex()}")

    # Field 7 (optional): hold-until block as a fixed big-endian 8-byte value.
    if self.hold_until:
        encoded = field_marshal_binary(7, struct.pack(">Q", self.hold_until.minor_block))
        pieces.append(encoded)
        print(f"   Field 7 (Hold Until): {encoded.hex()}")

    # Field 8 (optional): authorities joined by commas, length-prefixed.
    if self.authorities:
        raw_auth = ",".join(self.authorities).encode("utf-8")
        encoded = field_marshal_binary(8, encode_uvarint(len(raw_auth)) + raw_auth)
        pieces.append(encoded)
        print(f"   Field 8 (Authorities): {encoded.hex()}")

    payload = b"".join(pieces)
    print(f"   Final Header Encoding: {payload.hex()}")
    return payload

to_dict()

Convert the transaction header to a dictionary while conditionally including optional fields.

Source code in accumulate\models\base_transactions.py
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
def to_dict(self) -> dict:
    """Convert the transaction header to a dictionary while conditionally including optional fields."""
    serialized = {
        "principal": self.principal,
        "initiator": self.initiator.hex(),
    }
    # Optional fields are emitted only when they carry a truthy value.
    if self.memo:
        serialized["memo"] = self.memo
    if self.metadata:
        serialized["metadata"] = base64.b64encode(self.metadata).decode()
    if self.expire:
        serialized["expire"] = self.expire.at_time
    if self.hold_until:
        serialized["hold_until"] = self.hold_until.minor_block
    if self.authorities:
        serialized["authorities"] = self.authorities
    return serialized

unmarshal(data) staticmethod

Deserialize the transaction header from bytes using the updated encoding scheme.

Source code in accumulate\models\base_transactions.py
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
@staticmethod
def unmarshal(data: bytes) -> "TransactionHeader":
    """Deserialize the transaction header from bytes using the updated encoding scheme.

    The timestamp and signature type are not part of the wire format, so the
    reconstructed header carries None for both.
    """
    print("\n DEBUG: Unmarshaling Transaction Header")
    reader = io.BytesIO(data)

    principal = None
    initiator = None
    memo = None
    metadata = None
    expire = None
    hold_until = None
    authorities = None

    # Process fields one by one (each field starts with a 1-byte field id)
    while True:
        field_id_byte = reader.read(1)
        if not field_id_byte:
            break  # End of header data
        field_id = field_id_byte[0]
        if field_id == 1:
            # Principal is length-prefixed: read length then string
            plen = read_uvarint(reader)
            principal = reader.read(plen).decode("utf-8")
            print(f"   Unmarshaled Field 1 (Principal): {principal}")
        elif field_id == 2:
            # Initiator: fixed length (assume 32 bytes)
            initiator = reader.read(32)
            print(f"   Unmarshaled Field 2 (Initiator): {initiator.hex()}")
        elif field_id == 4:
            # Memo: length-prefixed string
            mlen = read_uvarint(reader)
            memo = reader.read(mlen).decode("utf-8")
            print(f"   Unmarshaled Field 4 (Memo): {memo}")
        elif field_id == 5:
            # Metadata: length-prefixed bytes
            mlen = read_uvarint(reader)
            metadata = reader.read(mlen)
            print(f"   Unmarshaled Field 5 (Metadata): {metadata.hex()}")
        elif field_id == 6:
            # Expire: fixed 8 bytes; zero means "no expiration"
            expire_val = struct.unpack(">Q", reader.read(8))[0]
            if expire_val > 0:
                expire = ExpireOptions(expire_val)
            print(f"   Unmarshaled Field 6 (Expire): {expire_val}")
        elif field_id == 7:
            # Hold Until: fixed 8 bytes; zero means "no hold"
            hold_val = struct.unpack(">Q", reader.read(8))[0]
            if hold_val > 0:
                hold_until = HoldUntilOptions(hold_val)
            print(f"   Unmarshaled Field 7 (Hold Until): {hold_val}")
        elif field_id == 8:
            # Authorities: length-prefixed string (comma-separated)
            alen = read_uvarint(reader)
            auth_data = reader.read(alen).decode("utf-8")
            authorities = auth_data.split(",")
            print(f"   Unmarshaled Field 8 (Authorities): {authorities}")
        else:
            # Unknown field – stop parsing rather than misread the stream.
            print(f"   Unknown field id {field_id} encountered. Skipping.")
            break

    # FIX: __init__ declares timestamp and signature_type as required
    # parameters; they are not encoded in the binary header, so supply None
    # explicitly instead of raising TypeError here.
    return TransactionHeader(
        principal=principal,
        initiator=initiator,
        timestamp=None,
        signature_type=None,
        memo=memo,
        metadata=metadata,
        expire=expire,
        hold_until=hold_until,
        authorities=authorities,
    )

credits

CreditsAccount

Represents an account with a credit balance.

Source code in accumulate\models\credits.py
 3
 4
 5
 6
 7
 8
 9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
class CreditsAccount:
    """Represents an account with a credit balance."""

    def __init__(self, credit_balance: int = 0):
        """
        Create the account with an opening balance.

        :param credit_balance: Initial credit balance of the account.
        """
        self.credit_balance = credit_balance

    def get_credit_balance(self) -> int:
        """
        Report the account's current balance.

        :return: The credit balance as an integer.
        """
        return self.credit_balance

    def credit_credits(self, amount: int):
        """
        Deposit credits into the account.

        :param amount: The amount of credits to add.
        """
        self.credit_balance = self.credit_balance + amount

    def can_debit_credits(self, amount: int) -> bool:
        """
        Determine whether a withdrawal of the given size is covered.

        :param amount: The amount to check for debiting.
        :return: True if the account can debit the amount, False otherwise.
        """
        return self.credit_balance >= amount

    def debit_credits(self, amount: int) -> bool:
        """
        Withdraw credits if the balance covers the request.

        :param amount: The amount of credits to debit.
        :return: True if the debit was successful, False otherwise.
        """
        if self.can_debit_credits(amount):
            self.credit_balance -= amount
            return True
        return False

__init__(credit_balance=0)

Initialize a credits account.

:param credit_balance: Initial credit balance of the account.

Source code in accumulate\models\credits.py
 6
 7
 8
 9
10
11
12
def __init__(self, credit_balance: int = 0):
    """
    Create the account with an opening balance.

    :param credit_balance: Initial credit balance of the account (defaults to 0).
    """
    self.credit_balance = credit_balance

can_debit_credits(amount)

Check if the account has enough credits to debit.

:param amount: The amount to check for debiting. :return: True if the account can debit the amount, False otherwise.

Source code in accumulate\models\credits.py
30
31
32
33
34
35
36
37
def can_debit_credits(self, amount: int) -> bool:
    """
    Determine whether a withdrawal of the given size is covered.

    :param amount: The amount to check for debiting.
    :return: True if the account can debit the amount, False otherwise.
    """
    return self.credit_balance >= amount

credit_credits(amount)

Add credits to the account.

:param amount: The amount of credits to add.

Source code in accumulate\models\credits.py
22
23
24
25
26
27
28
def credit_credits(self, amount: int):
    """
    Deposit credits into the account.

    :param amount: The amount of credits to add.
    """
    self.credit_balance = self.credit_balance + amount

debit_credits(amount)

Debit credits from the account.

:param amount: The amount of credits to debit. :return: True if the debit was successful, False otherwise.

Source code in accumulate\models\credits.py
39
40
41
42
43
44
45
46
47
48
49
def debit_credits(self, amount: int) -> bool:
    """
    Withdraw credits if the balance covers the request.

    :param amount: The amount of credits to debit.
    :return: True if the debit was successful, False otherwise.
    """
    if self.can_debit_credits(amount):
        self.credit_balance -= amount
        return True
    return False

get_credit_balance()

Get the current credit balance.

:return: The credit balance as an integer.

Source code in accumulate\models\credits.py
14
15
16
17
18
19
20
def get_credit_balance(self) -> int:
    """
    Report the account's current balance.

    :return: The credit balance as an integer.
    """
    return self.credit_balance

data_entries

AccumulateDataEntry

Bases: DataEntry

Represents a single-hash data entry.

Source code in accumulate\models\data_entries.py
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
class AccumulateDataEntry(DataEntry):
    """A data entry whose hash is a single SHA-256 pass over all chunks."""

    def type(self) -> DataEntryType:
        return DataEntryType.ACCUMULATE

    def hash(self) -> bytes:
        digest = hashlib.sha256()
        for piece in self.data:
            digest.update(piece)
        return digest.digest()

    def marshal(self) -> bytes:
        """
        Serialize the DataEntry to bytes.

        Layout: uvarint type tag (2), uvarint chunk count, then each chunk
        length-prefixed.
        """
        # NOTE(review): the tag 2 is hard-coded here; presumably it equals
        # DataEntryType.ACCUMULATE.value — confirm against the enum.
        parts = [encode_uvarint(2), encode_uvarint(len(self.data))]
        parts.extend(bytes_marshal_binary(piece) for piece in self.data)
        return b"".join(parts)

    def to_dict(self) -> dict:
        """
        Convert AccumulateDataEntry to a JSON-serializable dictionary.
        """
        return {
            "type": "accumulate",
            "data": [piece.hex() for piece in self.data],
        }

marshal()

Serialize the DataEntry to bytes.

Source code in accumulate\models\data_entries.py
80
81
82
83
84
85
86
87
88
def marshal(self) -> bytes:
    """
    Serialize the DataEntry to bytes.

    Layout: uvarint type tag (2), uvarint chunk count, then each chunk
    length-prefixed.
    """
    parts = [encode_uvarint(2), encode_uvarint(len(self.data))]
    parts.extend(bytes_marshal_binary(piece) for piece in self.data)
    return b"".join(parts)

to_dict()

Convert AccumulateDataEntry to a JSON-serializable dictionary.

Source code in accumulate\models\data_entries.py
90
91
92
93
94
95
96
97
def to_dict(self) -> dict:
    """
    Convert AccumulateDataEntry to a JSON-serializable dictionary.
    """
    return {
        "type": "accumulate",
        "data": [piece.hex() for piece in self.data],
    }

DataEntry

Base class for data entries.

Source code in accumulate\models\data_entries.py
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
class DataEntry:
    """Base class for data entries.

    Subclasses must implement type() and hash(); the shared marshal/unmarshal
    pair uses the layout: uvarint(type) + uvarint(chunk count) + each chunk
    length-prefixed.
    """

    def __init__(self, data: List[bytes]):
        """
        :param data: List of raw byte chunks making up the entry.
        :raises TypeError: if data is not a list of bytes objects.
        """
        if not isinstance(data, list) or not all(isinstance(d, bytes) for d in data):
            raise TypeError("Data must be a list of byte arrays.")
        self.data = data

    def type(self) -> "DataEntryType":
        """Return the data entry type (must be implemented by subclasses)."""
        raise NotImplementedError("Type method must be implemented by subclasses.")

    def get_data(self) -> List[bytes]:
        """Return the raw data of the entry."""
        return self.data

    def hash(self) -> bytes:
        """Return the hash of the data entry (must be implemented by subclasses)."""
        raise NotImplementedError("Hash method must be implemented by subclasses.")

    def marshal(self) -> bytes:
        """
        Serialize the DataEntry to bytes.

        FIX: emit the uvarint chunk count after the type tag — unmarshal()
        reads one, and the subclass AccumulateDataEntry.marshal() writes one,
        but this base implementation previously omitted it, so a base-class
        round trip could not work.
        """
        type_byte = encode_uvarint(self.type().value)
        chunk_count = encode_uvarint(len(self.data))
        serialized_chunks = b"".join(bytes_marshal_binary(chunk) for chunk in self.data)

        return type_byte + chunk_count + serialized_chunks

    @classmethod
    def unmarshal(cls, data: bytes) -> "DataEntry":
        """Deserialize a data entry from bytes.

        :raises ValueError: if the encoded type tag is not a known DataEntryType.
        """
        logger.debug(f" Unmarshaling DataEntry")

        reader = io.BytesIO(data)

        # Step 1: read the DataEntryType tag.
        type_value = read_uvarint(reader)
        if type_value not in {DataEntryType.ACCUMULATE.value, DataEntryType.DOUBLE_HASH.value}:
            raise ValueError(f"Unknown DataEntryType: {type_value}")

        # Step 2: read the chunk count.
        chunk_count = read_uvarint(reader)

        # Step 3: read each length-prefixed data chunk.
        chunks = [unmarshal_bytes(reader) for _ in range(chunk_count)]

        # Step 4: dispatch to the correct DataEntry subclass.
        if type_value == DataEntryType.ACCUMULATE.value:
            return AccumulateDataEntry(chunks)
        elif type_value == DataEntryType.DOUBLE_HASH.value:
            return DoubleHashDataEntry(chunks)
        else:
            raise ValueError(f"Unexpected DataEntryType: {type_value}")

get_data()

Return the raw data of the entry.

Source code in accumulate\models\data_entries.py
22
23
24
def get_data(self) -> List[bytes]:
    """Return the entry's raw byte chunks (the stored list itself, not a copy)."""
    return self.data

hash()

Return the hash of the data entry (must be implemented by subclasses).

Source code in accumulate\models\data_entries.py
26
27
28
def hash(self) -> bytes:
    """Return the hash of the data entry (must be implemented by subclasses)."""
    message = "Hash method must be implemented by subclasses."
    raise NotImplementedError(message)

marshal()

Serialize the DataEntry to bytes. FIX: Ensure the correct entry encoding.

Source code in accumulate\models\data_entries.py
30
31
32
33
34
35
36
37
38
def marshal(self) -> bytes:
    """Serialize this entry to bytes.

    Emits a uvarint type tag followed by each data chunk length-prefixed
    via ``bytes_marshal_binary``.

    :return: The serialized entry bytes.
    """
    parts = [encode_uvarint(self.type().value)]
    parts.extend(bytes_marshal_binary(piece) for piece in self.data)
    return b"".join(parts)

type()

Return the data entry type (must be implemented by subclasses).

Source code in accumulate\models\data_entries.py
18
19
20
def type(self) -> DataEntryType:
    """Report this entry's DataEntryType; abstract — subclasses must override."""
    raise NotImplementedError("Type method must be implemented by subclasses.")

unmarshal(data) classmethod

Deserialize a data entry from bytes.

Source code in accumulate\models\data_entries.py
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
@classmethod
def unmarshal(cls, data: bytes) -> "DataEntry":
    """Deserialize a data entry from bytes.

    Layout read: uvarint type tag, uvarint chunk count, then that many
    length-prefixed chunks.

    :param data: Serialized entry bytes.
    :return: AccumulateDataEntry or DoubleHashDataEntry.
    :raises ValueError: If the type tag is not a known DataEntryType.
    """
    logger.debug(f" Unmarshaling DataEntry")

    stream = io.BytesIO(data)

    tag = read_uvarint(stream)
    constructors = {
        DataEntryType.ACCUMULATE.value: AccumulateDataEntry,
        DataEntryType.DOUBLE_HASH.value: DoubleHashDataEntry,
    }
    if tag not in constructors:
        raise ValueError(f"Unknown DataEntryType: {tag}")

    count = read_uvarint(stream)
    chunks = [unmarshal_bytes(stream) for _ in range(count)]
    return constructors[tag](chunks)

DataEntryUtils

Utility functions for data entries.

Source code in accumulate\models\data_entries.py
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
class DataEntryUtils:
    """Helpers for validating and pricing data entries."""

    TRANSACTION_SIZE_MAX = 20480  # Hard cap on an entry's payload size, in bytes
    FEE_DATA_UNIT = 256           # Billing granularity, in bytes per fee unit

    @staticmethod
    def check_data_entry_size(entry: DataEntry) -> int:
        """Validate an entry's payload size.

        :param entry: The data entry whose chunks are measured.
        :return: Total size in bytes of all data chunks.
        :raises ValueError: If the payload is empty or exceeds the cap.
        """
        total = sum(map(len, entry.get_data()))
        if total > DataEntryUtils.TRANSACTION_SIZE_MAX:
            raise ValueError(f"Data exceeds {DataEntryUtils.TRANSACTION_SIZE_MAX} byte entry limit.")
        if total <= 0:
            raise ValueError("No data provided for WriteData.")
        return total

    @staticmethod
    def calculate_data_entry_cost(entry: DataEntry, fee_data: int) -> int:
        """Price a data entry.

        Charges ``fee_data`` credits per unit, where units = floor(size / 256) + 1.

        :param entry: The entry to price (validated via check_data_entry_size).
        :param fee_data: Base fee multiplier per data unit.
        :return: Cost in credits.
        """
        total = DataEntryUtils.check_data_entry_size(entry)
        units = total // DataEntryUtils.FEE_DATA_UNIT + 1
        return fee_data * units

calculate_data_entry_cost(entry, fee_data) staticmethod

Calculate the cost of writing a data entry.

:param entry: The data entry to calculate the cost for. :param fee_data: The base fee multiplier for data entries. :return: The cost in credits.

Source code in accumulate\models\data_entries.py
163
164
165
166
167
168
169
170
171
172
173
@staticmethod
def calculate_data_entry_cost(entry: DataEntry, fee_data: int) -> int:
    """
    Calculate the cost of writing a data entry.

    :param entry: The data entry to calculate the cost for.
    :param fee_data: The base fee multiplier for data entries.
    :return: The cost in credits.
    """
    size = DataEntryUtils.check_data_entry_size(entry)
    return fee_data * ((size // DataEntryUtils.FEE_DATA_UNIT) + 1)

check_data_entry_size(entry) staticmethod

Validate the size of the data entry.

:param entry: The data entry to check. :return: The size of the marshaled data entry in bytes. :raises ValueError: If the entry is empty or exceeds the size limit.

Source code in accumulate\models\data_entries.py
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
@staticmethod
def check_data_entry_size(entry: DataEntry) -> int:
    """
    Validate the size of the data entry.

    :param entry: The data entry to check.
    :return: The size of the marshaled data entry in bytes.
    :raises ValueError: If the entry is empty or exceeds the size limit.
    """
    size = sum(len(chunk) for chunk in entry.get_data())
    if size > DataEntryUtils.TRANSACTION_SIZE_MAX:
        raise ValueError(f"Data exceeds {DataEntryUtils.TRANSACTION_SIZE_MAX} byte entry limit.")
    if size <= 0:
        raise ValueError("No data provided for WriteData.")
    return size

DoubleHashDataEntry

Bases: DataEntry

Represents a double-hash data entry (Used in Go JSON).

Source code in accumulate\models\data_entries.py
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
class DoubleHashDataEntry(DataEntry):
    """Data entry hashed with a double SHA-256 (Go JSON type "doubleHash")."""

    def type(self) -> DataEntryType:
        """Identify this entry as the DOUBLE_HASH kind."""
        return DataEntryType.DOUBLE_HASH

    def hash(self) -> bytes:
        """Hash the concatenated chunks with SHA-256, then hash that digest again."""
        inner = hashlib.sha256()
        for piece in self.data:
            inner.update(piece)
        return hashlib.sha256(inner.digest()).digest()

    def marshal(self) -> bytes:
        """Serialize this entry to bytes.

        Layout written: field tag 1 (entry), the uvarint type value, field
        tag 2 (data), then each chunk length-prefixed.

        :return: The serialized entry bytes.
        """
        header = (
            encode_uvarint(1)                    # field 1: entry
            + encode_uvarint(self.type().value)  # type value for doubleHash
            + encode_uvarint(2)                  # field 2: data
        )
        body = b"".join(bytes_marshal_binary(piece) for piece in self.data)
        return header + body

    def to_dict(self) -> dict:
        """Render the JSON-compatible form: type name plus hex-encoded chunks."""
        return {
            "type": "doubleHash",
            "data": [piece.hex() for piece in self.data],
        }

hash()

Compute the double SHA-256 hash (Merkle root of the data).

Source code in accumulate\models\data_entries.py
107
108
109
110
111
112
113
def hash(self) -> bytes:
    """Digest all chunks with SHA-256, then SHA-256 that digest (double hash)."""
    first_pass = hashlib.sha256()
    for piece in self.data:
        first_pass.update(piece)
    return hashlib.sha256(first_pass.digest()).digest()

marshal()

Serialize the DataEntry to bytes. FIX: Ensure the correct entry encoding.

Source code in accumulate\models\data_entries.py
115
116
117
118
119
120
121
122
123
124
125
126
def marshal(self) -> bytes:
    """Serialize the entry: field tag 1 (entry), uvarint type value, field
    tag 2 (data), then each chunk length-prefixed.

    :return: The serialized entry bytes.
    """
    header = (
        encode_uvarint(1)                    # field 1: entry
        + encode_uvarint(self.type().value)  # type value for doubleHash
        + encode_uvarint(2)                  # field 2: data
    )
    body = b"".join(bytes_marshal_binary(piece) for piece in self.data)
    return header + body

to_dict()

Convert DoubleHashDataEntry to a JSON-serializable dictionary.

Source code in accumulate\models\data_entries.py
129
130
131
132
133
134
135
136
def to_dict(self) -> dict:
    """Render the JSON-compatible form: fixed type name plus hex-encoded chunks."""
    hex_chunks = [piece.hex() for piece in self.data]
    return {"type": "doubleHash", "data": hex_chunks}

type()

Return the DataEntryType for double-hash entries.

Source code in accumulate\models\data_entries.py
103
104
105
def type(self) -> DataEntryType:
    """Identify this entry as the DOUBLE_HASH kind."""
    kind = DataEntryType.DOUBLE_HASH
    return kind

enums

AccountAuthOperationType

Bases: Enum

Operations for account authorization.

Source code in accumulate\models\enums.py
209
210
211
212
213
214
215
class AccountAuthOperationType(Enum):
    """Operations for account authorization.

    NOTE: values presumably mirror the Go protocol enum — verify before
    renumbering or reordering members.
    """
    UNKNOWN = 0
    ENABLE = 1
    DISABLE = 2
    ADD_AUTHORITY = 3
    REMOVE_AUTHORITY = 4

AccountType

Bases: Enum

Types of accounts in the Accumulate blockchain.

Source code in accumulate\models\enums.py
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
class AccountType(Enum):
    """Types of accounts in the Accumulate blockchain.

    Values 7, 8 and 13 are unassigned here — presumably reserved by the
    protocol; verify before filling the gaps.
    """
    UNKNOWN = 0
    ANCHOR_LEDGER = 1
    IDENTITY = 2
    TOKEN_ISSUER = 3
    TOKEN_ACCOUNT = 4
    LITE_TOKEN_ACCOUNT = 5
    BLOCK_LEDGER = 6
    KEY_PAGE = 9
    KEY_BOOK = 10
    DATA_ACCOUNT = 11
    LITE_DATA_ACCOUNT = 12
    SYSTEM_LEDGER = 14
    LITE_IDENTITY = 15
    SYNTHETIC_LEDGER = 16

BookType

Bases: Enum

Types of key books.

Source code in accumulate\models\enums.py
233
234
235
236
237
class BookType(Enum):
    """Types of key books.

    NOTE: values presumably mirror the Go protocol enum — verify before
    renumbering.
    """
    NORMAL = 0
    VALIDATOR = 1
    OPERATOR = 2

DataEntryType

Bases: Enum

Types of data entries in the blockchain.

Source code in accumulate\models\enums.py
131
132
133
134
135
136
class DataEntryType(Enum):
    """Types of data entries in the blockchain.

    Used as the uvarint type tag in DataEntry.marshal/unmarshal; only
    ACCUMULATE and DOUBLE_HASH are accepted by unmarshal.
    """
    UNKNOWN = 0x00
    FACTOM = 0x01
    ACCUMULATE = 0x02
    DOUBLE_HASH = 0x03

EventType

Bases: Enum

Types of blockchain events.

Source code in accumulate\models\enums.py
87
88
89
90
91
class EventType(Enum):
    """Types of blockchain events (see ErrorEvent, BlockEvent, GlobalsEvent)."""
    ERROR = 1
    BLOCK = 2
    GLOBALS = 3

ExecutorVersion

Bases: Enum

Versions of the executor system.

Source code in accumulate\models\enums.py
219
220
221
222
223
224
225
226
227
228
229
class ExecutorVersion(Enum):
    """Versions of the executor system, in release order.

    NOTE: values presumably mirror the Go protocol enum — verify before
    renumbering.
    """
    V1 = 1
    V1_SIGNATURE_ANCHORING = 2
    V1_DOUBLE_HASH_ENTRIES = 3
    V1_HALT = 4
    V2 = 5
    V2_BAIKONUR = 6
    V2_VANDENBERG = 7
    V2_JIUQUAN = 8
    V_NEXT = 9

KeyPageOperationType

Bases: Enum

Operations for key pages.

Source code in accumulate\models\enums.py
196
197
198
199
200
201
202
203
204
205
class KeyPageOperationType(Enum):
    """Operations for key pages."""
    UNKNOWN = 0
    UPDATE = 1
    REMOVE = 2
    ADD = 3
    SET_THRESHOLD = 15  # NOTE(review): 15 breaks the otherwise sequential 0-7 numbering — verify against the Go protocol (possibly meant to be 4)
    UPDATE_ALLOWED = 5
    SET_REJECT_THRESHOLD = 6
    SET_RESPONSE_THRESHOLD = 7

KnownPeerStatus

Bases: Enum

Statuses of known peers in the network.

Source code in accumulate\models\enums.py
95
96
97
98
99
class KnownPeerStatus(Enum):
    """Statuses of known peers in the network."""
    UNKNOWN = 0
    GOOD = 1
    BAD = 2

QueryType

Bases: Enum

Query types for retrieving blockchain data.

Source code in accumulate\models\enums.py
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
class QueryType(Enum):
    """Query types for retrieving blockchain data."""
    DEFAULT = 0x00
    CHAIN = 0x01
    DATA = 0x02
    DIRECTORY = 0x03
    PENDING = 0x04
    BLOCK = 0x05
    ANCHOR_SEARCH = 0x10
    PUBLIC_KEY_SEARCH = 0x11
    PUBLIC_KEY_HASH_SEARCH = 0x12
    DELEGATE_SEARCH = 0x13
    MESSAGE_HASH_SEARCH = 0x14

    @classmethod
    def from_value(cls, value):
        """Look up a member by numeric value.

        :raises ValueError: If no member carries that value.
        """
        match = next((member for member in cls if member.value == value), None)
        if match is None:
            raise ValueError(f"Invalid QueryType value: {value}")
        return match

    def to_rpc_format(self) -> str:
        """Map this member to the camelCase queryType string used by JSON-RPC."""
        rpc_names = {
            "DEFAULT": "default",
            "CHAIN": "chain",
            "DATA": "data",
            "DIRECTORY": "directory",
            "PENDING": "pending",
            "BLOCK": "block",
            "ANCHOR_SEARCH": "anchor",
            "PUBLIC_KEY_SEARCH": "publicKeySearch",
            "PUBLIC_KEY_HASH_SEARCH": "publicKeyHashSearch",
            "DELEGATE_SEARCH": "delegateSearch",
            "MESSAGE_HASH_SEARCH": "messageHashSearch",
        }
        return rpc_names[self.name]

from_value(value) classmethod

Retrieve an enum instance by its numeric value.

Source code in accumulate\models\enums.py
43
44
45
46
47
48
49
@classmethod
def from_value(cls, value):
    """Look up an enum member by its numeric value.

    :raises ValueError: If no member carries that value.
    """
    hit = next((member for member in cls if member.value == value), None)
    if hit is None:
        raise ValueError(f"Invalid QueryType value: {value}")
    return hit

to_rpc_format()

Convert to the expected JSON-RPC queryType format (camelCase).

Source code in accumulate\models\enums.py
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
def to_rpc_format(self) -> str:
    """Translate this member's name into the camelCase queryType string
    expected by the JSON-RPC API."""
    rpc_names = {
        "DEFAULT": "default",
        "CHAIN": "chain",
        "DATA": "data",
        "DIRECTORY": "directory",
        "PENDING": "pending",
        "BLOCK": "block",
        "ANCHOR_SEARCH": "anchor",
        "PUBLIC_KEY_SEARCH": "publicKeySearch",
        "PUBLIC_KEY_HASH_SEARCH": "publicKeyHashSearch",
        "DELEGATE_SEARCH": "delegateSearch",
        "MESSAGE_HASH_SEARCH": "messageHashSearch",
    }
    return rpc_names[self.name]

RecordType

Bases: Enum

Types of records stored in the blockchain.

Source code in accumulate\models\enums.py
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
class RecordType(Enum):
    """Types of records stored in the blockchain.

    NOTE: values presumably mirror the Go protocol enum — verify before
    renumbering.
    """
    ACCOUNT = 0x01
    CHAIN = 0x02
    CHAIN_ENTRY = 0x03
    KEY = 0x04
    MESSAGE = 0x10
    SIGNATURE_SET = 0x11
    MINOR_BLOCK = 0x20
    MAJOR_BLOCK = 0x21
    RANGE = 0x80
    URL = 0x81
    TX_ID = 0x82
    INDEX_ENTRY = 0x83
    ERROR = 0x8F

ServiceType

Bases: Enum

Types of services available in the Accumulate network, using hexadecimal values.

Source code in accumulate\models\enums.py
 6
 7
 8
 9
10
11
12
13
14
15
16
17
18
class ServiceType(Enum):
    """Types of services available in the Accumulate network, using hexadecimal values."""
    UNKNOWN = 0x00  # Indicates an unknown service type
    NODE = 0x01  # Node service
    CONSENSUS = 0x02  # Consensus service
    NETWORK = 0x03  # Network service
    METRICS = 0x04  # Metrics service
    QUERY = 0x05  # Querier service
    EVENT = 0x06  # Event service
    SUBMIT = 0x07  # Submitter service
    VALIDATE = 0x08  # Validator service
    FAUCET = 0x09  # Faucet service
    SNAPSHOT = 0x0A  # Snapshot service

TransactionType

Bases: Enum

Transaction types supported by the Accumulate blockchain.

Source code in accumulate\models\enums.py
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
class TransactionType(Enum):
    """Transaction types supported by the Accumulate blockchain.

    Values are grouped in bands: user transactions below 0x31, synthetic
    0x31-0x36, system 0x60 and above (see is_user / is_synthetic below).
    """
    # User Transactions
    UNKNOWN = 0x00
    CREATE_IDENTITY = 0x01
    CREATE_TOKEN_ACCOUNT = 0x02
    SEND_TOKENS = 0x03
    CREATE_DATA_ACCOUNT = 0x04
    WRITE_DATA = 0x05
    WRITE_DATA_TO = 0x06
    ACME_FAUCET = 0x07
    CREATE_TOKEN = 0x08
    ISSUE_TOKENS = 0x09
    BURN_TOKENS = 0x0A
    CREATE_LITE_TOKEN_ACCOUNT = 0x0B
    CREATE_KEY_PAGE = 0x0C
    CREATE_KEY_BOOK = 0x0D
    ADD_CREDITS = 0x0E
    UPDATE_KEY_PAGE = 0x0F
    LOCK_ACCOUNT = 0x10
    BURN_CREDITS = 0x11
    TRANSFER_CREDITS = 0x12
    # NOTE(review): 0x13-0x14 are unassigned here — presumably reserved; verify against the Go protocol.
    UPDATE_ACCOUNT_AUTH = 0x15
    UPDATE_KEY = 0x16
    NETWORK_MAINTENANCE = 0x2E
    ACTIVATE_PROTOCOL_VERSION = 0x2F
    REMOTE = 0x30

    # Systems Transactions
    SYNTHETIC_CREATE_IDENTITY = 0x31
    SYNTHETIC_WRITE_DATA = 0x32
    SYNTHETIC_DEPOSIT_TOKENS = 0x33
    SYNTHETIC_DEPOSIT_CREDITS = 0x34
    SYNTHETIC_BURN_TOKENS = 0x35
    SYNTHETIC_FORWARD_TRANSACTION = 0x36

    ##### SYSTEM TRANSACTIONS #####
    SYSTEM_GENESIS = 0x60
    DIRECTORY_ANCHOR = 0x61
    BLOCK_VALIDATOR_ANCHOR = 0x62
    SYSTEM_WRITE_DATA = 0x63


    def is_user(self) -> bool:
        """Check if the transaction type is a user transaction (value below 0x31)."""
        return self.value < 0x31  # Synthetic transactions start at 0x31

    def is_synthetic(self) -> bool:
        """Check if the transaction type is synthetic (value in 0x31-0x36)."""
        return 0x31 <= self.value <= 0x36

    def is_anchor(self) -> bool:
        """Check if the transaction type is one of the two anchor transactions."""
        return self in {TransactionType.DIRECTORY_ANCHOR, TransactionType.BLOCK_VALIDATOR_ANCHOR}

is_anchor()

Check if the transaction type is an anchor transaction.

Source code in accumulate\models\enums.py
189
190
191
def is_anchor(self) -> bool:
    """True when this is one of the two anchor transaction types."""
    anchors = (TransactionType.DIRECTORY_ANCHOR, TransactionType.BLOCK_VALIDATOR_ANCHOR)
    return self in anchors

is_synthetic()

Check if the transaction type is synthetic.

Source code in accumulate\models\enums.py
185
186
187
def is_synthetic(self) -> bool:
    """True for values in the synthetic transaction band (0x31 through 0x36)."""
    return self.value in range(0x31, 0x37)

is_user()

Check if the transaction type is a user transaction.

Source code in accumulate\models\enums.py
181
182
183
def is_user(self) -> bool:
    """True for the user transaction band (anything below 0x31, where
    synthetic transaction values begin)."""
    return not self.value >= 0x31

VoteType

Bases: Enum

Vote types used in governance.

Source code in accumulate\models\enums.py
122
123
124
125
126
127
class VoteType(Enum):
    """Vote types used in governance."""
    ACCEPT = 0
    REJECT = 1
    ABSTAIN = 2
    SUGGEST = 3

enum_from_name(enum_cls, name)

Retrieve enum value by name.

Source code in accumulate\models\enums.py
241
242
243
244
245
246
def enum_from_name(enum_cls, name: str):
    """Resolve *name* (case-insensitively, via upper-casing) to a member of *enum_cls*.

    :param enum_cls: The Enum subclass to search.
    :param name: Member name, any case.
    :raises ValueError: If no member has the upper-cased name.
    """
    key = name.upper()
    if key in enum_cls.__members__:
        return enum_cls[key]
    raise ValueError(f"Invalid {enum_cls.__name__}: {name}")

errors

AccumulateError

Bases: Exception

Base class for Accumulate-related errors.

Source code in accumulate\models\errors.py
41
42
43
44
45
46
47
48
class AccumulateError(Exception):
    """Root of the Accumulate exception hierarchy.

    Carries an ErrorCode plus a message; a falsy/absent message falls back
    to the code's description. str() renders as "[CODE_NAME] message".
    """
    def __init__(self, code: ErrorCode, message: Optional[str] = None):
        resolved = message or code.description
        self.code = code
        self.message = resolved
        super().__init__(f"[{self.code.name}] {self.message}")

EncodingError

Bases: AccumulateError

Error raised when encoding or decoding fails.

Source code in accumulate\models\errors.py
52
53
54
55
56
57
class EncodingError(AccumulateError):
    """
    Error raised when encoding or decoding fails.

    Defaults its message to ErrorCode.ENCODING_ERROR's description.
    """
    def __init__(self, message: str = ErrorCode.ENCODING_ERROR.description):
        super().__init__(ErrorCode.ENCODING_ERROR, message)

ErrorCode

Bases: Enum

Enumeration of error codes and their descriptions.

Source code in accumulate\models\errors.py
 7
 8
 9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
class ErrorCode(Enum):
    """
    Enumeration of error codes and their descriptions.

    Each member carries (numeric value, human-readable description); the
    numeric part becomes the member's value, the text its ``description``.
    """
    OK = (0, "Indicates the request succeeded")
    ENCODING_ERROR = (1, "Indicates something could not be decoded or encoded")
    FAILED = (2, "Indicates the request failed")
    DID_PANIC = (3, "Indicates the request failed due to a fatal error")
    UNKNOWN_ERROR = (4, "Indicates the request failed due to an unknown error")

    def __init__(self, value: int, description: str):
        # Unpack the member tuple: numeric code becomes the enum value.
        self._value_ = value
        self.description = description

    def success(self) -> bool:
        """True only for OK, the sole success code."""
        return self == ErrorCode.OK

    @classmethod
    def from_value(cls, value: int) -> "ErrorCode":
        """Map an integer to its ErrorCode member.

        :param value: The integer value of the error code.
        :return: The corresponding ErrorCode enum.
        :raises ValueError: If the value is not a valid error code.
        """
        hit = next((member for member in cls if member.value == value), None)
        if hit is None:
            raise ValueError(f"Unknown ErrorCode value: {value}")
        return hit

from_value(value) classmethod

Retrieve the error code enum from its integer value.

:param value: The integer value of the error code. :return: The corresponding ErrorCode enum. :raises ValueError: If the value is not a valid error code.

Source code in accumulate\models\errors.py
27
28
29
30
31
32
33
34
35
36
37
38
39
@classmethod
def from_value(cls, value: int) -> "ErrorCode":
    """Map an integer to its ErrorCode member.

    :param value: Numeric error code.
    :return: The matching member.
    :raises ValueError: If no member carries that value.
    """
    hit = next((member for member in cls if member.value == value), None)
    if hit is None:
        raise ValueError(f"Unknown ErrorCode value: {value}")
    return hit

success()

Determines if the error code represents a successful state.

Source code in accumulate\models\errors.py
21
22
23
24
25
def success(self) -> bool:
    """Whether this code denotes success — only ErrorCode.OK qualifies."""
    return self == ErrorCode.OK

FailedError

Bases: AccumulateError

Error raised for general failure cases.

Source code in accumulate\models\errors.py
61
62
63
64
65
66
class FailedError(AccumulateError):
    """
    Error raised for general failure cases (ErrorCode.FAILED).
    """
    def __init__(self, message: str = "Request failed"):
        super().__init__(ErrorCode.FAILED, message)

PanicError

Bases: AccumulateError

Error raised for fatal errors.

Source code in accumulate\models\errors.py
69
70
71
72
73
74
class PanicError(AccumulateError):
    """
    Error raised for fatal errors (ErrorCode.DID_PANIC).
    """
    def __init__(self, message: str = "A fatal error occurred"):
        super().__init__(ErrorCode.DID_PANIC, message)

UnknownError

Bases: AccumulateError

Error raised for unknown issues.

Source code in accumulate\models\errors.py
77
78
79
80
81
82
class UnknownError(AccumulateError):
    """
    Error raised for unknown issues (ErrorCode.UNKNOWN_ERROR).
    """
    def __init__(self, message: str = "An unknown error occurred"):
        super().__init__(ErrorCode.UNKNOWN_ERROR, message)

ValidationError

Bases: Exception

Raised when validation fails.

Source code in accumulate\models\errors.py
109
110
class ValidationError(Exception):
    """Raised when validation fails.

    Plain Exception subclass; intentionally outside the AccumulateError /
    ErrorCode hierarchy defined above.
    """

raise_for_error_code(code, message=None)

Raise the appropriate exception based on the error code.

:param code: The error code as an integer. :param message: An optional message describing the error. :raises AccumulateError: The corresponding exception for the error code.

Source code in accumulate\models\errors.py
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
def raise_for_error_code(code: int, message: Optional[str] = None):
    """Translate a numeric error code into the matching exception and raise it.

    OK returns silently; every other code raises its mapped AccumulateError
    subclass (falling back to AccumulateError itself), carrying *message*
    or the code's description.

    :param code: The error code as an integer.
    :param message: An optional message describing the error.
    :raises AccumulateError: The corresponding exception for the error code.
    """
    resolved = ErrorCode.from_value(code)
    if resolved == ErrorCode.OK:
        return  # Success — nothing to raise

    handlers = {
        ErrorCode.ENCODING_ERROR: EncodingError,
        ErrorCode.FAILED: FailedError,
        ErrorCode.DID_PANIC: PanicError,
        ErrorCode.UNKNOWN_ERROR: UnknownError,
    }
    exc_type = handlers.get(resolved, AccumulateError)
    raise exc_type(message or resolved.description)

events

BlockEvent

Bases: Record

Represents a block event.

Source code in accumulate\models\events.py
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
class BlockEvent(Record):
    """A block event on the event stream.

    Carries the partition name, block index, timestamp, major-block number,
    and the block's entries.
    """

    def __init__(
        self,
        partition: str,
        index: int,
        time: datetime,
        major: int,
        entries: Optional[List[Dict[str, Any]]] = None,
    ):
        super().__init__(record_type="BlockEvent")
        self.partition = partition
        self.index = index
        self.time = time
        self.major = major
        # Default to a fresh empty list; never share a mutable default.
        self.entries = [] if entries is None else entries

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a plain dict; time rendered as ISO-8601 text."""
        payload = dict(
            record_type=self.record_type,
            partition=self.partition,
            index=self.index,
            time=self.time.isoformat(),
            major=self.major,
            entries=self.entries,
        )
        return payload

    @staticmethod
    def from_dict(data: Dict[str, Any]) -> "BlockEvent":
        """Build a BlockEvent from its dict form (time parsed from ISO-8601)."""
        entries = data.get("entries", [])
        return BlockEvent(
            partition=data["partition"],
            index=data["index"],
            time=datetime.fromisoformat(data["time"]),
            major=data["major"],
            entries=entries,
        )

from_dict(data) staticmethod

Create a BlockEvent from a dictionary.

Source code in accumulate\models\events.py
53
54
55
56
57
58
59
60
61
62
@staticmethod
def from_dict(data: Dict[str, Any]) -> "BlockEvent":
    """Build a BlockEvent from its dict form; "time" is parsed from ISO-8601,
    "entries" defaults to an empty list."""
    entries = data.get("entries", [])
    when = datetime.fromisoformat(data["time"])
    return BlockEvent(
        partition=data["partition"],
        index=data["index"],
        time=when,
        major=data["major"],
        entries=entries,
    )

to_dict()

Convert to dictionary.

Source code in accumulate\models\events.py
42
43
44
45
46
47
48
49
50
51
def to_dict(self) -> Dict[str, Any]:
    """Serialize this block event to a plain dict (time as ISO-8601 text)."""
    payload = dict(
        record_type=self.record_type,
        partition=self.partition,
        index=self.index,
        time=self.time.isoformat(),
        major=self.major,
        entries=self.entries,
    )
    return payload

ErrorEvent

Represents an error event in the system.

Source code in accumulate\models\events.py
 8
 9
10
11
12
13
14
15
16
17
18
19
20
21
class ErrorEvent:
    """An error observed on the event stream, wrapping its raw payload."""

    def __init__(self, err: Optional[Dict[str, Any]] = None):
        self.err = err

    @staticmethod
    def from_dict(data: Dict[str, Any]) -> "ErrorEvent":
        """Build an ErrorEvent from its dict form ("err" key, may be absent)."""
        return ErrorEvent(err=data.get("err"))

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a plain dict."""
        return dict(err=self.err)

from_dict(data) staticmethod

Create an ErrorEvent from a dictionary.

Source code in accumulate\models\events.py
18
19
20
21
@staticmethod
def from_dict(data: Dict[str, Any]) -> "ErrorEvent":
    """Build an ErrorEvent from its dict form; a missing "err" key yields None."""
    payload = data.get("err")
    return ErrorEvent(err=payload)

to_dict()

Convert to dictionary.

Source code in accumulate\models\events.py
14
15
16
def to_dict(self) -> Dict[str, Any]:
    """Serialize this error event to a single-key dict."""
    return dict(err=self.err)

GlobalsEvent

Represents a global values change event.

Source code in accumulate\models\events.py
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
class GlobalsEvent:
    """A change to the network's global values: old snapshot and new snapshot."""

    def __init__(self, old: Optional[Dict[str, Any]] = None, new: Optional[Dict[str, Any]] = None):
        self.old = old
        self.new = new

    @staticmethod
    def from_dict(data: Dict[str, Any]) -> "GlobalsEvent":
        """Build a GlobalsEvent from its dict form ("old"/"new" keys, may be absent)."""
        return GlobalsEvent(
            old=data.get("old"),
            new=data.get("new"),
        )

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a plain dict."""
        return dict(old=self.old, new=self.new)

from_dict(data) staticmethod

Create a GlobalsEvent from a dictionary.

Source code in accumulate\models\events.py
77
78
79
80
81
82
83
@staticmethod
def from_dict(data: Dict[str, Any]) -> "GlobalsEvent":
    """Build a GlobalsEvent from its dict form; missing keys yield None."""
    previous = data.get("old")
    current = data.get("new")
    return GlobalsEvent(old=previous, new=current)

to_dict()

Convert to dictionary.

Source code in accumulate\models\events.py
73
74
75
def to_dict(self) -> Dict[str, Any]:
    """Serialize this globals event to a two-key dict."""
    return dict(old=self.old, new=self.new)

faucet

Faucet

Represents the Accumulate faucet account.

Source code in accumulate\models\faucet.py
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
class Faucet:
    """Represents the Accumulate faucet account."""

    ACME_FAUCET_AMOUNT = 10
    ACME_FAUCET_BALANCE = 200_000_000

    # Generate the faucet key using a fixed seed
    # NOTE(review): the key is fully deterministic (seeded from sha256(b"faucet")),
    # so anyone can derive it — acceptable for test networks only.
    FAUCET_SEED = hashlib.sha256(b"faucet").digest()
    FAUCET_KEY = Ed25519PrivateKey.from_private_bytes(FAUCET_SEED[:32])

    def __init__(self):
        """Initialize the Faucet."""
        # Derive and cache the faucet's lite token account URL once.
        self.faucet_url = self._generate_faucet_url()

    def _generate_faucet_url(self) -> str:
        """
        Generate the faucet URL using the public key.

        :return: A string representing the faucet URL.
        """
        public_key = self.public_key()
        return f"{LiteAuthorityForKey(public_key, SignatureType.ED25519)}/ACME"

    def public_key(self) -> bytes:
        """
        Get the public key of the faucet.

        NOTE(review): returns the PEM-encoded SubjectPublicKeyInfo, not the
        raw 32-byte key — confirm LiteAuthorityForKey expects this form.

        :return: The public key as bytes.
        """
        return self.FAUCET_KEY.public_key().public_bytes(
            encoding=Encoding.PEM,  # Use PEM for portability
            format=PublicFormat.SubjectPublicKeyInfo  # Standard format for public keys
        )

    def signer(self) -> "FaucetSigner":
        """
        Create a new faucet signer with the current timestamp.

        :return: A FaucetSigner instance (timestamp in nanoseconds, UTC).
        """
        return FaucetSigner(int(datetime.now(timezone.utc).timestamp() * 1e9))

__init__()

Initialize the Faucet.

Source code in accumulate\models\faucet.py
24
25
26
def __init__(self):
    """Initialize the Faucet, deriving and caching its account URL."""
    self.faucet_url = self._generate_faucet_url()

_generate_faucet_url()

Generate the faucet URL using the public key.

:return: A string representing the faucet URL.

Source code in accumulate\models\faucet.py
28
29
30
31
32
33
34
35
def _generate_faucet_url(self) -> str:
    """Derive the faucet's ACME token URL from its key's lite authority.

    :return: URL string of the form "<lite authority>/ACME".
    """
    authority = LiteAuthorityForKey(self.public_key(), SignatureType.ED25519)
    return f"{authority}/ACME"

public_key()

Get the public key of the faucet.

:return: The public key as bytes.

Source code in accumulate\models\faucet.py
37
38
39
40
41
42
43
44
45
46
def public_key(self) -> bytes:
    """Export the faucet's public key.

    NOTE(review): exported as PEM (SubjectPublicKeyInfo), not the raw
    32-byte key — confirm downstream consumers expect PEM.

    :return: PEM-encoded public key bytes.
    """
    pub = self.FAUCET_KEY.public_key()
    return pub.public_bytes(
        encoding=Encoding.PEM,
        format=PublicFormat.SubjectPublicKeyInfo,
    )

signer()

Create a new faucet signer with the current timestamp.

:return: A FaucetSigner instance.

Source code in accumulate\models\faucet.py
48
49
50
51
52
53
54
def signer(self) -> "FaucetSigner":
    """Create a FaucetSigner stamped with the current UTC time in nanoseconds.

    :return: A FaucetSigner instance.
    """
    now_ns = int(datetime.now(timezone.utc).timestamp() * 1e9)
    return FaucetSigner(now_ns)

FaucetSigner

Handles signing for the faucet.

Source code in accumulate\models\faucet.py
 57
 58
 59
 60
 61
 62
 63
 64
 65
 66
 67
 68
 69
 70
 71
 72
 73
 74
 75
 76
 77
 78
 79
 80
 81
 82
 83
 84
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
class FaucetSigner:
    """Signer implementation backed by the shared faucet key."""

    def __init__(self, timestamp: int):
        """Create a signer.

        :param timestamp: The timestamp to use for signing.
        """
        self.timestamp = timestamp

    def version(self) -> int:
        """Return the signer version (always 1)."""
        return 1

    def set_public_key(self, sig: Signature) -> None:
        """Populate ``sig.public_key`` with the faucet's public key.

        :param sig: The signature object to update.
        :raises ValueError: If the signature type is unsupported.
        """
        supported = (LegacyED25519Signature, ED25519Signature, RCD1Signature)
        if not isinstance(sig, supported):
            raise ValueError(f"Cannot set the public key on {type(sig).__name__}")
        # PEM-encoded SubjectPublicKeyInfo of the shared faucet key.
        sig.public_key = Faucet.FAUCET_KEY.public_key().public_bytes(
            encoding=Encoding.PEM,
            format=PublicFormat.SubjectPublicKeyInfo
        )

    def sign(self, sig: Signature, sig_md_hash: bytes, message: bytes) -> None:
        """Sign ``message`` with the faucet key and store it on ``sig``.

        :param sig: The signature object to update.
        :param sig_md_hash: The metadata hash for the signature (not used here).
        :param message: The message to sign.
        :raises ValueError: If the signature type is unsupported.
        """
        supported = (LegacyED25519Signature, ED25519Signature, RCD1Signature)
        if not isinstance(sig, supported):
            raise ValueError(f"Cannot sign {type(sig).__name__} with a key.")
        sig.signature = Faucet.FAUCET_KEY.sign(message)

__init__(timestamp)

Initialize the FaucetSigner.

:param timestamp: The timestamp to use for signing.

Source code in accumulate\models\faucet.py
60
61
62
63
64
65
66
def __init__(self, timestamp: int):
    """
    Initialize the FaucetSigner.

    :param timestamp: The timestamp to use for signing (nanoseconds since
        epoch when produced by Faucet.signer()).
    """
    self.timestamp = timestamp

set_public_key(sig)

Set the public key for a given signature.

:param sig: The signature object to update. :raises ValueError: If the signature type is unsupported.

Source code in accumulate\models\faucet.py
76
77
78
79
80
81
82
83
84
85
86
87
88
89
def set_public_key(self, sig: Signature) -> None:
    """
    Set the public key for a given signature.

    :param sig: The signature object to update.
    :raises ValueError: If the signature type is unsupported.
    """
    # Only ED25519-family signature types carry a settable public key.
    if isinstance(sig, (LegacyED25519Signature, ED25519Signature, RCD1Signature)):
        # PEM-encoded SubjectPublicKeyInfo of the shared faucet key.
        sig.public_key = Faucet.FAUCET_KEY.public_key().public_bytes(
            encoding=Encoding.PEM,
            format=PublicFormat.SubjectPublicKeyInfo
        )
    else:
        raise ValueError(f"Cannot set the public key on {type(sig).__name__}")

sign(sig, sig_md_hash, message)

Sign the message with the faucet key.

:param sig: The signature object to update.
:param sig_md_hash: The metadata hash for the signature.
:param message: The message to sign.
:raises ValueError: If the signature type is unsupported.

Source code in accumulate\models\faucet.py
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
def sign(self, sig: Signature, sig_md_hash: bytes, message: bytes) -> None:
    """
    Sign the message with the faucet key.

    :param sig: The signature object to update.
    :param sig_md_hash: The metadata hash for the signature (accepted for
        interface compatibility; not used by this implementation).
    :param message: The message to sign.
    :raises ValueError: If the signature type is unsupported.
    """
    if isinstance(sig, (LegacyED25519Signature, ED25519Signature, RCD1Signature)):
        # Note: signs `message` directly, not sig_md_hash.
        signature = Faucet.FAUCET_KEY.sign(message)
        sig.signature = signature
    else:
        raise ValueError(f"Cannot sign {type(sig).__name__} with a key.")

version()

Get the version of the signer.

:return: Version as an integer.

Source code in accumulate\models\faucet.py
68
69
70
71
72
73
74
def version(self) -> int:
    """Report the signer version.

    :return: Always 1 -- the only version this signer implements.
    """
    return 1

fee_schedule

Fee

Enumeration of transaction fees in credits.

Source code in accumulate\models\fee_schedule.py
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
class Fee:
    """Enumeration of transaction fees in credits (1 credit = $0.0001)."""
    FEE_FAILED_MAXIMUM = 100  # $0.01
    FEE_SIGNATURE = 1         # $0.0001
    FEE_CREATE_IDENTITY = 50000  # $5.00
    FEE_CREATE_DIRECTORY = 1000  # $0.10
    FEE_CREATE_ACCOUNT = 2500    # $0.25
    FEE_TRANSFER_TOKENS = 300    # $0.03
    FEE_TRANSFER_TOKENS_EXTRA = 100  # $0.01
    FEE_CREATE_TOKEN = 500000    # $50.00
    FEE_GENERAL_TINY = 1         # $0.0001
    FEE_GENERAL_SMALL = 10       # $0.001
    FEE_CREATE_KEY_PAGE = 10000  # $1.00
    FEE_CREATE_KEY_PAGE_EXTRA = 100  # $0.01
    FEE_DATA = 10                # $0.001 / 256 bytes
    FEE_SCRATCH_DATA = 1         # $0.0001 / 256 bytes
    FEE_UPDATE_AUTH = 300        # $0.03
    FEE_UPDATE_AUTH_EXTRA = 100  # $0.01
    FEE_MINIMUM_CREDIT_PURCHASE = 100  # $0.01

FeeSchedule

Source code in accumulate\models\fee_schedule.py
 35
 36
 37
 38
 39
 40
 41
 42
 43
 44
 45
 46
 47
 48
 49
 50
 51
 52
 53
 54
 55
 56
 57
 58
 59
 60
 61
 62
 63
 64
 65
 66
 67
 68
 69
 70
 71
 72
 73
 74
 75
 76
 77
 78
 79
 80
 81
 82
 83
 84
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
class FeeSchedule:
    """Computes credit fees for signatures and transactions."""

    @staticmethod
    def compute_signature_fee(sig: Signature, signature_size_max: int = 1024) -> int:
        """Compute the fee for a signature, walking any delegation chain.

        :param sig: The signature to price.
        :param signature_size_max: Maximum allowed serialized size in bytes.
        :return: The fee in credits.
        :raises ValueError: If the serialized signature is too large.
        :raises RuntimeError: If the delegation chain is circular.
        """
        logger.debug(f"Starting compute_signature_fee with signature: {sig}")
        size = len(sig.serialize())
        logger.debug(f"Initial signature size: {size}")
        if size > signature_size_max:
            raise ValueError(f"Signature size exceeds {signature_size_max} bytes.")

        # Base fee plus one extra FEE_SIGNATURE per started 256-byte chunk
        # beyond the first.
        fee = Fee.FEE_SIGNATURE
        chunks = max(1, (size - 1) // 256 + 1)
        fee += Fee.FEE_SIGNATURE * (chunks - 1)
        logger.debug(f"Fee after chunk calculation for main signature: {fee} (chunks: {chunks})")

        visited_signatures = set()  # Avoid repeated processing
        while sig and hasattr(sig, "delegated_signature"):
            if sig in visited_signatures:
                raise RuntimeError("Circular delegation detected.")
            visited_signatures.add(sig)

            sig = sig.delegated_signature
            if sig is None:  # Break if no further delegation
                break

            size = len(sig.serialize())
            logger.debug(f"Delegated signature size: {size}")

            # Each delegated signature pays for all of its chunks.
            chunks = max(1, (size - 1) // 256 + 1)
            fee += Fee.FEE_SIGNATURE * chunks
            logger.debug(f"Updated fee after processing delegated signature: {fee}")

        logger.debug(f"Final computed fee: {fee}")
        return fee



    @staticmethod
    def compute_transaction_fee(tx: Transaction, transaction_size_max: int = 20480) -> int:
        """
        Compute the fee for a given transaction.

        :param tx: The transaction object.
        :param transaction_size_max: Maximum allowed size for a transaction.
        :return: The calculated fee in credits.
        :raises ValueError: If the serialized transaction exceeds the limit.
        """
        size = len(tx.serialize())
        if size > transaction_size_max:
            raise ValueError(f"Transaction size exceeds {transaction_size_max} bytes.")

        fee = 0
        # One chunk per started 256-byte block of the serialized transaction.
        count = max(1, (size - 1) // 256 + 1)

        tx_type = tx.body.type()
        if tx_type == "CreateToken":
            fee = Fee.FEE_CREATE_TOKEN + Fee.FEE_DATA * (count - 1)
        elif tx_type == "CreateIdentity":
            fee = Fee.FEE_CREATE_IDENTITY + Fee.FEE_DATA * (count - 1)
        elif tx_type in {"CreateTokenAccount", "CreateDataAccount"}:
            fee = Fee.FEE_CREATE_ACCOUNT + Fee.FEE_DATA * (count - 1)
        elif tx_type == "SendTokens":
            # Extra charge per output beyond the first.
            fee = (
                Fee.FEE_TRANSFER_TOKENS
                + Fee.FEE_TRANSFER_TOKENS_EXTRA * (len(tx.body.to) - 1)
                + Fee.FEE_DATA * (count - 1)
            )
        elif tx_type == "CreateKeyPage":
            # Extra charge per key beyond the first.
            fee = (
                Fee.FEE_CREATE_KEY_PAGE
                + Fee.FEE_CREATE_KEY_PAGE_EXTRA * (len(tx.body.keys) - 1)
                + Fee.FEE_DATA * (count - 1)
            )
        else:
            fee = Fee.FEE_GENERAL_SMALL + Fee.FEE_DATA * (count - 1)

        return fee

    @staticmethod
    def compute_synthetic_refund(tx: Transaction, synth_count: int) -> int:
        """Compute the refund owed for a transaction's synthetic outputs.

        :param tx: The transaction whose fee was paid.
        :param synth_count: Number of synthetic transactions produced.
        :return: Refund in credits; 0 when the paid fee is at or below the
            failed-transaction maximum.
        :raises ValueError: If a single-output transaction type reports
            multiple outputs.
        """
        paid = FeeSchedule.compute_transaction_fee(tx)
        if paid <= Fee.FEE_FAILED_MAXIMUM:
            return 0

        tx_type = tx.body.type()
        if tx_type in {"SendTokens", "IssueTokens"}:
            if synth_count > 1:  # Ensure this condition is correctly evaluated
                raise ValueError(f"A {tx_type} transaction cannot have multiple outputs.")
            return Fee.FEE_TRANSFER_TOKENS_EXTRA

        return paid - Fee.FEE_FAILED_MAXIMUM

compute_transaction_fee(tx, transaction_size_max=20480) staticmethod

Compute the fee for a given transaction.

:param tx: The transaction object.
:param transaction_size_max: Maximum allowed size for a transaction.
:return: The calculated fee in credits.

Source code in accumulate\models\fee_schedule.py
 71
 72
 73
 74
 75
 76
 77
 78
 79
 80
 81
 82
 83
 84
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
@staticmethod
def compute_transaction_fee(tx: Transaction, transaction_size_max: int = 20480) -> int:
    """
    Compute the fee for a given transaction.

    :param tx: The transaction object.
    :param transaction_size_max: Maximum allowed size for a transaction.
    :return: The calculated fee in credits.
    :raises ValueError: If the serialized transaction exceeds the limit.
    """
    size = len(tx.serialize())
    if size > transaction_size_max:
        raise ValueError(f"Transaction size exceeds {transaction_size_max} bytes.")

    # Every started 256-byte chunk beyond the first adds a data surcharge.
    chunks = max(1, (size - 1) // 256 + 1)
    data_surcharge = Fee.FEE_DATA * (chunks - 1)

    tx_type = tx.body.type()
    if tx_type == "CreateToken":
        return Fee.FEE_CREATE_TOKEN + data_surcharge
    if tx_type == "CreateIdentity":
        return Fee.FEE_CREATE_IDENTITY + data_surcharge
    if tx_type in {"CreateTokenAccount", "CreateDataAccount"}:
        return Fee.FEE_CREATE_ACCOUNT + data_surcharge
    if tx_type == "SendTokens":
        # Additional outputs beyond the first are billed separately.
        extra_outputs = len(tx.body.to) - 1
        return Fee.FEE_TRANSFER_TOKENS + Fee.FEE_TRANSFER_TOKENS_EXTRA * extra_outputs + data_surcharge
    if tx_type == "CreateKeyPage":
        # Additional keys beyond the first are billed separately.
        extra_keys = len(tx.body.keys) - 1
        return Fee.FEE_CREATE_KEY_PAGE + Fee.FEE_CREATE_KEY_PAGE_EXTRA * extra_keys + data_surcharge
    return Fee.FEE_GENERAL_SMALL + data_surcharge

general

AccountAuth dataclass

Represents account authorization details.

Source code in accumulate\models\general.py
58
59
60
61
@dataclass
class AccountAuth:
    """Represents account authorization details."""
    # Authorities attached to the account; defaults to an empty list.
    authorities: List["AuthorityEntry"] = field(default_factory=list)

AnchorMetadata dataclass

Metadata for an anchor.

Source code in accumulate\models\general.py
30
31
32
33
34
35
36
37
@dataclass
class AnchorMetadata:
    """Metadata for an anchor."""
    account: Optional[URL]  # account the anchor is associated with
    index: int              # anchor index
    source_index: int       # index on the source side
    source_block: int       # block number on the source side
    entry: bytes            # raw anchored entry bytes

AuthorityEntry dataclass

Represents an entry in the account's authorization list.

Source code in accumulate\models\general.py
64
65
66
67
68
@dataclass
class AuthorityEntry:
    """Represents an entry in the account's authorization list."""
    url: Optional[URL]  # URL of the authority
    disabled: bool  # True if auth checks are disabled for this authority

BlockEntry dataclass

Represents a single entry in a block.

Source code in accumulate\models\general.py
40
41
42
43
44
45
@dataclass
class BlockEntry:
    """Represents a single entry in a block."""
    account: Optional[URL]  # account the entry belongs to
    chain: str              # name of the chain within the account
    index: int              # position of the entry in that chain

CreditRecipient dataclass

Source code in accumulate\models\general.py
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
@dataclass
class CreditRecipient:
    """A credit recipient: a destination URL and a credit amount."""
    url: Optional[URL]  # recipient URL
    amount: int         # amount of credits

    def to_dict(self) -> dict:
        """Convert CreditRecipient to dictionary format for JSON serialization."""
        return {
            "url": str(self.url),
            "amount": str(self.amount),  # Convert to string to match Accumulate JSON format
        }


    def marshal(self) -> bytes:
        """Serialize CreditRecipient to bytes: length-prefixed URL string, then uvarint amount."""
        url_data = string_marshal_binary(str(self.url))
        amount_data = encode_uvarint(self.amount)
        return url_data + amount_data

    @classmethod
    def unmarshal(cls, data: bytes) -> "CreditRecipient":
        """Deserialize bytes into CreditRecipient."""
        reader = io.BytesIO(data)

        # Read the URL
        # NOTE(review): always consumes exactly 2 bytes for the length varint;
        # if the length encodes in 1 byte this mis-aligns the reader -- confirm
        # against string_marshal_binary's length encoding.
        url_length, _ = decode_uvarint(reader.read(2))
        url_str = reader.read(url_length).decode("utf-8")
        url = URL.parse(url_str)

        # Read the amount
        amount, _ = decode_uvarint(reader.read())

        return cls(url, amount)

marshal()

Serialize CreditRecipient to bytes.

Source code in accumulate\models\general.py
105
106
107
108
109
def marshal(self) -> bytes:
    """Serialize CreditRecipient to bytes: length-prefixed URL string followed by the uvarint amount."""
    url_data = string_marshal_binary(str(self.url))
    amount_data = encode_uvarint(self.amount)
    return url_data + amount_data

to_dict()

Convert TokenRecipient to dictionary format for JSON serialization.

Source code in accumulate\models\general.py
 97
 98
 99
100
101
102
def to_dict(self) -> dict:
    """Convert CreditRecipient to dictionary format for JSON serialization."""
    return {
        "url": str(self.url),
        "amount": str(self.amount),  # Convert to string to match Accumulate JSON format
    }

unmarshal(data) classmethod

Deserialize bytes into CreditRecipient.

Source code in accumulate\models\general.py
111
112
113
114
115
116
117
118
119
120
121
122
123
124
@classmethod
def unmarshal(cls, data: bytes) -> "CreditRecipient":
    """Deserialize bytes into CreditRecipient."""
    reader = io.BytesIO(data)

    # Read the URL
    # NOTE(review): always consumes exactly 2 bytes for the length varint;
    # if the length encodes in 1 byte this mis-aligns the reader -- confirm
    # against string_marshal_binary's length encoding.
    url_length, _ = decode_uvarint(reader.read(2))
    url_str = reader.read(url_length).decode("utf-8")
    url = URL.parse(url_str)

    # Read the amount
    amount, _ = decode_uvarint(reader.read())

    return cls(url, amount)

FeeSchedule dataclass

Represents a fee schedule for the network.

Source code in accumulate\models\general.py
137
138
139
140
141
142
@dataclass
class FeeSchedule:
    """Represents a fee schedule for the network."""
    create_identity_sliding: List[int]  # sliding-scale fees for identity creation
    create_sub_identity: int            # fee for creating a sub-identity
    bare_identity_discount: int         # discount applied to bare identities

IndexEntry dataclass

Represents an index entry in a chain.

Source code in accumulate\models\general.py
48
49
50
51
52
53
54
55
@dataclass
class IndexEntry:
    """Represents an index entry in a chain."""
    source: int                             # source entry index
    anchor: Optional[int] = None            # anchor index, when present
    block_index: Optional[int] = None       # index of the containing block
    block_time: Optional[int] = None        # Unix timestamp
    root_index_index: Optional[int] = None  # index into the root index chain

NetworkGlobals dataclass

Represents network-level global configurations.

Source code in accumulate\models\general.py
157
158
159
160
161
162
163
164
165
@dataclass
class NetworkGlobals:
    """Represents network-level global configurations."""
    operator_accept_threshold: float   # fraction of operators required to accept
    validator_accept_threshold: float  # fraction of validators required to accept
    major_block_schedule: str          # schedule expression for major blocks
    anchor_empty_blocks: bool          # whether empty blocks are anchored
    fee_schedule: Optional["FeeSchedule"]  # network fee schedule, if configured
    limits: Optional["NetworkLimits"]      # protocol limits, if configured

NetworkLimits dataclass

Represents network protocol limits.

Source code in accumulate\models\general.py
145
146
147
148
149
150
151
152
153
154
@dataclass
class NetworkLimits:
    """Represents network protocol limits."""
    data_entry_parts: int       # max parts per data entry
    account_authorities: int    # max authorities per account
    book_pages: int             # max pages per key book
    page_entries: int           # max entries per key page
    identity_accounts: int      # max accounts per identity
    pending_major_blocks: int   # max pending major blocks
    events_per_block: int       # max events per block

Object dataclass

Generic object with chains and pending transactions.

Source code in accumulate\models\general.py
22
23
24
25
26
27
@dataclass
class Object:
    """Generic object with chains and pending transactions."""
    type: str  # Enum for ObjectType
    chains: List["ChainMetadata"] = field(default_factory=list)  # chains belonging to the object
    pending: Optional[List["TxIdSet"]] = None  # Pending transactions

TokenRecipient dataclass

Source code in accumulate\models\general.py
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
@dataclass
class TokenRecipient:
    """A token recipient: a destination URL and a non-negative amount."""
    url: URL
    amount: int

    def __post_init__(self):
        # Validate eagerly so an invalid recipient can never be constructed.
        if not self.url:
            raise ValueError("URL cannot be None.")
        if self.amount < 0:
            raise ValueError("Amount must be a non-negative integer.")

    def to_dict(self) -> dict:
        """Convert TokenRecipient to dictionary format for JSON serialization."""
        # Amounts are serialized as strings to match Accumulate's JSON format.
        return {"url": str(self.url), "amount": str(self.amount)}

    def __repr__(self) -> str:
        return f"TokenRecipient(url={self.url}, amount={self.amount})"

to_dict()

Convert TokenRecipient to dictionary format for JSON serialization.

Source code in accumulate\models\general.py
81
82
83
84
85
86
def to_dict(self) -> dict:
    """Convert TokenRecipient to dictionary format for JSON serialization."""
    # Both values are stringified to match Accumulate's JSON conventions.
    url_text = str(self.url)
    amount_text = str(self.amount)
    return {"url": url_text, "amount": amount_text}

key_management

AddKeyOperation dataclass

Represents an operation to add a key to a key page.

:param entry: The key specification to add.

Source code in accumulate\models\key_management.py
128
129
130
131
132
133
134
135
@dataclass
class AddKeyOperation:
    """
    Represents an operation to add a key to a key page.

    :param entry: The key specification to add.
    """
    entry: KeySpec  # the key specification to add

KeyPage dataclass

Represents a page of keys with threshold signature requirements.

Source code in accumulate\models\key_management.py
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
@dataclass
class KeyPage:
    """A page of keys plus the m-of-n signature threshold that governs them."""
    accept_threshold: int
    keys: List[KeySpec] = field(default_factory=list)

    def get_m_of_n(self) -> Tuple[int, int]:
        """Return the page's signature requirement as (m, n).

        :return: (threshold, total number of keys).
        """
        return self.accept_threshold, len(self.keys)

    def set_threshold(self, m: int) -> None:
        """Set the signature threshold for the key page.

        :param m: The required number of signatures.
        :raises ValueError: If ``m`` is non-positive or exceeds the key count.
        """
        if m <= 0:
            raise ValueError("Threshold must be greater than 0.")
        if m > len(self.keys):
            raise ValueError(
                f"Cannot require {m} signatures with only {len(self.keys)} keys available."
            )
        self.accept_threshold = m

    def entry_by_key_hash(self, key_hash: bytes) -> Tuple[int, Optional[KeySpec], bool]:
        """Locate a key entry by its public-key hash.

        :param key_hash: The hash to search for.
        :return: (index, key_spec, found); (-1, None, False) when absent.
        """
        for position, spec in enumerate(self.keys):
            if spec.public_key_hash == key_hash:
                return position, spec, True
        return -1, None, False

    def add_key_spec(self, key_spec: KeySpec) -> None:
        """Append a key spec, keeping the page sorted by (hash, delegate).

        :param key_spec: The key specification to add.
        """
        self.keys.append(key_spec)
        # A missing delegate sorts as the empty string.
        self.keys.sort(key=lambda spec: (spec.public_key_hash, spec.delegate or ""))

    def remove_key_spec_at(self, index: int) -> None:
        """Remove the key spec stored at ``index``.

        :param index: The index of the key to remove.
        :raises IndexError: If the index is out of range.
        """
        if index < 0 or index >= len(self.keys):
            raise IndexError("Key index out of range")
        self.keys.pop(index)

add_key_spec(key_spec)

Add a key specification to the key page.

:param key_spec: The key specification to add.

Source code in accumulate\models\key_management.py
66
67
68
69
70
71
72
73
74
75
def add_key_spec(self, key_spec: KeySpec) -> None:
    """
    Add a key specification to the key page.

    :param key_spec: The key specification to add.
    """
    self.keys.append(key_spec)
    # Keep the page ordered by (public key hash, delegate); a missing
    # delegate sorts as the empty string.
    self.keys.sort(
        key=lambda ks: (ks.public_key_hash, ks.delegate or "")
    )

entry_by_key_hash(key_hash)

Find a key entry by its hash.

:param key_hash: The hash of the key to search for. :return: A tuple (index, key_spec, found) where index is the position, key_spec is the found key, and found is a boolean.

Source code in accumulate\models\key_management.py
54
55
56
57
58
59
60
61
62
63
64
def entry_by_key_hash(self, key_hash: bytes) -> Tuple[int, Optional[KeySpec], bool]:
    """Locate a key entry by its public-key hash.

    :param key_hash: The hash of the key to search for.
    :return: (index, key_spec, found) -- (-1, None, False) when no entry matches.
    """
    for position, spec in enumerate(self.keys):
        if spec.public_key_hash == key_hash:
            return position, spec, True
    return -1, None, False

get_m_of_n()

Retrieve the signature requirements for the key page.

:return: A tuple (m, n) where m is the threshold and n is the total number of keys.

Source code in accumulate\models\key_management.py
31
32
33
34
35
36
37
def get_m_of_n(self) -> Tuple[int, int]:
    """Return the page's signature requirement as an (m, n) pair.

    :return: (signature threshold, total number of keys).
    """
    total_keys = len(self.keys)
    return self.accept_threshold, total_keys

remove_key_spec_at(index)

Remove a key specification at a specific index.

:param index: The index of the key to remove. :raises IndexError: If the index is out of range.

Source code in accumulate\models\key_management.py
77
78
79
80
81
82
83
84
85
86
def remove_key_spec_at(self, index: int) -> None:
    """Delete the key specification stored at ``index``.

    :param index: The index of the key to remove.
    :raises IndexError: If the index is out of range.
    """
    if index < 0 or index >= len(self.keys):
        raise IndexError("Key index out of range")
    self.keys.pop(index)

set_threshold(m)

Set the signature threshold for the key page.

:param m: The required number of signatures. :raises ValueError: If the threshold is invalid.

Source code in accumulate\models\key_management.py
39
40
41
42
43
44
45
46
47
48
49
50
51
52
def set_threshold(self, m: int) -> None:
    """Set the signature threshold for the key page.

    :param m: The required number of signatures.
    :raises ValueError: If ``m`` is non-positive or exceeds the key count.
    """
    if m <= 0:
        raise ValueError("Threshold must be greater than 0.")
    total = len(self.keys)
    if m > total:
        raise ValueError(
            f"Cannot require {m} signatures with only {total} keys available."
        )
    self.accept_threshold = m

KeySpec dataclass

Represents a key specification with metadata.

Source code in accumulate\models\key_management.py
 9
10
11
12
13
14
15
16
17
18
19
20
21
22
@dataclass
class KeySpec:
    """Represents a key specification with metadata."""
    public_key_hash: bytes           # hash of the public key
    delegate: Optional[str] = None   # optional delegate; None when unset
    last_used_on: int = 0            # timestamp of last use; 0 = never used

    def get_last_used_on(self) -> int:
        """Retrieve the timestamp of the last key usage (0 if never used)."""
        return self.last_used_on

    def set_last_used_on(self, timestamp: int) -> None:
        """Set the timestamp of the last key usage."""
        self.last_used_on = timestamp

get_last_used_on()

Retrieve the timestamp of the last key usage.

Source code in accumulate\models\key_management.py
16
17
18
def get_last_used_on(self) -> int:
    """Retrieve the timestamp of the last key usage (0 if never used)."""
    return self.last_used_on

set_last_used_on(timestamp)

Set the timestamp of the last key usage.

Source code in accumulate\models\key_management.py
20
21
22
def set_last_used_on(self, timestamp: int) -> None:
    """Set the timestamp of the last key usage.

    :param timestamp: The new last-used timestamp.
    """
    self.last_used_on = timestamp

KeySpecParams dataclass

Represents the parameters for a key specification.

Source code in accumulate\models\key_management.py
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
@dataclass
class KeySpecParams:
    """Represents the parameters for a key specification."""
    key_hash: bytes                 # hash of the public key
    delegate: Optional[str] = None  # optional delegate; omitted when None

    def marshal(self) -> bytes:
        """Serialize the KeySpecParams to bytes correctly."""
        # NOTE(review): 0x20 (32) is used as an inner marker for the key hash
        # -- confirm against the Accumulate binary-encoding convention.
        key_hash_data = field_marshal_binary(1, field_marshal_binary(0x20, self.key_hash))  #  keyHash encoding with `0x20`

        delegate_data = b""
        if self.delegate:
            delegate_data = field_marshal_binary(2, self.delegate.encode("utf-8"))

        return key_hash_data + delegate_data  #  Concatenating

    @classmethod
    def unmarshal(cls, data: bytes) -> "KeySpecParams":
        """Deserialize bytes into a KeySpecParams instance using field-based encoding."""
        reader = io.BytesIO(data)

        key_hash = None
        delegate = None

        # NOTE(review): unknown field numbers are not skipped, so their
        # payload bytes would be re-read as the next field number -- verify
        # inputs can only ever contain fields 1 and 2.
        while reader.tell() < len(data):  #  Process all fields
            field_number = read_uvarint(reader)  #  Read the field number
            if field_number == 1:  #  Key Hash
                key_hash = unmarshal_bytes(reader)
            elif field_number == 2:  #  Delegate (if present)
                delegate = unmarshal_string(reader)

        if key_hash is None:
            raise ValueError(" Missing key_hash in KeySpecParams deserialization!")

        return cls(key_hash=key_hash, delegate=delegate)

marshal()

Serialize the KeySpecParams to bytes correctly.

Source code in accumulate\models\key_management.py
 95
 96
 97
 98
 99
100
101
102
103
def marshal(self) -> bytes:
    """Serialize the KeySpecParams to bytes correctly."""
    # NOTE(review): 0x20 (32) is used as an inner marker for the key hash
    # -- confirm against the Accumulate binary-encoding convention.
    key_hash_data = field_marshal_binary(1, field_marshal_binary(0x20, self.key_hash))  #  keyHash encoding with `0x20`

    delegate_data = b""
    if self.delegate:
        delegate_data = field_marshal_binary(2, self.delegate.encode("utf-8"))

    return key_hash_data + delegate_data  #  Concatenating

unmarshal(data) classmethod

Deserialize bytes into a KeySpecParams instance using field-based encoding.

Source code in accumulate\models\key_management.py
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
@classmethod
def unmarshal(cls, data: bytes) -> "KeySpecParams":
    """Deserialize bytes into a KeySpecParams instance using field-based encoding."""
    reader = io.BytesIO(data)

    key_hash = None
    delegate = None

    # NOTE(review): unknown field numbers are not skipped, so their payload
    # bytes would be re-read as the next field number -- verify inputs can
    # only ever contain fields 1 and 2.
    while reader.tell() < len(data):  #  Process all fields
        field_number = read_uvarint(reader)  #  Read the field number
        if field_number == 1:  #  Key Hash
            key_hash = unmarshal_bytes(reader)
        elif field_number == 2:  #  Delegate (if present)
            delegate = unmarshal_string(reader)

    if key_hash is None:
        raise ValueError(" Missing key_hash in KeySpecParams deserialization!")

    return cls(key_hash=key_hash, delegate=delegate)

RemoveKeyOperation dataclass

Represents an operation to remove a key from a key page.

:param entry: The key specification to remove.

Source code in accumulate\models\key_management.py
138
139
140
141
142
143
144
145
@dataclass
class RemoveKeyOperation:
    """
    Represents an operation to remove a key from a key page.

    :param entry: The key specification to remove.
    """
    entry: KeySpec  # the key specification to remove

SetRejectThresholdKeyPageOperation dataclass

Represents an operation to set the rejection threshold for a key page.

:param threshold: The number of signatures required to reject.

Source code in accumulate\models\key_management.py
170
171
172
173
174
175
176
177
@dataclass
class SetRejectThresholdKeyPageOperation:
    """
    Represents an operation to set the rejection threshold for a key page.

    :param threshold: The number of signatures required to reject.
    """
    threshold: int  # signatures required to reject

SetResponseThresholdKeyPageOperation dataclass

Represents an operation to set the response threshold for a key page.

:param threshold: The number of signatures required for a response.

Source code in accumulate\models\key_management.py
180
181
182
183
184
185
186
187
@dataclass
class SetResponseThresholdKeyPageOperation:
    """
    Represents an operation to set the response threshold for a key page.

    :param threshold: The number of signatures required for a response.
    """
    threshold: int  # signatures required for a response

SetThresholdKeyPageOperation dataclass

Represents an operation to set the signature threshold for a key page.

:param threshold: The required number of signatures.

Source code in accumulate\models\key_management.py
160
161
162
163
164
165
166
167
@dataclass
class SetThresholdKeyPageOperation:
    """
    Represents an operation to set the signature threshold for a key page.

    :param threshold: The required number of signatures.
    """
    threshold: int  # required number of signatures

UpdateAllowedKeyPageOperation dataclass

Represents an operation to update the allowed or denied transactions for a key page.

:param allow: List of allowed transaction types. :param deny: List of denied transaction types.

Source code in accumulate\models\key_management.py
190
191
192
193
194
195
196
197
198
199
@dataclass
class UpdateAllowedKeyPageOperation:
    """
    Represents an operation to update the allowed or denied transactions for a key page.

    :param allow: List of allowed transaction types.
    :param deny: List of denied transaction types.
    """
    allow: Optional[List[str]] = None  # transaction types to allow; None = unchanged
    deny: Optional[List[str]] = None   # transaction types to deny; None = unchanged

UpdateKeyOperation dataclass

Represents an operation to update a key in a key page.

:param old_entry: The existing key specification to update. :param new_entry: The new key specification to replace the old one.

Source code in accumulate\models\key_management.py
148
149
150
151
152
153
154
155
156
157
@dataclass
class UpdateKeyOperation:
    """
    Represents an operation to update a key in a key page.

    :param old_entry: The existing key specification to update.
    :param new_entry: The new key specification to replace the old one.
    """
    old_entry: KeySpec  # key specification being replaced
    new_entry: KeySpec  # replacement key specification

key_signature

KeySignature

Bases: ABC

Abstract base class to represent a cryptographic signature.

Source code in accumulate\models\key_signature.py
 7
 8
 9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
class KeySignature(ABC):
    """Interface every cryptographic key signature must implement."""

    @abstractmethod
    def get_signature(self) -> bytes:
        """Return the raw signature bytes."""
        ...

    @abstractmethod
    def get_public_key_hash(self) -> bytes:
        """Return the hash of the signing public key."""
        ...

    @abstractmethod
    def get_public_key(self) -> bytes:
        """Return the signing public key bytes."""
        ...

    @abstractmethod
    def get_signer_version(self) -> int:
        """Return the signer's version number."""
        ...

    @abstractmethod
    def get_timestamp(self) -> int:
        """Return the timestamp attached to the signature."""
        ...

get_public_key() abstractmethod

Return the public key bytes.

Source code in accumulate\models\key_signature.py
20
21
22
23
@abstractmethod
def get_public_key(self) -> bytes:
    """Return the signing public key bytes."""
    pass

get_public_key_hash() abstractmethod

Return the hash of the public key.

Source code in accumulate\models\key_signature.py
15
16
17
18
@abstractmethod
def get_public_key_hash(self) -> bytes:
    """Return the hash of the public key."""
    pass

get_signature() abstractmethod

Return the signature bytes.

Source code in accumulate\models\key_signature.py
10
11
12
13
@abstractmethod
def get_signature(self) -> bytes:
    """Return the signature bytes."""
    pass

get_signer_version() abstractmethod

Return the version of the signer.

Source code in accumulate\models\key_signature.py
25
26
27
28
@abstractmethod
def get_signer_version(self) -> int:
    """Return the version of the signer."""
    pass

get_timestamp() abstractmethod

Return the timestamp of the signature.

Source code in accumulate\models\key_signature.py
30
31
32
33
@abstractmethod
def get_timestamp(self) -> int:
    """Return the timestamp of the signature."""
    pass

node_info

NodeInfo dataclass

Represents information about a network node.

Source code in accumulate\models\node_info.py
 8
 9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
@dataclass
class NodeInfo:
    """
    Describes a single node on an Accumulate network.
    """
    peer_id: str  # node's unique peer identifier
    network: str  # network name (e.g., "mainnet" or "testnet")
    services: List[ServiceAddress]  # services the node provides
    version: str  # node software version
    commit: str  # source commit of that software version

    @classmethod
    def from_dict(cls, data: dict) -> "NodeInfo":
        """
        Build a NodeInfo from its dictionary form; missing keys default to empty.
        """
        raw_services = data.get("services", [])
        return cls(
            peer_id=data.get("peer_id", ""),
            network=data.get("network", ""),
            services=[ServiceAddress.from_dict(entry) for entry in raw_services],
            version=data.get("version", ""),
            commit=data.get("commit", ""),
        )

    def to_dict(self) -> dict:
        """
        Render this NodeInfo as a plain dictionary.
        """
        result = {
            "peer_id": self.peer_id,
            "network": self.network,
            "services": [svc.to_dict() for svc in self.services],
            "version": self.version,
            "commit": self.commit,
        }
        return result

from_dict(data) classmethod

Deserialize a dictionary into a NodeInfo object.

Source code in accumulate\models\node_info.py
19
20
21
22
23
24
25
26
27
28
29
30
@classmethod
def from_dict(cls, data: dict) -> "NodeInfo":
    """
    Deserialize a dictionary into a NodeInfo object.
    """
    return cls(
        peer_id=data.get("peer_id", ""),
        network=data.get("network", ""),
        services=[ServiceAddress.from_dict(svc) for svc in data.get("services", [])],
        version=data.get("version", ""),
        commit=data.get("commit", ""),
    )

to_dict()

Serialize a NodeInfo object into a dictionary.

Source code in accumulate\models\node_info.py
32
33
34
35
36
37
38
39
40
41
42
def to_dict(self) -> dict:
    """
    Serialize a NodeInfo object into a dictionary.
    """
    return {
        "peer_id": self.peer_id,
        "network": self.network,
        "services": [service.to_dict() for service in self.services],
        "version": self.version,
        "commit": self.commit,
    }

options

FaucetOptions dataclass

Options for requesting tokens from the faucet.

Source code in accumulate\models\options.py
37
38
39
40
@dataclass
class FaucetOptions:
    """Options accepted by a faucet token request."""
    token: Optional[str] = None  # URL of the token to dispense

RangeOptions dataclass

Options for querying ranges.

Source code in accumulate\models\options.py
 7
 8
 9
10
11
12
13
14
15
16
17
18
19
20
21
22
@dataclass
class RangeOptions:
    """Pagination options for range queries."""
    start: Optional[int] = None  # index of the first result
    count: Optional[int] = None  # maximum number of results
    expand: Optional[bool] = None  # request expanded records
    from_end: Optional[bool] = False  # count backwards from the end

    def to_dict(self) -> dict:
        """Return the options as a plain dictionary."""
        return {
            name: getattr(self, name)
            for name in ("start", "count", "expand", "from_end")
        }

to_dict()

Convert RangeOptions to a dictionary.

Source code in accumulate\models\options.py
15
16
17
18
19
20
21
22
def to_dict(self) -> dict:
    """Convert RangeOptions to a dictionary."""
    return {
        "start": self.start,
        "count": self.count,
        "expand": self.expand,
        "from_end": self.from_end,
    }

ReceiptOptions dataclass

Options for querying receipts.

Source code in accumulate\models\options.py
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
@dataclass
class ReceiptOptions:
    """Options controlling receipt queries."""
    for_any: bool = False  # match any receipt
    for_height: Optional[int] = None  # match receipts at one specific height

    def to_dict(self) -> dict:
        """Return the options as a plain dictionary."""
        return {"for_any": self.for_any, "for_height": self.for_height}

    def is_valid(self) -> bool:
        """A query is valid when at least one matching criterion was supplied."""
        if self.for_any:
            return self.for_any
        return self.for_height is not None

is_valid()

Validate the receipt options.

Source code in accumulate\models\options.py
63
64
65
def is_valid(self) -> bool:
    """Validate the receipt options."""
    return self.for_any or self.for_height is not None

to_dict()

Convert ReceiptOptions to a dictionary.

Source code in accumulate\models\options.py
56
57
58
59
60
61
def to_dict(self) -> dict:
    """Convert ReceiptOptions to a dictionary."""
    return {
        "for_any": self.for_any,
        "for_height": self.for_height,
    }

SubmitOptions dataclass

Options for submitting transactions.

Source code in accumulate\models\options.py
24
25
26
27
28
@dataclass
class SubmitOptions:
    """Options accepted when submitting an envelope."""
    verify: Optional[bool] = True  # verify the envelope before submission
    wait: Optional[bool] = True  # block until inclusion in a block or rejection

SubscribeOptions dataclass

Options for subscribing to events.

Source code in accumulate\models\options.py
43
44
45
46
47
@dataclass
class SubscribeOptions:
    """Options accepted when subscribing to events."""
    partition: Optional[str] = None  # partition to watch
    account: Optional[str] = None  # account URL to watch

ValidateOptions dataclass

Options for validating transactions.

Source code in accumulate\models\options.py
31
32
33
34
@dataclass
class ValidateOptions:
    """Options accepted when validating transactions."""
    full: Optional[bool] = True  # fully validate signatures and transactions

protocol

AccountAuthOperation

Base class for account authorization operations.

Source code in accumulate\models\protocol.py
151
152
153
154
class AccountAuthOperation:
    """Common base for operations that modify an account's authorities."""

    def __init__(self, authority: URL):
        # The authority (key book) URL this operation targets.
        self.authority = authority

AccountWithTokens

Interface for accounts that manage tokens.

Source code in accumulate\models\protocol.py
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
class AccountWithTokens:
    """Interface for accounts that hold a token balance."""

    def __init__(self, url: URL, balance: Decimal, token_url: URL):
        self.url = url  # account URL
        self.balance = balance  # current token balance
        self.token_url = token_url  # URL of the token being held

    def token_balance(self) -> Decimal:
        """Return the current balance."""
        return self.balance

    def credit_tokens(self, amount: Decimal) -> bool:
        """Add `amount` to the balance; non-positive amounts are rejected."""
        if amount <= 0:
            return False
        self.balance = self.balance + amount
        return True

    def can_debit_tokens(self, amount: Decimal) -> bool:
        """True when `amount` is positive and covered by the balance."""
        return amount > 0 and self.balance >= amount

    def debit_tokens(self, amount: Decimal) -> bool:
        """Remove `amount` from the balance when permitted."""
        if self.can_debit_tokens(amount):
            self.balance = self.balance - amount
            return True
        return False

    def get_token_url(self) -> URL:
        """Return the URL of the token this account holds."""
        return self.token_url

AddAccountAuthorityOperation

Bases: AccountAuthOperation

Add a new authority to an account.

Source code in accumulate\models\protocol.py
167
168
169
class AddAccountAuthorityOperation(AccountAuthOperation):
    """Operation that attaches an additional authority to an account."""
    pass

AllowedTransactions

Bit mask for allowed transactions.

Source code in accumulate\models\protocol.py
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
class AllowedTransactions:
    """Bit mask tracking which transaction types are permitted."""

    def __init__(self, value: int = 0):
        self.value = value  # raw bit-mask integer

    def set(self, bit: int) -> None:
        """Turn the given bit on."""
        self.value = self.value | (1 << bit)

    def clear(self, bit: int) -> None:
        """Turn the given bit off."""
        self.value = self.value & ~(1 << bit)

    def is_set(self, bit: int) -> bool:
        """Report whether the given bit is on."""
        return bool(self.value >> bit & 1)

    def unpack(self) -> List[int]:
        """Return the positions of all set bits, in ascending order."""
        return [i for i in range(self.value.bit_length()) if self.value >> i & 1]

    def get_enum_value(self) -> int:
        """Expose the raw integer mask."""
        return self.value

    def set_enum_value(self, value: int) -> None:
        """Replace the raw integer mask."""
        self.value = value

    def to_json(self) -> str:
        """Serialize the set bit positions as a JSON array."""
        return json.dumps(self.unpack())

    @classmethod
    def from_json(cls, json_str: str) -> "AllowedTransactions":
        """Rebuild a mask from a JSON array of bit positions."""
        mask = cls()
        for bit in json.loads(json_str):
            mask.set(bit)
        return mask

clear(bit)

Clear the bit (set to 0).

Source code in accumulate\models\protocol.py
224
225
226
def clear(self, bit: int) -> None:
    """Clear the bit (set to 0)."""
    self.value &= ~(1 << bit)

from_json(json_str) classmethod

Deserialize from JSON.

Source code in accumulate\models\protocol.py
256
257
258
259
260
261
262
263
@classmethod
def from_json(cls, json_str: str) -> "AllowedTransactions":
    """Deserialize from JSON."""
    bits = json.loads(json_str)
    instance = cls()
    for bit in bits:
        instance.set(bit)
    return instance

get_enum_value()

Get the underlying integer value.

Source code in accumulate\models\protocol.py
244
245
246
def get_enum_value(self) -> int:
    """Get the underlying integer value."""
    return self.value

is_set(bit)

Check if the bit is set.

Source code in accumulate\models\protocol.py
228
229
230
def is_set(self, bit: int) -> bool:
    """Check if the bit is set."""
    return (self.value & (1 << bit)) != 0

set(bit)

Set the bit to 1.

Source code in accumulate\models\protocol.py
220
221
222
def set(self, bit: int) -> None:
    """Set the bit to 1."""
    self.value |= (1 << bit)

set_enum_value(value)

Set the value from an integer.

Source code in accumulate\models\protocol.py
248
249
250
def set_enum_value(self, value: int) -> None:
    """Set the value from an integer."""
    self.value = value

to_json()

Serialize the object to JSON.

Source code in accumulate\models\protocol.py
252
253
254
def to_json(self) -> str:
    """Serialize the object to JSON."""
    return json.dumps(self.unpack())

unpack()

List all set bits.

Source code in accumulate\models\protocol.py
232
233
234
235
236
237
238
239
240
241
242
def unpack(self) -> List[int]:
    """List all set bits."""
    bits = []
    value = self.value
    bit_index = 0
    while value > 0:
        if value & 1:
            bits.append(bit_index)
        value >>= 1
        bit_index += 1
    return bits

DisableAccountAuthOperation

Bases: AccountAuthOperation

Disable authorization for an account.

Source code in accumulate\models\protocol.py
162
163
164
class DisableAccountAuthOperation(AccountAuthOperation):
    """Operation that turns authorization checking off for an account."""
    pass

EnableAccountAuthOperation

Bases: AccountAuthOperation

Enable authorization for an account.

Source code in accumulate\models\protocol.py
157
158
159
class EnableAccountAuthOperation(AccountAuthOperation):
    """Operation that turns authorization checking on for an account."""
    pass

LiteTokenAccount

Bases: AccountWithTokens

Represents a lite token account.

Source code in accumulate\models\protocol.py
127
128
129
class LiteTokenAccount(AccountWithTokens):
    """A lite (keyless-identity) token account."""
    pass

Receipt dataclass

Represents a receipt with block metadata.

Source code in accumulate\models\protocol.py
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
@dataclass
class Receipt:
    """Block metadata attached to a transaction receipt."""
    local_block: Optional[int] = None
    local_block_time: Optional[str] = None  # ISO 8601 datetime string
    major_block: Optional[int] = None

    def to_dict(self) -> dict:
        """Render the receipt as a plain dictionary."""
        return {
            "local_block": self.local_block,
            "local_block_time": self.local_block_time,
            "major_block": self.major_block,
        }

    @classmethod
    def from_dict(cls, data: dict) -> "Receipt":
        """Build a receipt from a dictionary, validating any timestamp."""
        block_time = data.get("local_block_time")
        if block_time:
            cls._validate_iso8601(block_time)

        return cls(
            local_block=data.get("local_block"),
            local_block_time=block_time,
            major_block=data.get("major_block"),
        )

    @staticmethod
    def _validate_iso8601(date_str: str):
        """Raise ValueError unless `date_str` parses as ISO 8601."""
        try:
            datetime.fromisoformat(date_str)
        except ValueError:
            raise ValueError(f"Invalid ISO 8601 datetime: {date_str}")

_validate_iso8601(date_str) staticmethod

Validate the string is in ISO 8601 format.

Source code in accumulate\models\protocol.py
205
206
207
208
209
210
211
@staticmethod
def _validate_iso8601(date_str: str):
    """Validate the string is in ISO 8601 format."""
    try:
        datetime.fromisoformat(date_str)
    except ValueError:
        raise ValueError(f"Invalid ISO 8601 datetime: {date_str}")

from_dict(data) classmethod

Create a receipt from a dictionary.

Source code in accumulate\models\protocol.py
192
193
194
195
196
197
198
199
200
201
202
203
@classmethod
def from_dict(cls, data: dict) -> "Receipt":
    """Create a receipt from a dictionary."""
    local_block_time = data.get("local_block_time")
    if local_block_time:
        cls._validate_iso8601(local_block_time)

    return cls(
        local_block=data.get("local_block"),
        local_block_time=local_block_time,
        major_block=data.get("major_block"),
    )

to_dict()

Convert the receipt to a dictionary.

Source code in accumulate\models\protocol.py
184
185
186
187
188
189
190
def to_dict(self) -> dict:
    """Convert the receipt to a dictionary."""
    return {
        "local_block": self.local_block,
        "local_block_time": self.local_block_time,
        "major_block": self.major_block,
    }

RemoveAccountAuthorityOperation

Bases: AccountAuthOperation

Remove an authority from an account.

Source code in accumulate\models\protocol.py
172
173
174
class RemoveAccountAuthorityOperation(AccountAuthOperation):
    """Operation that detaches an authority from an account."""
    pass

TokenAccount

Bases: AccountWithTokens

Represents a standard token account.

Source code in accumulate\models\protocol.py
132
133
134
class TokenAccount(AccountWithTokens):
    """A standard (identity-backed) token account."""
    pass

TokenIssuer

Represents a token issuer.

Source code in accumulate\models\protocol.py
137
138
139
140
141
142
143
144
145
146
147
148
class TokenIssuer:
    """Represents a token issuer with an optional total supply limit."""

    def __init__(self, issued: Decimal, supply_limit: Optional[Decimal] = None):
        self.issued = issued  # total amount issued so far
        self.supply_limit = supply_limit  # None means unlimited supply

    def issue(self, amount: Decimal) -> bool:
        """Issue `amount` tokens, respecting the supply limit.

        :param amount: Amount to add to the issued total.
        :return: True on success; False when a supply limit is set and the new
            total would exceed it.

        Fix: the original incremented `self.issued` before checking the limit,
        so a rejected issuance still permanently inflated the recorded total.
        The issued amount is now only updated when the issuance succeeds.
        """
        new_total = self.issued + amount
        if self.supply_limit is not None and new_total > self.supply_limit:
            return False
        self.issued = new_total
        return True

acme_url()

Returns the URL for the ACME token.

Source code in accumulate\models\protocol.py
18
19
20
def acme_url() -> URL:
    """Return the canonical URL of the ACME token."""
    return URL(authority=ACME)

lite_data_address(chain_id)

Generates a lite data address from a chain ID.

Source code in accumulate\models\protocol.py
28
29
30
31
32
33
def lite_data_address(chain_id: bytes) -> Optional[URL]:
    """Generates a lite data address from a chain ID.

    :param chain_id: Chain ID bytes; must be at least 32 bytes long.
    :raises ValueError: If chain_id is shorter than 32 bytes.
    """
    # NOTE(review): the message says "must be 32 bytes" but the check only
    # rejects inputs SHORTER than 32 bytes — longer inputs pass. Confirm intent.
    if len(chain_id) < 32:
        raise ValueError("chain_id must be 32 bytes long")
    # NOTE(review): hex()[:32] keeps only the first 16 bytes (32 hex chars) of
    # the chain ID — verify this matches the protocol's lite data address form.
    key_str = chain_id.hex()[:32]
    return URL(authority=key_str)

lite_token_address(pub_key, token_url_str)

Generates a lite token account URL from a public key and token URL.

Source code in accumulate\models\protocol.py
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
def lite_token_address(pub_key: bytes, token_url_str: str) -> Optional[URL]:
    """Generates a lite token account URL from a public key and token URL.

    :param pub_key: Public key bytes used to derive the account authority.
    :param token_url_str: Token URL, with or without the acc:// prefix.
    :raises ValueError: If the token URL is malformed.

    Fix: replaced leftover print() debug statements with logger.debug so the
    library does not write to stdout.
    """
    logger = logging.getLogger(__name__)
    try:
        # Normalize and validate the token URL.
        token_url_str = normalize_acc_url(token_url_str)
        logger.debug("Normalized token URL: %s", token_url_str)

        token_url = URL.parse(token_url_str)
        logger.debug("Parsed token URL: %s", token_url)

        # A path-less authority is only acceptable when it is a lite-account
        # hex string (40-64 hex chars) or a dotted identity.
        if token_url.path == "" and not re.match(r"^[a-fA-F0-9]{40,64}$", token_url.authority):
            if '.' not in token_url.authority:  # Check if it's a valid ADI
                raise ValueError("Invalid token URL: Missing path or invalid identity format.")

        if token_url.query or token_url.fragment or token_url.user_info:
            raise ValueError("Token URL cannot include query, fragment, or user info.")

        # Authority = first 20 bytes of SHA-256(pub_key) plus a 4-byte
        # checksum of that hex string, both hex-encoded.
        key_hash = sha256(pub_key).hexdigest()[:40]
        checksum = sha256(key_hash.encode()).hexdigest()[-8:]
        authority = f"{key_hash}{checksum}"
        logger.debug("Generated lite token authority: %s", authority)

        return URL(authority=authority, path=token_url.path)

    except ValueError as ve:
        logger.error("Failed to generate lite token address: %s", ve)
        raise ValueError(f"Invalid token URL '{token_url_str}': {ve}") from ve

normalize_acc_url(url_str)

Ensures a URL starts with the 'acc://' prefix.

Source code in accumulate\models\protocol.py
50
51
52
53
54
def normalize_acc_url(url_str: str) -> str:
    """Return `url_str` guaranteed to carry the 'acc://' scheme prefix."""
    prefix = "acc://"
    return url_str if url_str.startswith(prefix) else prefix + url_str

parse_lite_address(url)

Parses and validates a lite address.

Source code in accumulate\models\protocol.py
36
37
38
39
40
41
42
43
44
45
46
47
48
def parse_lite_address(url: URL) -> Optional[bytes]:
    """Decode and checksum-validate a lite address authority.

    The authority is hex: a payload followed by a 4-byte checksum equal to
    the last 4 bytes of SHA-256(payload). Returns the payload bytes.
    """
    try:
        raw = bytes.fromhex(url.authority)
        if len(raw) <= 4:
            raise ValueError("Too short")
        payload, given = raw[:-4], raw[-4:]
        expected = sha256(payload).digest()[-4:]
        if expected != given:
            raise ValueError("Invalid checksum")
        return payload
    except Exception as e:
        raise ValueError(f"Error parsing lite address: {e}")

unknown_url()

Returns the URL for unknown entities.

Source code in accumulate\models\protocol.py
23
24
25
def unknown_url() -> URL:
    """Return the placeholder URL used for unknown entities."""
    return URL(authority=UNKNOWN)

responses

SubmissionResponse dataclass

Represents the response for a transaction submission.

Source code in accumulate\models\responses.py
 8
 9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
@dataclass
class SubmissionResponse:
    """Represents the response for a transaction submission.

    Fix: removed leftover [DEBUG] print statements that wrote to stdout on
    every serialization/deserialization.
    """
    status: Optional[TransactionStatus] = None  # final transaction status, if known
    success: bool = False  # whether the submission succeeded
    message: Optional[str] = None  # optional human-readable message
    receipt: Optional[Receipt] = None  # inclusion receipt, if available

    def to_dict(self) -> dict:
        """Serialize the response to a dictionary."""
        return {
            "status": self.status.to_dict() if self.status else None,
            "success": self.success,
            "message": self.message,
            "receipt": self.receipt.to_dict() if self.receipt else None,
        }

    @classmethod
    def from_dict(cls, data: dict) -> "SubmissionResponse":
        """Deserialize the response from a dictionary."""
        status = TransactionStatus.from_dict(data["status"]) if data.get("status") else None
        receipt = Receipt.from_dict(data["receipt"]) if data.get("receipt") else None
        return cls(
            status=status,
            success=data.get("success", False),
            message=data.get("message"),
            receipt=receipt,
        )

from_dict(data) classmethod

Deserialize the response from a dictionary.

Source code in accumulate\models\responses.py
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
@classmethod
def from_dict(cls, data: dict) -> "SubmissionResponse":
    """Deserialize the response from a dictionary."""
    print(f"[DEBUG] Deserializing SubmissionResponse from data: {data}")
    status = TransactionStatus.from_dict(data["status"]) if data.get("status") else None
    print(f"[DEBUG] Deserialized status: {status.to_dict() if status else None}")
    receipt = Receipt.from_dict(data["receipt"]) if data.get("receipt") else None
    print(f"[DEBUG] Deserialized receipt: {receipt.to_dict() if receipt else None}")
    response = cls(
        status=status,
        success=data.get("success", False),
        message=data.get("message"),
        receipt=receipt,
    )
    print(f"[DEBUG] Final SubmissionResponse object: {response}")
    return response

to_dict()

Serialize the response to a dictionary.

Source code in accumulate\models\responses.py
16
17
18
19
20
21
22
23
24
25
def to_dict(self) -> dict:
    """Serialize the response to a dictionary."""
    serialized_data = {
        "status": self.status.to_dict() if self.status else None,
        "success": self.success,
        "message": self.message,
        "receipt": self.receipt.to_dict() if self.receipt else None,  # Serialize Receipt
    }
    print(f"[DEBUG] Serialized SubmissionResponse: {serialized_data}")
    return serialized_data

TransactionResultSet dataclass

Represents a set of transaction results returned from a query.

Source code in accumulate\models\responses.py
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
@dataclass
class TransactionResultSet:
    """Represents a set of transaction results returned from a query.

    Fix: removed leftover [DEBUG] print statements that wrote to stdout on
    every add/serialize/deserialize call.
    """
    results: List[TransactionStatus] = None  # normalized to [] by __post_init__

    def __post_init__(self):
        # Avoid a shared mutable default: each instance gets its own list.
        if self.results is None:
            self.results = []

    def add_result(self, result: TransactionStatus):
        """Add a transaction status to the results."""
        self.results.append(result)

    def to_dict(self) -> dict:
        """Convert the result set to a dictionary representation."""
        return {"results": [result.to_dict() for result in self.results]}

    @classmethod
    def from_dict(cls, data: dict) -> "TransactionResultSet":
        """Create a TransactionResultSet from a dictionary."""
        results = [
            TransactionStatus.from_dict(item) for item in data.get("results", [])
        ]
        return cls(results)

add_result(result)

Add a transaction status to the results.

Source code in accumulate\models\responses.py
54
55
56
57
def add_result(self, result: TransactionStatus):
    """Add a transaction status to the results."""
    print(f"[DEBUG] Adding result: {result.to_dict()}")
    self.results.append(result)

from_dict(data) classmethod

Create a TransactionResultSet from a dictionary.

Source code in accumulate\models\responses.py
65
66
67
68
69
70
71
72
73
74
75
76
@classmethod
def from_dict(cls, data: dict) -> "TransactionResultSet":
    """Create a TransactionResultSet from a dictionary."""
    print(f"[DEBUG] Deserializing TransactionResultSet from data: {data}")
    results = [
        TransactionStatus.from_dict(item) for item in data.get("results", [])
    ]
    for i, result in enumerate(results):
        print(f"[DEBUG] Deserialized result {i}: {result.to_dict()}")
    result_set = cls(results)
    print(f"[DEBUG] Final TransactionResultSet object: {result_set}")
    return result_set

to_dict()

Convert the result set to a dictionary representation.

Source code in accumulate\models\responses.py
59
60
61
62
63
def to_dict(self) -> dict:
    """Convert the result set to a dictionary representation."""
    serialized_data = {"results": [result.to_dict() for result in self.results]}
    print(f"[DEBUG] Serialized TransactionResultSet: {serialized_data}")
    return serialized_data

search

AnchorSearchQuery

Bases: SearchQuery

Search for an anchor in an account.

Source code in accumulate\models\search.py
35
36
37
38
39
40
41
42
43
44
45
46
class AnchorSearchQuery(SearchQuery):
    """Search for an anchor in an account."""

    def __init__(self, anchor: Union[bytes, str], include_receipt: Optional[bool] = None):
        """
        Args:
            anchor (Union[bytes, str]): The anchor value (hash) to search for.
            include_receipt (Optional[bool]): Whether to include a receipt in the response.
        """
        if isinstance(anchor, bytes):
            anchor = anchor.hex()
        params = {}
        if include_receipt is not None:
            params["include_receipt"] = include_receipt
        super().__init__(QueryType.ANCHOR_SEARCH, anchor, params)

__init__(anchor, include_receipt=None)

Parameters:

Name Type Description Default
anchor Union[bytes, str]

The anchor value (hash) to search for.

required
include_receipt Optional[bool]

Whether to include a receipt in the response.

None
Source code in accumulate\models\search.py
38
39
40
41
42
43
44
45
46
def __init__(self, anchor: Union[bytes, str], include_receipt: Optional[bool] = None):
    """
    Args:
        anchor (Union[bytes, str]): The anchor value (hash) to search for.
        include_receipt (Optional[bool]): Whether to include a receipt in the response.
    """
    anchor_value = anchor.hex() if isinstance(anchor, bytes) else anchor
    extra_params = {"include_receipt": include_receipt} if include_receipt is not None else {}
    super().__init__(QueryType.ANCHOR_SEARCH, anchor_value, extra_params)

DelegateSearchQuery

Bases: SearchQuery

Search for a delegate in an account.

Source code in accumulate\models\search.py
61
62
63
64
65
66
67
68
69
class DelegateSearchQuery(SearchQuery):
    """Search for a delegate in an account."""

    def __init__(self, delegate_url: str):
        """
        Args:
            delegate_url (str): URL of the delegate to look up.
        """
        super().__init__(QueryType.DELEGATE_SEARCH, delegate_url)

__init__(delegate_url)

Parameters:

Name Type Description Default
delegate_url str

The URL of the delegate being searched.

required
Source code in accumulate\models\search.py
64
65
66
67
68
69
def __init__(self, delegate_url: str):
    """
    Args:
        delegate_url (str): The URL of the delegate being searched.
    """
    super().__init__(QueryType.DELEGATE_SEARCH, delegate_url)

PublicKeySearchQuery

Bases: SearchQuery

Search for a public key in an account.

Source code in accumulate\models\search.py
48
49
50
51
52
53
54
55
56
57
58
class PublicKeySearchQuery(SearchQuery):
    """Search for a public key in an account."""

    def __init__(self, public_key: str, key_type: Optional[str] = None):
        """
        Args:
            public_key (str): The public key, address, or public key hash.
            key_type (Optional[str]): The type of public key (e.g., 'ed25519', 'btc', 'eth').
        """
        params = {}
        if key_type:
            params["type"] = key_type
        super().__init__(QueryType.PUBLIC_KEY_SEARCH, public_key, params)

__init__(public_key, key_type=None)

Parameters:

Name Type Description Default
public_key str

The public key, address, or public key hash.

required
key_type Optional[str]

The type of public key (e.g., 'ed25519', 'btc', 'eth').

None
Source code in accumulate\models\search.py
51
52
53
54
55
56
57
58
def __init__(self, public_key: str, key_type: Optional[str] = None):
    """
    Args:
        public_key (str): The public key, address, or public key hash.
        key_type (Optional[str]): The type of public key (e.g., 'ed25519', 'btc', 'eth').
    """
    extra_params = {"type": key_type} if key_type else {}
    super().__init__(QueryType.PUBLIC_KEY_SEARCH, public_key, extra_params)

SearchQuery

Bases: Query

Base class for all search queries (Anchor, Public Key, Delegate).

Source code in accumulate\models\search.py
 8
 9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
class SearchQuery(Query):
    """Base class for all search queries (Anchor, Public Key, Delegate)."""

    def __init__(self, query_type: QueryType, value: str, extra_params: Optional[Dict[str, Any]] = None):
        """
        Args:
            query_type (QueryType): The type of search query (anchor, publicKey, delegate).
            value (str): The value being searched (hash, public key, or delegate URL).
            extra_params (Optional[Dict[str, Any]]): Additional query parameters.
        """
        super().__init__(query_type)
        self.value = value
        self.extra_params = extra_params or {}

    def is_valid(self):
        """Raise AccumulateError when no search value was supplied."""
        if self.value:
            return
        raise AccumulateError(f"{self.query_type.name} search requires a valid value.")

    def to_dict(self) -> dict:
        """Convert the search query into a dictionary that can be used with `client.search()`."""
        return {
            "value": self.value,
            "extra_params": self.extra_params,
        }

__init__(query_type, value, extra_params=None)

Parameters:

Name Type Description Default
query_type QueryType

The type of search query (anchor, publicKey, delegate).

required
value str

The value being searched (hash, public key, or delegate URL).

required
extra_params Optional[Dict[str, Any]]

Additional query parameters.

None
Source code in accumulate\models\search.py
11
12
13
14
15
16
17
18
19
20
def __init__(self, query_type: QueryType, value: str, extra_params: Optional[Dict[str, Any]] = None):
    """
    Args:
        query_type (QueryType): The type of search query (anchor, publicKey, delegate).
        value (str): The value being searched (hash, public key, or delegate URL).
        extra_params (Optional[Dict[str, Any]]): Additional query parameters.
    """
    super().__init__(query_type)
    self.value = value
    self.extra_params = extra_params or {}

is_valid()

Validate the search query.

Source code in accumulate\models\search.py
22
23
24
25
def is_valid(self):
    """Validate the search query."""
    if not self.value:
        raise AccumulateError(f"{self.query_type.name} search requires a valid value.")

to_dict()

Convert the search query into a dictionary that can be used with client.search().

Source code in accumulate\models\search.py
27
28
29
30
31
32
def to_dict(self) -> dict:
    """Convert the search query into a dictionary that can be used with `client.search()`."""
    return {
        "value": self.value,
        "extra_params": self.extra_params
    }

service

FindServiceOptions dataclass

Represents options for finding a service in the Accumulate network.

Source code in accumulate\models\service.py
 80
 81
 82
 83
 84
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
@dataclass
class FindServiceOptions:
    """
    Represents options for finding a service in the Accumulate network.
    """
    network: str  # network name
    service: Optional[ServiceAddress] = None  # service address to search for
    known: Optional[bool] = None  # Restrict results to known peers
    timeout: Optional[timedelta] = None  # timeout for querying the DHT

    def to_dict(self) -> dict:
        """
        Serialize a FindServiceOptions object into a dictionary.
        Removes `timeout` if it is None to prevent JSON-RPC errors.
        """
        params = {
            "network": self.network,
            "service": self.service.to_dict() if self.service else None,
            "known": self.known,
        }

        # Only include timeout if it is set
        if self.timeout is not None:
            params["timeout"] = self.timeout.total_seconds()

        return params

    @classmethod
    def from_dict(cls, data: dict) -> "FindServiceOptions":
        """
        Deserialize a dictionary into a FindServiceOptions object.

        Guards on the *value* rather than key presence: a payload such as
        {"service": None} or {"timeout": None} previously crashed with a
        TypeError inside ServiceAddress.from_dict / timedelta.
        """
        service = data.get("service")
        timeout = data.get("timeout")
        return cls(
            network=data.get("network", ""),
            # A JSON null (or absent key) maps to no service filter.
            service=ServiceAddress.from_dict(service) if service else None,
            known=data.get("known"),
            timeout=timedelta(seconds=timeout) if timeout is not None else None,
        )

from_dict(data) classmethod

Deserialize a dictionary into a FindServiceOptions object.

Source code in accumulate\models\service.py
108
109
110
111
112
113
114
115
116
117
118
@classmethod
def from_dict(cls, data: dict) -> "FindServiceOptions":
    """
    Deserialize a dictionary into a FindServiceOptions object.
    """
    return cls(
        network=data.get("network", ""),
        service=ServiceAddress.from_dict(data["service"]) if "service" in data else None,
        known=data.get("known"),
        timeout=timedelta(seconds=data["timeout"]) if "timeout" in data else None,
    )

to_dict()

Serialize a FindServiceOptions object into a dictionary. Removes timeout if it is None to prevent JSON-RPC errors.

Source code in accumulate\models\service.py
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
def to_dict(self) -> dict:
    """
    Serialize a FindServiceOptions object into a dictionary.
    Removes `timeout` if it is None to prevent JSON-RPC errors.
    """
    params = {
        "network": self.network,
        "service": self.service.to_dict() if self.service else None,
        "known": self.known,
    }

    # Only include timeout if it is set
    if self.timeout is not None:
        params["timeout"] = self.timeout.total_seconds()

    return params

FindServiceResult dataclass

Represents the result of a service search in the Accumulate network.

Source code in accumulate\models\service.py
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
@dataclass
class FindServiceResult:
    """
    Represents the result of a service search in the Accumulate network.
    """
    peer_id: str  # unique ID of the peer providing the service
    status: str  # status of the known peer
    addresses: List[str]  # list of addresses associated with the service

    def to_dict(self) -> dict:
        """Serialize this search result into a plain dictionary."""
        return dict(
            peer_id=self.peer_id,
            status=self.status,
            addresses=self.addresses,
        )

    @classmethod
    def from_dict(cls, data: dict) -> "FindServiceResult":
        """Build a FindServiceResult from a dictionary, defaulting missing fields."""
        peer = data.get("peer_id", "")
        state = data.get("status", "")
        addrs = data.get("addresses", [])
        return cls(peer_id=peer, status=state, addresses=addrs)

from_dict(data) classmethod

Deserialize a dictionary into a FindServiceResult object.

Source code in accumulate\models\service.py
140
141
142
143
144
145
146
147
148
149
@classmethod
def from_dict(cls, data: dict) -> "FindServiceResult":
    """
    Deserialize a dictionary into a FindServiceResult object.
    """
    return cls(
        peer_id=data.get("peer_id", ""),
        status=data.get("status", ""),
        addresses=data.get("addresses", []),
    )

to_dict()

Serialize a FindServiceResult object into a dictionary.

Source code in accumulate\models\service.py
130
131
132
133
134
135
136
137
138
def to_dict(self) -> dict:
    """
    Serialize a FindServiceResult object into a dictionary.
    """
    return {
        "peer_id": self.peer_id,
        "status": self.status,
        "addresses": self.addresses,
    }

ServiceAddress dataclass

Represents a service address with type and argument.

Source code in accumulate\models\service.py
 8
 9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
@dataclass
class ServiceAddress:
    """
    Represents a service address with type and argument.
    """
    service_type: int  # Type of the service, represented as an integer
    argument: Optional[str] = None  # Optional argument for the service

    @property
    def type(self) -> int:
        """Alias for service_type."""
        return self.service_type

    def __str__(self) -> str:
        """
        Returns {type}:{argument}, or {type} if the argument is empty.
        """
        # Integer types render as bare (un-prefixed) hex; anything else as-is.
        if isinstance(self.service_type, int):
            prefix = hex(self.service_type)[2:]
        else:
            prefix = str(self.service_type)
        if not self.argument:
            return prefix
        return f"{prefix}:{self.argument}"

    def to_dict(self) -> dict:
        """
        Converts the object to a dictionary.
        """
        return {"type": self.service_type, "argument": self.argument}

    @staticmethod
    def from_dict(data: dict) -> "ServiceAddress":
        """
        Creates a ServiceAddress from a dictionary.
        """
        return ServiceAddress(data["type"], data.get("argument"))

    @staticmethod
    def parse_service_address(address: str) -> "ServiceAddress":
        """
        Parses a string into a ServiceAddress.

        :param address: A string representing the service address in the format {type}:{argument}.
        :return: A ServiceAddress instance.
        :raises ValueError: If the format is invalid.
        """
        if not address or ":" not in address:
            raise ValueError("Invalid service address: Missing ':' separator or empty string")

        # Split only on the first ':' so the argument may itself contain colons.
        head, _, tail = address.partition(":")
        if not head or not tail:
            raise ValueError("Invalid service address: Missing type or argument")

        # "0x..." is parsed as hex; bare digits are parsed as decimal.
        base = 16 if head.startswith("0x") else 10
        return ServiceAddress(service_type=int(head, base), argument=tail)

    @staticmethod
    def unpack_address(address: str) -> dict:
        """
        Simulates unpacking of an address string for its components.

        :param address: A string representing the service address.
        :return: A dictionary containing type and argument of the service address.
        :raises ValueError: If the address cannot be parsed.
        """
        try:
            parsed = ServiceAddress.parse_service_address(address)
        except Exception as e:
            raise ValueError(f"Failed to parse address: {e}")
        return {"type": parsed.type, "argument": parsed.argument}

type property

Alias for service_type.

__str__()

Returns {type}:{argument}, or {type} if the argument is empty.

Source code in accumulate\models\service.py
21
22
23
24
25
26
def __str__(self) -> str:
    """
    Returns {type}:{argument}, or {type} if the argument is empty.
    """
    base = hex(self.service_type)[2:] if isinstance(self.service_type, int) else str(self.service_type)
    return f"{base}:{self.argument}" if self.argument else base

from_dict(data) staticmethod

Creates a ServiceAddress from a dictionary.

Source code in accumulate\models\service.py
34
35
36
37
38
39
@staticmethod
def from_dict(data: dict) -> "ServiceAddress":
    """
    Creates a ServiceAddress from a dictionary.
    """
    return ServiceAddress(data["type"], data.get("argument"))

parse_service_address(address) staticmethod

Parses a string into a ServiceAddress.

:param address: A string representing the service address in the format {type}:{argument}. :return: A ServiceAddress instance. :raises ValueError: If the format is invalid.

Source code in accumulate\models\service.py
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
@staticmethod
def parse_service_address(address: str) -> "ServiceAddress":
    """
    Parses a string into a ServiceAddress.

    :param address: A string representing the service address in the format {type}:{argument}.
    :return: A ServiceAddress instance.
    :raises ValueError: If the format is invalid.
    """
    if not address or ":" not in address:
        raise ValueError("Invalid service address: Missing ':' separator or empty string")

    parts = address.split(":", maxsplit=1)
    if len(parts) < 2 or not parts[0] or not parts[1]:
        raise ValueError("Invalid service address: Missing type or argument")

    service_type = int(parts[0], 16) if parts[0].startswith("0x") else int(parts[0])
    return ServiceAddress(service_type=service_type, argument=parts[1])

to_dict()

Converts the object to a dictionary.

Source code in accumulate\models\service.py
28
29
30
31
32
def to_dict(self) -> dict:
    """
    Converts the object to a dictionary.
    """
    return {"type": self.service_type, "argument": self.argument}

unpack_address(address) staticmethod

Simulates unpacking of an address string for its components.

:param address: A string representing the service address. :return: A dictionary containing type and argument of the service address. :raises ValueError: If the address cannot be parsed.

Source code in accumulate\models\service.py
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
@staticmethod
def unpack_address(address: str) -> dict:
    """
    Simulates unpacking of an address string for its components.

    :param address: A string representing the service address.
    :return: A dictionary containing type and argument of the service address.
    :raises ValueError: If the address cannot be parsed.
    """
    try:
        service_address = ServiceAddress.parse_service_address(address)
        return {
            "type": service_address.type,
            "argument": service_address.argument,
        }
    except Exception as e:
        raise ValueError(f"Failed to parse address: {e}")

signatures

AuthoritySignature

Bases: Signature

Source code in accumulate\models\signatures.py
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
class AuthoritySignature(Signature):
    """Signature recorded by an authority, optionally carrying a vote and txid."""

    def __init__(self, origin: URL, authority: URL, vote: Optional[str], txid: Optional[str]):
        super().__init__('AuthoritySignature', origin)
        self.authority = authority
        self.vote = vote
        self.txid = txid

    def hash(self) -> bytes:
        """Calculate hash for AuthoritySignature."""
        # Strip the URL scheme before hashing; append the vote only when present.
        payload = str(self.authority).removeprefix("acc://").encode()
        if self.vote:
            payload += str(self.vote).encode()
        return do_sha256(payload)

    def verify(self, msg: bytes) -> bool:
        # TODO: Implement authority-specific verification
        return True

hash()

Calculate hash for AuthoritySignature.

Source code in accumulate\models\signatures.py
279
280
281
282
283
284
285
286
287
288
def hash(self) -> bytes:
    """Calculate hash for AuthoritySignature."""
    authority_str = str(self.authority).removeprefix("acc://")
    authority_bytes = authority_str.encode()
    vote_bytes = str(self.vote).encode() if self.vote else b""

    combined = authority_bytes + vote_bytes

    result_hash = do_sha256(combined)
    return result_hash

DelegatedSignature

Bases: Signature

Source code in accumulate\models\signatures.py
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
class DelegatedSignature(Signature):
    """Wraps an inner signature with the URL of the delegator that authorized it."""

    def __init__(self, signature: Signature, delegator: URL):
        super().__init__('DelegatedSignature', signature.get_url())
        self.signature = signature
        self.delegator = delegator

    def hash(self) -> bytes:
        """Calculate hash for DelegatedSignature."""
        # hash = sha256(inner-signature hash || delegator URL without the scheme)
        inner = self.signature.hash()
        suffix = str(self.delegator).removeprefix("acc://").encode()
        return do_sha256(inner + suffix)

    def verify(self, msg: bytes) -> bool:
        """Delegate verification to the wrapped signature."""
        return self.signature.verify(msg)

hash()

Calculate hash for DelegatedSignature.

Source code in accumulate\models\signatures.py
256
257
258
259
260
261
262
263
264
265
def hash(self) -> bytes:
    """Calculate hash for DelegatedSignature."""
    delegator_str = str(self.delegator).removeprefix("acc://")
    base_hash = self.signature.hash()
    delegator_bytes = delegator_str.encode()

    combined = base_hash + delegator_bytes

    result_hash = do_sha256(combined)
    return result_hash

ECDSA_SHA256Signature

Bases: Signature

Represents an ECDSA SHA-256 signature.

Source code in accumulate\models\signatures.py
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
class ECDSA_SHA256Signature(Signature):
    """
    Represents an ECDSA SHA-256 signature.
    """

    def __init__(self, signer: URL, public_key: bytes, signature: bytes):
        super().__init__('ECDSA_SHA256', signer)
        self.public_key = public_key
        self.signature = signature

    def hash(self) -> bytes:
        """
        Calculate the SHA-256 hash of the public key.
        """
        return hashlib.sha256(self.public_key).digest()

    def verify(self, msg: bytes) -> bool:
        """
        Verify the ECDSA SHA-256 signature for the provided message.
        """
        try:
            vk = VerifyingKey.from_string(self.public_key, curve=SECP256k1)
            # hashlib.sha256 is passed directly as the hash function
            return vk.verify(self.signature, msg, hashfunc=hashlib.sha256)
        except Exception as e:
            print(f"Verification failed: {e}")
            return False

    def sign(self, msg: bytes, private_key: bytes) -> bytes:
        """
        Sign a message using ECDSA SHA-256 with the provided private key.
        """
        try:
            sk = SigningKey.from_string(private_key, curve=SECP256k1)
            sig = sk.sign(msg, hashfunc=hashlib.sha256)
        except Exception as e:
            print(f"Signing failed: {e}")
            raise
        # Retain the fresh signature on the instance before returning it.
        self.signature = sig
        return sig

hash()

Calculate the SHA-256 hash of the public key.

Source code in accumulate\models\signatures.py
384
385
386
387
388
def hash(self) -> bytes:
    """
    Calculate the SHA-256 hash of the public key.
    """
    return hashlib.sha256(self.public_key).digest()

sign(msg, private_key)

Sign a message using ECDSA SHA-256 with the provided private key.

Source code in accumulate\models\signatures.py
402
403
404
405
406
407
408
409
410
411
412
413
def sign(self, msg: bytes, private_key: bytes) -> bytes:
    """
    Sign a message using ECDSA SHA-256 with the provided private key.
    """
    try:
        signing_key = SigningKey.from_string(private_key, curve=SECP256k1)
        signature = signing_key.sign(msg, hashfunc=hashlib.sha256)
        self.signature = signature
        return signature
    except Exception as e:
        print(f"Signing failed: {e}")
        raise

verify(msg)

Verify the ECDSA SHA-256 signature for the provided message.

Source code in accumulate\models\signatures.py
390
391
392
393
394
395
396
397
398
399
400
def verify(self, msg: bytes) -> bool:
    """
    Verify the ECDSA SHA-256 signature for the provided message.
    """
    try:
        verifying_key = VerifyingKey.from_string(self.public_key, curve=SECP256k1)
        # Use hashlib.sha256 directly as the hash function
        return verifying_key.verify(self.signature, msg, hashfunc=hashlib.sha256)
    except Exception as e:
        print(f"Verification failed: {e}")
        return False

ED25519Signature

Bases: Signature

Source code in accumulate\models\signatures.py
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
class ED25519Signature(Signature):
    """ED25519 signature whose hash follows the JS SDK's two-stage sha256 scheme."""

    def __init__(self, signer: URL, publicKey: bytes, signature: bytes, transaction_data: bytes):
        super().__init__('ed25519', signer, 1)
        self.publicKey = publicKey
        self.signature = signature
        # Microsecond-resolution timestamp captured at construction time.
        self.timestamp = int(time.time() * 1e6)
        from accumulate.utils.hash_functions import hash_data
        # Hex digest of the raw transaction payload this signature covers.
        self.transactionHash = hash_data(transaction_data).hex()

    def hash(self, message: bytes) -> bytes:
        """Follow JS hashing structure for ED25519."""
        # Stage 1: sha256 over the encoded signature metadata. self.encode()
        # comes from the Signature base class — NOTE(review): confirm its format.
        sig_md_hash = hashlib.sha256(self.encode()).digest()
        # Stage 2: sha256(metadata hash || message).
        final_hash = hashlib.sha256(sig_md_hash + message).digest()
        return final_hash

    def verify(self, msg: bytes) -> bool:
        """Return True when self.signature verifies over hash(msg) with publicKey."""
        try:
            vk = ed25519.Ed25519PublicKey.from_public_bytes(self.publicKey)
            final_hash = self.hash(msg)  # Ensure message is hashed before verification
            vk.verify(self.signature, final_hash)
            return True
        except Exception:
            # Any failure (malformed key bytes, mismatched signature) reads as invalid.
            return False

    def to_dict(self) -> dict:
        """Convert ED25519 signature to a dictionary."""
        return {
            "type": self.signature_type.lower(),
            "publicKey": self.publicKey.hex(),
            "signature": self.signature.hex(),
            "signer": str(self.signer),
            "signerVersion": self.version,
            "timestamp": self.timestamp,
            "transactionHash": self.transactionHash,
        }

hash(message)

Follow JS hashing structure for ED25519.

Source code in accumulate\models\signatures.py
63
64
65
66
67
def hash(self, message: bytes) -> bytes:
    """Follow JS hashing structure for ED25519."""
    sig_md_hash = hashlib.sha256(self.encode()).digest()
    final_hash = hashlib.sha256(sig_md_hash + message).digest()
    return final_hash

to_dict()

Convert ED25519 signature to a dictionary.

Source code in accumulate\models\signatures.py
78
79
80
81
82
83
84
85
86
87
88
def to_dict(self) -> dict:
    """Convert ED25519 signature to a dictionary."""
    return {
        "type": self.signature_type.lower(),
        "publicKey": self.publicKey.hex(),
        "signature": self.signature.hex(),
        "signer": str(self.signer),
        "signerVersion": self.version,
        "timestamp": self.timestamp,
        "transactionHash": self.transactionHash,
    }

EIP712Signature

Bases: Signature

Source code in accumulate\models\signatures.py
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
class EIP712Signature(Signature):
    """EIP-712 style signature over typed data for an Ethereum-compatible key."""

    def __init__(self, signer: URL, public_key: bytes, signature: bytes, chain_id: int):
        super().__init__('EIP712', signer, 1)
        self.public_key = public_key
        self.signature = signature
        self.chain_id = chain_id

    def hash(self, data: Dict[str, Any]) -> bytes:
        """Generate EIP-712 compliant hash."""
        return hashlib.sha256(self._encode_typed_data(data)).digest()

    def verify(self, data: Dict[str, Any]) -> bool:
        """Return True when the stored signature matches the typed data."""
        try:
            message_hash = self.hash(data)
            print(f"Debug: Verifying message_hash={message_hash.hex()}, signature={self.signature.hex()}")
            eth_key = keys.PublicKey(self.public_key)
            print(f"Debug: Using public_key={self.public_key.hex()}")
            result = eth_key.verify_msg_hash(message_hash, keys.Signature(self.signature))
            print(f"Debug: Verification result={result}")
            return result
        except Exception as e:
            print(f"Error during verification: {e}")
            return False

    @staticmethod
    def _encode_typed_data(data: Dict[str, Any]) -> bytes:
        """Encode EIP-712 typed data."""
        # Join each "key:value" pair, in dict insertion order, into one byte string.
        parts = [f"{key}:{value}".encode() for key, value in data.items()]
        return b"".join(parts)

_encode_typed_data(data) staticmethod

Encode EIP-712 typed data.

Source code in accumulate\models\signatures.py
120
121
122
123
@staticmethod
def _encode_typed_data(data: Dict[str, Any]) -> bytes:
    """Encode EIP-712 typed data."""
    return b"".join(f"{key}:{value}".encode() for key, value in data.items())

hash(data)

Generate EIP-712 compliant hash.

Source code in accumulate\models\signatures.py
 99
100
101
102
def hash(self, data: Dict[str, Any]) -> bytes:
    """Generate EIP-712 compliant hash."""
    encoded_data = self._encode_typed_data(data)
    return hashlib.sha256(encoded_data).digest()

ETHSignature

Bases: Signature

Represents an Ethereum signature.

Source code in accumulate\models\signatures.py
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
class ETHSignature(Signature):
    """Represents an Ethereum signature."""
    def __init__(self, signer: URL, public_key: bytes, signature: bytes):
        super().__init__('ETH', signer)
        self.public_key = public_key
        self.signature = signature

    def hash(self) -> bytes:
        """Calculate the Ethereum-specific hash of the public key."""
        # keccak(pubkey) truncated to the low 20 bytes — the Ethereum address form.
        return keccak(self.public_key)[-20:]  # Return the last 20 bytes

    def verify(self, message: bytes) -> bool:
        """Verify the Ethereum signature."""
        try:
            # Hash the message using Ethereum's EIP-191 specification
            message_hash = keccak(b"\x19Ethereum Signed Message:\n" + str(len(message)).encode() + message)
            eth_key = keys.PublicKey(self.public_key)

            # Create a Signature object
            # NOTE(review): forces the 65th (recovery) byte to 0/1 via modulo —
            # assumes v arrives as 0/1 or 27/28; 27%2==1 and 28%2==0, so confirm
            # this mapping against the signer that produced the bytes.
            sig_obj = keys.Signature(self.signature[:64] + bytes([self.signature[64] % 2]))  # Ensure v is 0 or 1

            # Verify the signature
            return eth_key.verify_msg_hash(message_hash, sig_obj)
        except Exception as e:
            # Malformed key/signature bytes report as an invalid signature.
            print(f"Error during ETH signature verification: {e}")
            return False

    def get_signature(self) -> bytes:
        """Return the raw signature bytes."""
        return self.signature

    def get_public_key(self) -> bytes:
        """Return the public key bytes."""
        return self.public_key

get_public_key()

Return the public key bytes.

Source code in accumulate\models\signatures.py
368
369
370
def get_public_key(self) -> bytes:
    """Return the public key bytes."""
    return self.public_key

get_signature()

Return the raw signature bytes.

Source code in accumulate\models\signatures.py
364
365
366
def get_signature(self) -> bytes:
    """Return the raw signature bytes."""
    return self.signature

hash()

Calculate the Ethereum-specific hash of the public key.

Source code in accumulate\models\signatures.py
344
345
346
def hash(self) -> bytes:
    """Calculate the Ethereum-specific hash of the public key."""
    return keccak(self.public_key)[-20:]  # Return the last 20 bytes

verify(message)

Verify the Ethereum signature.

Source code in accumulate\models\signatures.py
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
def verify(self, message: bytes) -> bool:
    """Verify the Ethereum signature."""
    try:
        # Hash the message using Ethereum's EIP-191 specification
        message_hash = keccak(b"\x19Ethereum Signed Message:\n" + str(len(message)).encode() + message)
        eth_key = keys.PublicKey(self.public_key)

        # Create a Signature object
        sig_obj = keys.Signature(self.signature[:64] + bytes([self.signature[64] % 2]))  # Ensure v is 0 or 1

        # Verify the signature
        return eth_key.verify_msg_hash(message_hash, sig_obj)
    except Exception as e:
        print(f"Error during ETH signature verification: {e}")
        return False

Lite

Represents a lite account URL and associated data.

Source code in accumulate\models\signatures.py
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
class Lite:
    """
    A lite account: its URL together with the raw bytes behind it.
    """

    def __init__(self, url: str, bytes_: bytes):
        # Keep both the human-readable URL and the associated raw bytes.
        self.url = url
        self.bytes = bytes_

    def get_url(self) -> str:
        """Return the URL of the lite account."""
        return self.url

    def get_bytes(self) -> bytes:
        """Return the raw bytes of the lite account."""
        return self.bytes

    def __str__(self):
        """String representation of the lite account."""
        return self.url

    def __repr__(self):
        return "<Lite url={}, bytes={}>".format(self.url, self.bytes.hex())

__str__()

String representation of the lite account.

Source code in accumulate\models\signatures.py
571
572
573
def __str__(self):
    """String representation of the lite account."""
    return self.url

get_bytes()

Return the raw bytes of the lite account.

Source code in accumulate\models\signatures.py
567
568
569
def get_bytes(self) -> bytes:
    """Return the raw bytes of the lite account."""
    return self.bytes

get_url()

Return the URL of the lite account.

Source code in accumulate\models\signatures.py
563
564
565
def get_url(self) -> str:
    """Return the URL of the lite account."""
    return self.url

PrivateKey

Represents a private key and its associated public key.

Source code in accumulate\models\signatures.py
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
class PrivateKey:
    """
    Represents a private key and its associated public key.
    """

    def __init__(self, key: bytes, type_: str, public_key: Optional[bytes] = None):
        """
        Initialize a PrivateKey instance.

        :param key: The private key bytes.
        :param type_: The type of the private key (e.g., ED25519, ECDSA).
        :param public_key: The optional public key bytes.
        """
        self.key = key
        self.type = type_
        # Wrap the raw public-key bytes, when given, in a PublicKey of the same type.
        if public_key:
            self.public_key = PublicKey(public_key, type_)
        else:
            self.public_key = None

    def __repr__(self):
        return "<PrivateKey type={}, key={}>".format(self.type, self.key.hex())

    def to_dict(self) -> dict:
        """Serialize the key (hex-encoded) along with its public half, if any."""
        public = self.public_key.to_dict() if self.public_key else None
        return {"type": self.type, "key": self.key.hex(), "public_key": public}

    def get_type(self) -> str:
        """
        Get the type of the private key.
        """
        return self.type

    def get_private_key(self) -> Tuple[bytes, bool]:
        """
        Get the raw private key bytes.

        :return: A tuple containing the private key bytes and a boolean indicating success.
        """
        return self.key, True

    def get_public_key(self) -> Optional[PublicKey]:
        """
        Get the associated public key.

        :return: The associated PublicKey instance or None if not set.
        """
        return self.public_key

    def __str__(self):
        """
        Format the private key as a string representation.
        """
        return binascii.hexlify(self.key).decode()

__init__(key, type_, public_key=None)

Initialize a PrivateKey instance.

:param key: The private key bytes. :param type_: The type of the private key (e.g., ED25519, ECDSA). :param public_key: The optional public key bytes.

Source code in accumulate\models\signatures.py
471
472
473
474
475
476
477
478
479
480
481
def __init__(self, key: bytes, type_: str, public_key: Optional[bytes] = None):
    """
    Initialize a PrivateKey instance.

    :param key: The private key bytes.
    :param type_: The type of the private key (e.g., ED25519, ECDSA).
    :param public_key: The optional public key bytes.
    """
    self.key = key
    self.type = type_
    self.public_key = PublicKey(public_key, type_) if public_key else None

__str__()

Format the private key as a string representation.

Source code in accumulate\models\signatures.py
515
516
517
518
519
def __str__(self):
    """
    Format the private key as a string representation.
    """
    return binascii.hexlify(self.key).decode()

get_private_key()

Get the raw private key bytes.

:return: A tuple containing the private key bytes and a boolean indicating success.

Source code in accumulate\models\signatures.py
499
500
501
502
503
504
505
def get_private_key(self) -> Tuple[bytes, bool]:
    """
    Get the raw private key bytes.

    :return: A tuple containing the private key bytes and a boolean indicating success.
    """
    return self.key, True

get_public_key()

Get the associated public key.

:return: The associated PublicKey instance or None if not set.

Source code in accumulate\models\signatures.py
507
508
509
510
511
512
513
def get_public_key(self) -> Optional[PublicKey]:
    """
    Get the associated public key.

    :return: The associated PublicKey instance or None if not set.
    """
    return self.public_key

get_type()

Get the type of the private key.

Source code in accumulate\models\signatures.py
493
494
495
496
497
def get_type(self) -> str:
    """
    Get the type of the private key.
    """
    return self.type

PublicKey

Represents a public key and provides methods for its operations.

Source code in accumulate\models\signatures.py
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
class PublicKey:
    """A typed public key with helpers for raw access and hashing."""

    def __init__(self, key: bytes, type_: str):
        """Store the raw key bytes and their signature type.

        :param key: The public key bytes.
        :param type_: The type of the public key (e.g., ED25519, ECDSA).
        """
        self.key = key
        self.type = type_

    def __repr__(self):
        return f"<PublicKey type={self.type}, key={self.key.hex()}>"

    def get_type(self) -> str:
        """Return the signature type of this key."""
        return self.type

    def get_public_key(self) -> Tuple[bytes, bool]:
        """Return the raw key bytes together with a success flag."""
        return (self.key, True)

    def get_public_key_hash(self) -> Tuple[bytes, bool]:
        """Return the SHA-256 digest of the key and a success flag.

        On any hashing failure the result is ``(b"", False)`` instead of
        raising, so callers can branch on the flag.
        """
        try:
            digest = hashlib.sha256(self.key).digest()
        except Exception:
            return b"", False
        return digest, True

    def __str__(self):
        """Render the key as the hex of its SHA-256 hash."""
        digest, ok = self.get_public_key_hash()
        if not ok:
            return "<invalid address>"
        return binascii.hexlify(digest).decode()

__init__(key, type_)

Initialize a PublicKey instance.

:param key: The public key bytes. :param type_: The type of the public key (e.g., ED25519, ECDSA).

Source code in accumulate\models\signatures.py
421
422
423
424
425
426
427
428
429
def __init__(self, key: bytes, type_: str):
    """
    Initialize a PublicKey instance.

    :param key: The public key bytes.
    :param type_: The type of the public key (e.g., ED25519, ECDSA).
    """
    self.key = key
    self.type = type_

__str__()

Format the public key as a string.

Source code in accumulate\models\signatures.py
459
460
461
462
463
464
def __str__(self):
    """
    Format the public key as a string.
    """
    pub_key_hash, success = self.get_public_key_hash()
    return binascii.hexlify(pub_key_hash).decode() if success else "<invalid address>"

get_public_key()

Get the raw public key bytes.

:return: A tuple containing the public key bytes and a boolean indicating success.

Source code in accumulate\models\signatures.py
440
441
442
443
444
445
446
def get_public_key(self) -> Tuple[bytes, bool]:
    """
    Get the raw public key bytes.

    :return: A tuple containing the public key bytes and a boolean indicating success.
    """
    return self.key, True

get_public_key_hash()

Get the hash of the public key.

:return: A tuple containing the hashed public key bytes and a boolean indicating success.

Source code in accumulate\models\signatures.py
448
449
450
451
452
453
454
455
456
457
def get_public_key_hash(self) -> Tuple[bytes, bool]:
    """
    Get the hash of the public key.

    :return: A tuple containing the hashed public key bytes and a boolean indicating success.
    """
    try:
        return hashlib.sha256(self.key).digest(), True
    except Exception:
        return b"", False

get_type()

Get the type of the public key.

Source code in accumulate\models\signatures.py
434
435
436
437
438
def get_type(self) -> str:
    """
    Get the type of the public key.
    """
    return self.type

PublicKeyHash

Represents a hash derived from a public key.

Source code in accumulate\models\signatures.py
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
class PublicKeyHash:
    """A typed hash derived from a public key."""

    def __init__(self, type_: str, hash_: bytes):
        # Keep both the digest bytes and the key type they came from.
        self.type = type_
        self.hash = hash_

    def get_type(self) -> str:
        """Return the type of the public key hash."""
        return self.type

    def get_public_key_hash(self) -> bytes:
        """Return the hash of the public key."""
        return self.hash

    def to_dict(self) -> dict:
        """Serialize to a plain dict with the hash hex-encoded."""
        return {"type": self.type, "hash": self.hash.hex()}

    def __repr__(self):
        return f"<PublicKeyHash type={self.type}, hash={self.hash.hex()}>"

    def __str__(self):
        """String representation of the address."""
        return f"{self.type}:{self.hash.hex()}"

__str__()

String representation of the address.

Source code in accumulate\models\signatures.py
545
546
547
def __str__(self):
    """String representation of the address."""
    return f"{self.type}:{self.hash.hex()}"

get_public_key_hash()

Return the hash of the public key.

Source code in accumulate\models\signatures.py
541
542
543
def get_public_key_hash(self) -> bytes:
    """Return the hash of the public key."""
    return self.hash

get_type()

Return the type of the public key hash.

Source code in accumulate\models\signatures.py
537
538
539
def get_type(self) -> str:
    """Return the type of the public key hash."""
    return self.type

RCD1Signature

Bases: Signature

Source code in accumulate\models\signatures.py
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
class RCD1Signature(Signature):
    """RCD1 signature over a public key, signature, and timestamp."""
    def __init__(self, signer: URL, public_key: bytes, signature: bytes, timestamp: int):
        super().__init__('RCD1', signer)
        self.public_key = public_key  # raw public key bytes
        self.signature = signature    # raw signature bytes
        self.timestamp = timestamp    # signing time, folded into hash()

    def hash(self) -> bytes:
        """Calculate RCD1-specific hash over the public key and timestamp."""
        return do_sha256(self.public_key, str(self.timestamp).encode())

    def verify(self, msg: bytes) -> bool:
        """Verify the signature; returns False on any failure.

        NOTE(review): the original docstring said "ED25519", but this uses
        an ecdsa VerifyingKey on the SECP256k1 curve. Factom-style RCD1 is
        conventionally Ed25519 — confirm which scheme is actually intended.
        """
        try:
            vk = VerifyingKey.from_string(self.public_key, curve=SECP256k1)
            return vk.verify(self.signature, msg)
        except Exception:
            return False

hash()

Calculate RCD1-specific hash.

Source code in accumulate\models\signatures.py
303
304
305
def hash(self) -> bytes:
    """Calculate RCD1-specific hash."""
    return do_sha256(self.public_key, str(self.timestamp).encode())

verify(msg)

Verify the signature using ED25519.

Source code in accumulate\models\signatures.py
307
308
309
310
311
312
313
def verify(self, msg: bytes) -> bool:
    """Verify the signature using ED25519."""
    try:
        vk = VerifyingKey.from_string(self.public_key, curve=SECP256k1)
        return vk.verify(self.signature, msg)
    except Exception:
        return False

Signature

Base class for managing all signature types.

Source code in accumulate\models\signatures.py
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
class Signature:
    """Base class shared by all signature types.

    Holds the common metadata (type tag, signer URL, version) and the
    two-stage hashing scheme used across the concrete implementations.
    """

    def __init__(self, signature_type: str, signer: Optional[URL] = None, version: int = 1):
        self.signature_type = signature_type
        self.signer = signer
        self.version = version
        # Populated by subclasses; the base class never signs anything.
        self.signature = None

    def get_url(self) -> Optional[URL]:
        """Return the signer URL, if one was supplied."""
        return self.signer

    def get_version(self) -> int:
        """Return the signer version."""
        return self.version

    def get_signature(self) -> Optional[bytes]:
        """Return the raw signature bytes, if set."""
        return self.signature

    def encode(self) -> bytes:
        """Serialize signature metadata for consistent hashing."""
        parts = [self.signature_type.encode()]
        if self.signer:
            parts.append(str(self.signer).encode())
        parts.append(str(self.version).encode())
        return b"".join(parts)

    def hash(self, message: bytes) -> bytes:
        """Hash the metadata, then hash it together with the message.

        Mirrors the JS client: sha256(sha256(metadata) + message).
        """
        metadata_digest = hashlib.sha256(self.encode()).digest()
        return hashlib.sha256(metadata_digest + message).digest()

    def verify(self, msg: bytes) -> bool:
        raise NotImplementedError("Subclasses should implement this method.")

encode()

Serialize signature metadata for consistent hashing.

Source code in accumulate\models\signatures.py
41
42
43
44
def encode(self) -> bytes:
    """Serialize signature metadata for consistent hashing."""
    signer_bytes = str(self.signer).encode() if self.signer else b""
    return self.signature_type.encode() + signer_bytes + str(self.version).encode()

hash(message)

Follow JS hashing structure: First hash signature metadata, then concatenate and hash with message.

Source code in accumulate\models\signatures.py
35
36
37
38
39
def hash(self, message: bytes) -> bytes:
    """Follow JS hashing structure: First hash signature metadata, then concatenate and hash with message."""
    sig_md_hash = hashlib.sha256(self.encode()).digest()
    final_hash = hashlib.sha256(sig_md_hash + message).digest()
    return final_hash

SignatureFactory

Factory to create signatures based on type.

Source code in accumulate\models\signatures.py
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
class SignatureFactory:
    """Factory to create signatures based on type."""

    @staticmethod
    def create_signature(sig_type: str, **kwargs) -> Optional[Signature]:
        def pick(*names):
            # Keep only the kwargs the target constructor understands;
            # anything else is silently dropped.
            return {name: kwargs[name] for name in names if name in kwargs}

        # Branches reference each constructor lazily so import-time behavior
        # matches the original if/elif chain.
        if sig_type == "LegacyED25519":
            return LegacyED25519Signature(**pick("signer", "public_key", "signature", "timestamp"))
        if sig_type == "TypedData":
            return TypedDataSignature(**pick("signer", "public_key", "signature", "chain_id", "memo", "data"))
        if sig_type == "RCD1":
            return RCD1Signature(**pick("signer", "public_key", "signature", "timestamp"))
        if sig_type == "BTC":
            return BTCSignature(**pick("signer", "public_key", "signature"))
        if sig_type == "DelegatedSignature":
            return DelegatedSignature(**pick("signature", "delegator"))
        if sig_type == "AuthoritySignature":
            return AuthoritySignature(**pick("origin", "authority", "vote", "txid"))
        raise ValueError(f"Unsupported signature type: {sig_type}")

TypedDataSignature

Bases: Signature

Source code in accumulate\models\signatures.py
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
class TypedDataSignature(Signature):
    """EIP-712-style typed-data signature."""

    def __init__(
        self,
        signer: Optional[URL],
        public_key: bytes,
        signature: bytes,
        chain_id: int,
        memo: Optional[str] = None,
        data: Optional[bytes] = None,
    ):
        super().__init__('TypedData', signer)
        self.public_key = public_key
        self.signature = signature
        self.chain_id = chain_id
        self.memo = memo
        self.data = data

    @staticmethod
    def _encode_typed_data(data: Dict[str, Any]) -> bytes:
        """Encode EIP-712 typed data as concatenated "key:value" bytes."""
        pieces = (f"{key}:{value}".encode() for key, value in data.items())
        return b"".join(pieces)

    def hash(self, data: Dict[str, Any]) -> bytes:
        """Generate EIP-712 compliant hash."""
        return hashlib.sha256(self._encode_typed_data(data)).digest()

    def verify(self, data: Dict[str, Any]) -> bool:
        """Check the stored signature against the hash of *data*."""
        try:
            digest = self.hash(data)
            eth_key = keys.PublicKey(self.public_key)
            return eth_key.verify_msg_hash(digest, keys.Signature(self.signature))
        except Exception:
            return False

_encode_typed_data(data) staticmethod

Encode EIP-712 typed data.

Source code in accumulate\models\signatures.py
242
243
244
245
@staticmethod
def _encode_typed_data(data: Dict[str, Any]) -> bytes:
    """Encode EIP-712 typed data."""
    return b"".join(f"{key}:{value}".encode() for key, value in data.items())

hash(data)

Generate EIP-712 compliant hash.

Source code in accumulate\models\signatures.py
229
230
231
232
def hash(self, data: Dict[str, Any]) -> bytes:
    """Generate EIP-712 compliant hash."""
    encoded_data = self._encode_typed_data(data)
    return hashlib.sha256(encoded_data).digest()

do_btc_hash(pub_key)

Calculate the Bitcoin hash (RIPEMD160(SHA256(pub_key))).

Source code in accumulate\models\signatures.py
327
328
329
330
331
332
def do_btc_hash(pub_key: bytes) -> bytes:
    """Calculate the Bitcoin hash (RIPEMD160(SHA256(pub_key))).

    This is the standard Bitcoin HASH160 construction.
    """
    # hashlib.new accepts initial data, so both stages collapse into one call.
    return hashlib.new('ripemd160', hashlib.sha256(pub_key).digest()).digest()

do_eth_hash(pub_key)

Calculate the Ethereum address hash.

Source code in accumulate\models\signatures.py
322
323
324
325
def do_eth_hash(pub_key: bytes) -> bytes:
    """Calculate the Ethereum address hash.

    Returns the last 20 bytes of keccak256(pub_key) — the standard
    Ethereum address derivation. NOTE(review): assumes *pub_key* is the
    uncompressed key without the 0x04 prefix — confirm at call sites.
    """
    from eth_utils import keccak  # local import keeps eth_utils optional
    return keccak(pub_key)[-20:]

transaction_results

AddCreditsResult

Bases: TransactionResult

Represents the result of an Add Credits transaction.

:param amount: The amount of tokens added. :param credits: The number of credits added. :param oracle: The oracle rate used for conversion.

Source code in accumulate\models\transaction_results.py
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
class AddCreditsResult(TransactionResult):
    """
    Represents the result of an Add Credits transaction.

    :param amount: The amount of tokens added.
    :param credits: The number of credits added.
    :param oracle: The oracle rate used for conversion.
    :raises ValueError: If any argument is negative.
    """

    def __init__(self, amount: int = 0, credits: int = 0, oracle: int = 0):
        if min(amount, credits, oracle) < 0:
            raise ValueError("Amount, credits, and oracle must be non-negative integers.")
        self.amount = amount
        self.credits = credits
        self.oracle = oracle

    def copy(self) -> "AddCreditsResult":
        """Return an independent copy with the same field values."""
        return AddCreditsResult(self.amount, self.credits, self.oracle)

    def equal(self, other: "AddCreditsResult") -> bool:
        """Field-by-field equality against another AddCreditsResult."""
        if not isinstance(other, AddCreditsResult):
            return False
        return (self.amount, self.credits, self.oracle) == (
            other.amount, other.credits, other.oracle
        )

EmptyResult

Bases: TransactionResult

Represents an empty transaction result.

Source code in accumulate\models\transaction_results.py
18
19
20
21
22
23
24
25
26
class EmptyResult(TransactionResult):
    """A transaction result that carries no data."""

    def copy(self) -> "EmptyResult":
        """All empty results are interchangeable, so just make a new one."""
        return EmptyResult()

    def equal(self, other: "EmptyResult") -> bool:
        """Any two EmptyResult instances compare equal."""
        return isinstance(other, EmptyResult)

TransactionResult

Base class for transaction results.

Source code in accumulate\models\transaction_results.py
 7
 8
 9
10
11
12
13
14
15
class TransactionResult:
    """Abstract base for transaction results.

    Subclasses must provide value-copy and value-equality semantics.
    """

    def copy(self) -> "TransactionResult":
        """Return a copy of this result."""
        raise NotImplementedError("Subclasses must implement this method.")

    def equal(self, other: "TransactionResult") -> bool:
        """Compare this result with *other* by value."""
        raise NotImplementedError("Subclasses must implement this method.")

WriteDataResult

Bases: TransactionResult

Represents the result of a Write Data transaction.

:param entry_hash: The hash of the data entry. :param account_url: The URL of the account associated with the entry. :param account_id: The ID of the account associated with the entry.

Source code in accumulate\models\transaction_results.py
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
class WriteDataResult(TransactionResult):
    """
    Represents the result of a Write Data transaction.

    :param entry_hash: The hash of the data entry.
    :param account_url: The URL of the account associated with the entry.
    :param account_id: The ID of the account associated with the entry.
    """

    def __init__(
        self,
        entry_hash: bytes = b"",
        account_url: Optional[URL] = None,
        account_id: Optional[bytes] = None,
    ):
        self.entry_hash = entry_hash
        self.account_url = account_url
        self.account_id = account_id

    def copy(self) -> "WriteDataResult":
        """Shallow copy; fields are shared, which is safe for immutable bytes."""
        return WriteDataResult(self.entry_hash, self.account_url, self.account_id)

    def equal(self, other: "WriteDataResult") -> bool:
        """Field-by-field equality against another WriteDataResult."""
        if not isinstance(other, WriteDataResult):
            return False
        return (
            self.entry_hash == other.entry_hash
            and self.account_url == other.account_url
            and self.account_id == other.account_id
        )

copy_transaction_result(result)

Create a copy of the transaction result.

:param result: The transaction result to copy. :return: A copy of the transaction result.

Source code in accumulate\models\transaction_results.py
144
145
146
147
148
149
150
151
def copy_transaction_result(result: TransactionResult) -> TransactionResult:
    """
    Create a copy of the transaction result.

    Delegates to the result's own ``copy`` implementation, so the copy
    semantics are whatever the concrete result type defines.

    :param result: The transaction result to copy.
    :return: A copy of the transaction result.
    """
    return result.copy()

deserialize_json(data)

Deserialize JSON bytes into a dictionary.

:param data: JSON bytes. :return: A dictionary representation of the JSON data.

Source code in accumulate\models\transaction_results.py
154
155
156
157
158
159
160
161
162
def deserialize_json(data: bytes) -> Dict[str, Any]:
    """
    Deserialize JSON bytes into a dictionary.

    :param data: JSON bytes.
    :return: A dictionary representation of the JSON data.
    """
    import json  # local import, matching the original module layout
    decoded = json.loads(data)
    return decoded

equal_transaction_result(a, b)

Compare two transaction results for equality.

:param a: The first transaction result. :param b: The second transaction result. :return: True if they are equal, False otherwise.

Source code in accumulate\models\transaction_results.py
102
103
104
105
106
107
108
109
110
def equal_transaction_result(a: TransactionResult, b: TransactionResult) -> bool:
    """
    Compare two transaction results for equality.

    Delegates to ``a.equal``, so the comparison semantics come from the
    concrete result type of *a*.

    :param a: The first transaction result.
    :param b: The second transaction result.
    :return: True if they are equal, False otherwise.
    """
    return a.equal(b)

new_transaction_result(typ)

Factory method to create a new transaction result based on the type.

:param typ: The transaction type. :return: A new instance of the appropriate TransactionResult subclass.

Source code in accumulate\models\transaction_results.py
86
87
88
89
90
91
92
93
94
95
96
97
98
99
def new_transaction_result(typ: str) -> TransactionResult:
    """
    Factory method to create a new transaction result based on the type.

    :param typ: The transaction type name.
    :return: A new instance of the appropriate TransactionResult subclass.
    :raises ValueError: If *typ* names no known result type.
    """
    if typ == "EmptyResult":
        return EmptyResult()
    if typ == "AddCreditsResult":
        return AddCreditsResult()
    if typ == "WriteDataResult":
        return WriteDataResult()
    raise ValueError(f"Unknown transaction result type: {typ}")

unmarshal_transaction_result(data)

Deserialize a transaction result from raw data or JSON.

:param data: Raw bytes or JSON object containing the transaction result. :return: The deserialized TransactionResult.

Source code in accumulate\models\transaction_results.py
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
def unmarshal_transaction_result(data: Union[bytes, Dict[str, Any]]) -> TransactionResult:
    """
    Deserialize a transaction result from raw data or JSON.

    :param data: Raw bytes or JSON object containing the transaction result.
    :return: The deserialized TransactionResult.
    :raises ValueError: If the "Type" field is missing or names an unknown type.
    """
    if isinstance(data, bytes):
        data = deserialize_json(data)

    # The capitalized "Type" key is the discriminator that selects the
    # concrete TransactionResult subclass.
    result_type = data.get("Type")
    if not result_type:
        raise ValueError("Missing transaction result type in data")

    result = new_transaction_result(result_type)

    # Deserialize fields and handle bytes explicitly
    # NOTE(review): key.lower() maps "EntryHash" to "entryhash", not
    # "entry_hash", so the bytes/URL branches only trigger when incoming
    # keys are already snake_case — confirm the producer's key format.
    # The "Type" key itself is also copied onto the result as `type`.
    for key, value in data.items():
        if key.lower() == "entry_hash" or key.lower() == "account_id":
            # Convert hex strings back to bytes
            setattr(result, key.lower(), bytes.fromhex(value) if isinstance(value, str) else value)
        elif key.lower() == "account_url" and isinstance(value, str):
            # Convert string to URL object
            setattr(result, key.lower(), URL.parse(value))
        else:
            setattr(result, key.lower(), value)

    return result

txid

TxID

Represents a transaction ID.

Source code in accumulate\models\txid.py
 10
 11
 12
 13
 14
 15
 16
 17
 18
 19
 20
 21
 22
 23
 24
 25
 26
 27
 28
 29
 30
 31
 32
 33
 34
 35
 36
 37
 38
 39
 40
 41
 42
 43
 44
 45
 46
 47
 48
 49
 50
 51
 52
 53
 54
 55
 56
 57
 58
 59
 60
 61
 62
 63
 64
 65
 66
 67
 68
 69
 70
 71
 72
 73
 74
 75
 76
 77
 78
 79
 80
 81
 82
 83
 84
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
class TxID:
    """Represents a transaction ID: an account URL plus a 32-byte hash."""

    def __init__(self, url: URL, tx_hash: bytes):
        """
        :param url: The account URL the transaction belongs to.
        :param tx_hash: The 32-byte transaction hash.
        :raises ValueError: If *url* is not a URL or *tx_hash* is not 32 bytes.
        """
        if not isinstance(url, URL):
            raise ValueError("TxID must be initialized with a URL instance.")
        if not isinstance(tx_hash, bytes) or len(tx_hash) != 32:
            raise ValueError("Transaction hash must be a 32-byte value.")
        self.url = url
        self.tx_hash = tx_hash
        self._str_cache = None  # lazily-built "url@hash" string

    @staticmethod
    def parse(txid_str: str) -> "TxID":
        """
        Parse a TxID string into a TxID object.

        :raises ValueError: If the string lacks the '@' separator.
        :raises MissingHashError: If no hash component is present.
        :raises InvalidHashError: If the hash is not valid 32-byte hex.
        """
        url = URL.parse(txid_str)

        # Validate that '@' is present in the original string
        if "@" not in txid_str:
            raise ValueError(f"Invalid TxID structure: '{txid_str}'. Must contain '@' separating hash and authority.")

        # The transaction hash travels in the URL's user-info component.
        if not url.user_info:
            raise MissingHashError(f"TxID missing hash: {txid_str}")

        # Decode first, then length-check OUTSIDE the try block so a short
        # hash raises InvalidHashError directly instead of being caught and
        # re-wrapped by the except clause below.
        try:
            tx_hash = bytes.fromhex(url.user_info)
        except ValueError as e:
            raise InvalidHashError(f"Invalid transaction hash format: {url.user_info}. Error: {e}") from e
        if len(tx_hash) != 32:
            raise InvalidHashError(f"Transaction hash must be 32 bytes: {url.user_info}")

        # Clean URL (remove TxHash from user_info)
        clean_url = url.with_user_info("")
        return TxID(clean_url, tx_hash)

    def __str__(self) -> str:
        """Return (and cache) the "url@hash" string representation."""
        if self._str_cache is None:
            self._str_cache = f"{str(self.url)}@{self.tx_hash.hex()}"
        return self._str_cache

    def compare(self, other: "TxID") -> int:
        """Three-way compare: by hash first, then by URL string.

        :return: Negative, zero, or positive, like a C-style comparator.
        :raises ValueError: If *other* is not a TxID.
        """
        if not isinstance(other, TxID):
            raise ValueError("Comparison must be between two TxIDs")
        if self.tx_hash != other.tx_hash:
            return (self.tx_hash > other.tx_hash) - (self.tx_hash < other.tx_hash)
        return (str(self.url) > str(other.url)) - (str(self.url) < str(other.url))

    def as_url(self) -> URL:
        """Construct a URL carrying the hash in its user-info component."""
        return self.url.with_user_info(self.tx_hash.hex())

    def account(self) -> URL:
        """Get the account URL associated with the TxID."""
        return self.url

    def __eq__(self, other: object) -> bool:
        """Equal when both the hash and the URL match."""
        return isinstance(other, TxID) and self.tx_hash == other.tx_hash and self.url == other.url

    def __hash__(self) -> int:
        """Hash on (url string, tx_hash) so TxIDs are usable as dict keys."""
        return hash((str(self.url), self.tx_hash))

    def json(self) -> str:
        """Serialize the TxID to a JSON string with a hex-encoded hash."""
        return json.dumps({"url": str(self.url), "hash": self.tx_hash.hex()})

    @classmethod
    def from_json(cls, json_str: str) -> "TxID":
        """Deserialize a JSON string (as produced by ``json``) into a TxID."""
        data = json.loads(json_str)
        return cls(URL.parse(data["url"]), bytes.fromhex(data["hash"]))

__eq__(other)

Equality operator.

Source code in accumulate\models\txid.py
79
80
81
def __eq__(self, other: object) -> bool:
    """Equality operator."""
    return isinstance(other, TxID) and self.tx_hash == other.tx_hash and self.url == other.url

__hash__()

Hash operator.

Source code in accumulate\models\txid.py
83
84
85
def __hash__(self) -> int:
    """Hash operator."""
    return hash((str(self.url), self.tx_hash))

__str__()

Return the string representation of the TxID.

Source code in accumulate\models\txid.py
50
51
52
53
54
55
56
57
58
def __str__(self) -> str:
    """
    Return the string representation of the TxID.
    """
    if self._str_cache is None:
        # Combine URL and hash as required
        self._str_cache = f"{str(self.url)}@{self.tx_hash.hex()}"
    print(f"String representation of TxID: {self._str_cache}")  # Debugging
    return self._str_cache

account()

Get the account URL associated with the TxID.

Source code in accumulate\models\txid.py
75
76
77
def account(self) -> URL:
    """Get the account URL associated with the TxID."""
    return self.url

as_url()

Construct a URL representation of the TxID.

Source code in accumulate\models\txid.py
69
70
71
72
73
def as_url(self) -> URL:
    """
    Construct a URL representation of the TxID.
    """
    return self.url.with_user_info(self.tx_hash.hex())

from_json(json_str) classmethod

Deserialize a JSON string into a TxID instance.

Source code in accumulate\models\txid.py
 93
 94
 95
 96
 97
 98
 99
100
101
@classmethod
def from_json(cls, json_str: str) -> "TxID":
    """
    Deserialize a JSON string into a TxID instance.
    """
    data = json.loads(json_str)
    url = URL.parse(data["url"])
    tx_hash = bytes.fromhex(data["hash"])
    return cls(url, tx_hash)

json()

Serialize the TxID to a JSON string.

Source code in accumulate\models\txid.py
87
88
89
90
91
def json(self) -> str:
    """
    Serialize the TxID to a JSON string.
    """
    return json.dumps({"url": str(self.url), "hash": self.tx_hash.hex()})

parse(txid_str) staticmethod

Parse a TxID string into a TxID object.

Source code in accumulate\models\txid.py
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
@staticmethod
def parse(txid_str: str) -> "TxID":
    """
    Parse a TxID string into a TxID object.
    """
    print(f"Parsing TxID string: {txid_str}")  # Debugging input
    url = URL.parse(txid_str)

    # Validate that '@' is present in the original string
    if "@" not in txid_str:
        raise ValueError(f"Invalid TxID structure: '{txid_str}'. Must contain '@' separating hash and authority.")

    # Validate presence of user_info (TxHash)
    if not url.user_info:
        raise MissingHashError(f"TxID missing hash: {txid_str}")

    try:
        tx_hash = bytes.fromhex(url.user_info)
        if len(tx_hash) != 32:
            raise InvalidHashError(f"Transaction hash must be 32 bytes: {url.user_info}")
    except ValueError as e:
        raise InvalidHashError(f"Invalid transaction hash format: {url.user_info}. Error: {e}")

    # Clean URL (remove TxHash from user_info)
    clean_url = url.with_user_info("")
    return TxID(clean_url, tx_hash)

txid_set

TxIdSet

Source code in accumulate\models\txid_set.py
 7
 8
 9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
class TxIdSet:
    """An ordered, duplicate-free collection of transaction IDs."""

    def __init__(self):
        # Kept sorted by TxID.compare at all times.
        self.entries: List[TxID] = []

    def add(self, txid: TxID):
        """Insert *txid* at its sorted position; duplicates are ignored."""
        index = 0
        for existing in self.entries:
            order = txid.compare(existing)
            if order == 0:
                return  # already present
            if order < 0:
                self.entries.insert(index, txid)
                return
            index += 1
        # Largest element so far: goes at the end.
        self.entries.append(txid)

    def remove(self, txid: TxID):
        """Delete the entry equal to *txid*, if any; otherwise do nothing."""
        for index, existing in enumerate(self.entries):
            if txid.compare(existing) == 0:
                del self.entries[index]
                return

    def contains_hash(self, hash_: bytes) -> bool:
        """Return True if any entry's tx_hash equals *hash_*."""
        return any(entry.tx_hash == hash_ for entry in self.entries)

add(txid)

Add a transaction ID to the set using a sorted insertion.

Source code in accumulate\models\txid_set.py
11
12
13
14
15
16
17
18
19
20
21
22
23
def add(self, txid: TxID):
    """
    Add a transaction ID to the set using a sorted insertion.
    """
    for i, entry in enumerate(self.entries):
        comparison = txid.compare(entry)
        if comparison == 0:  # Already exists
            return
        elif comparison < 0:  # Insert before
            self.entries.insert(i, txid)
            return
    # Add to the end if no earlier position was found
    self.entries.append(txid)

contains_hash(hash_)

Check if a transaction ID with the given hash exists in the set.

Source code in accumulate\models\txid_set.py
34
35
36
37
38
39
40
41
def contains_hash(self, hash_: bytes) -> bool:
    """
    Check if a transaction ID with the given hash exists in the set.
    """
    for entry in self.entries:
        if entry.tx_hash == hash_:  # Compare directly with the tx_hash attribute
            return True
    return False

remove(txid)

Remove a transaction ID from the set if it exists.

Source code in accumulate\models\txid_set.py
25
26
27
28
29
30
31
32
def remove(self, txid: TxID):
    """
    Remove a transaction ID from the set if it exists.
    """
    for i, entry in enumerate(self.entries):
        if txid.compare(entry) == 0:
            del self.entries[i]
            return

types

AtomicSlice

Thread-safe list management.

Source code in accumulate\models\types.py
26
27
28
29
30
31
32
33
34
35
36
class AtomicSlice:
    """Thread-safe list management.

    NOTE(review): no explicit lock is taken — "thread-safe" relies on
    CPython's GIL making list.append atomic; confirm if porting.
    """

    def __init__(self):
        self.items = []

    def add(self, item: Any):
        """Append *item* to the slice."""
        self.items.append(item)

    def compare(self, other: List[Any]) -> bool:
        """Compare the items with another list."""
        return self.items == other

compare(other)

Compare the items with another list.

Source code in accumulate\models\types.py
34
35
36
def compare(self, other: List[Any]) -> bool:
    """Compare the items with another list."""
    return self.items == other

AtomicUint

Thread-safe atomic counter.

Source code in accumulate\models\types.py
11
12
13
14
15
16
17
18
19
20
21
22
23
class AtomicUint:
    """Thread-safe atomic counter.

    NOTE(review): relies on the GIL rather than an explicit lock;
    increment is read-modify-write and not truly atomic across threads.
    """

    def __init__(self, value: int = 0):
        self.value = value  # current counter value

    def increment(self):
        """Bump the counter by one."""
        self.value = self.value + 1

    def store(self, value: int):
        """Overwrite the counter with *value*."""
        self.value = value

    def load(self) -> int:
        """Read the current counter value."""
        return self.value

Utils

conversion

camel_to_snake(name)

Converts camelCase to snake_case.

Source code in accumulate\utils\conversion.py
5
6
7
def camel_to_snake(name: str) -> str:
    """Converts camelCase to snake_case."""
    # Insert an underscore before every ASCII uppercase letter that is not
    # at the start of the string, then lowercase the whole result.
    boundary = re.compile(r"(?<!^)(?=[A-Z])")
    return boundary.sub("_", name).lower()

address_from

from_ecdsa_private_key(key)

Create a PrivateKey instance from an ECDSA private key.

Source code in accumulate\utils\address_from.py
91
92
93
94
95
96
97
98
99
def from_ecdsa_private_key(key: ec.EllipticCurvePrivateKey) -> PrivateKey:
    """Build a PrivateKey wrapper around an ECDSA private key.

    The private key is serialized as unencrypted PEM/PKCS8; the matching
    public key bytes come from :func:`from_ecdsa_public_key`.
    """
    public_part = from_ecdsa_public_key(key.public_key())
    pem_bytes = key.private_bytes(
        encoding=serialization.Encoding.PEM,
        format=serialization.PrivateFormat.PKCS8,
        encryption_algorithm=serialization.NoEncryption(),
    )
    return PrivateKey(
        type_=SignatureType.ECDSA_SHA256,
        key=pem_bytes,
        public_key=public_part.key,
    )

from_ecdsa_public_key(key)

Create a PublicKey instance from an ECDSA public key.

Source code in accumulate\utils\address_from.py
82
83
84
85
86
87
88
def from_ecdsa_public_key(key: ec.EllipticCurvePublicKey) -> PublicKey:
    """Build a PublicKey wrapper around an ECDSA public key.

    The key is serialized as PEM-encoded SubjectPublicKeyInfo bytes.
    """
    pem_bytes = key.public_bytes(
        encoding=serialization.Encoding.PEM,
        format=serialization.PublicFormat.SubjectPublicKeyInfo,
    )
    return PublicKey(type_=SignatureType.ECDSA_SHA256, key=pem_bytes)

from_ed25519_private_key(key)

Create a PrivateKey instance from an Ed25519 private key (must be 32 or 64 bytes).

Source code in accumulate\utils\address_from.py
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
def from_ed25519_private_key(key: bytes) -> PrivateKey:
    """Create a PrivateKey from Ed25519 private key bytes (32 or 64 bytes).

    A 64-byte key is treated as seed||public (TweetNaCl layout) and only
    its first 32 bytes are kept as the seed; the public key is always
    re-derived from the seed.

    Raises:
        ValueError: if *key* is neither 32 nor 64 bytes long.
    """
    if len(key) not in (32, 64):
        raise ValueError("Invalid Ed25519 private key length (must be 32 or 64 bytes).")
    seed = key[:32]  # identical to key when len(key) == 32

    derived = Ed25519PrivateKey.from_private_bytes(seed)
    pub_bytes = derived.public_key().public_bytes(
        encoding=serialization.Encoding.Raw,
        format=serialization.PublicFormat.Raw,
    )

    return PrivateKey(type_=SignatureType.ED25519, key=seed, public_key=pub_bytes)

from_ed25519_public_key(key)

Create a PublicKey instance from an Ed25519 public key (must be 32 bytes).

Source code in accumulate\utils\address_from.py
36
37
38
39
40
def from_ed25519_public_key(key: bytes) -> PublicKey:
    """Create a PublicKey from raw Ed25519 public key bytes (exactly 32).

    Raises:
        ValueError: when *key* is not 32 bytes long.
    """
    if len(key) == 32:
        return PublicKey(type_=SignatureType.ED25519, key=key)
    raise ValueError("Invalid Ed25519 public key length (must be 32 bytes).")

from_eth_private_key(key)

Create a PrivateKey instance from an Ethereum private key.

Source code in accumulate\utils\address_from.py
102
103
104
105
106
def from_eth_private_key(key: bytes) -> PrivateKey:
    """Create a PrivateKey from raw Ethereum (secp256k1) private key bytes.

    The public key bytes are derived via eth_keys; the raw input key is
    stored unchanged.
    """
    public_key_bytes = eth_keys.PrivateKey(key).public_key.to_bytes()
    return PrivateKey(type_=SignatureType.ECDSA_SHA256, key=key, public_key=public_key_bytes)

from_private_key_bytes(key, signature_type)

Create a PrivateKey instance from raw private key bytes.

Source code in accumulate\utils\address_from.py
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
def from_private_key_bytes(key: bytes, signature_type: SignatureType) -> PrivateKey:
    """Create a PrivateKey instance from raw private key bytes.

    Dispatches on *signature_type*; the RSA and ECDSA variants expect
    PEM-encoded input, Ed25519 expects 32/64 raw bytes, and the
    BTC/ETH/BTC_LEGACY variants share the secp256k1 path.

    Raises:
        ValueError: on an unsupported type or a key that does not match it.
    """
    if signature_type == SignatureType.ED25519:
        if len(key) not in {32, 64}:
            raise ValueError("Ed25519 key must be 32 or 64 bytes.")
        return from_ed25519_private_key(key)

    if signature_type == SignatureType.RSA_SHA256:
        loaded = serialization.load_pem_private_key(key, password=None)
        if not isinstance(loaded, rsa.RSAPrivateKey):
            raise ValueError("Provided key is not an RSA private key.")
        return from_rsa_private_key(loaded)

    if signature_type == SignatureType.ECDSA_SHA256:
        loaded = serialization.load_pem_private_key(key, password=None)
        if not isinstance(loaded, ec.EllipticCurvePrivateKey):
            raise ValueError("Provided key is not an ECDSA private key.")
        return from_ecdsa_private_key(loaded)

    if signature_type in {SignatureType.BTC, SignatureType.ETH, SignatureType.BTC_LEGACY}:
        return from_eth_private_key(key)

    raise ValueError(f"Unsupported signature type: {signature_type}")

from_rsa_private_key(key)

Create a PrivateKey instance from an RSA private key.

Source code in accumulate\utils\address_from.py
71
72
73
74
75
76
77
78
79
def from_rsa_private_key(key: rsa.RSAPrivateKey) -> PrivateKey:
    """Build a PrivateKey wrapper around an RSA private key.

    The private key is serialized as unencrypted PEM/PKCS8; the matching
    public key bytes come from :func:`from_rsa_public_key`.
    """
    public_part = from_rsa_public_key(key.public_key())
    pem_bytes = key.private_bytes(
        encoding=serialization.Encoding.PEM,
        format=serialization.PrivateFormat.PKCS8,
        encryption_algorithm=serialization.NoEncryption(),
    )
    return PrivateKey(type_=SignatureType.RSA_SHA256, key=pem_bytes, public_key=public_part.key)

from_rsa_public_key(key)

Create a PublicKey instance from an RSA public key.

Source code in accumulate\utils\address_from.py
62
63
64
65
66
67
68
def from_rsa_public_key(key: rsa.RSAPublicKey) -> PublicKey:
    """Build a PublicKey wrapper around an RSA public key.

    The key is serialized as PEM-encoded SubjectPublicKeyInfo bytes.
    """
    pem_bytes = key.public_bytes(
        encoding=serialization.Encoding.PEM,
        format=serialization.PublicFormat.SubjectPublicKeyInfo,
    )
    return PublicKey(type_=SignatureType.RSA_SHA256, key=pem_bytes)

generate_ed25519_keypair()

Generate an Ed25519 keypair that matches TweetNaCl (64-byte private key, 32-byte public key).

Source code in accumulate\utils\address_from.py
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
def generate_ed25519_keypair():
    """Generate a TweetNaCl-style Ed25519 keypair.

    Returns:
        tuple[bytes, bytes]: (seed || public, public) — a 64-byte private
        key in the TweetNaCl layout followed by the 32-byte public key.
    """
    keypair = Ed25519PrivateKey.generate()

    seed = keypair.private_bytes(
        encoding=serialization.Encoding.Raw,
        format=serialization.PrivateFormat.Raw,
        encryption_algorithm=serialization.NoEncryption(),
    )
    public = keypair.public_key().public_bytes(
        encoding=serialization.Encoding.Raw,
        format=serialization.PublicFormat.Raw,
    )

    # TweetNaCl represents the secret key as the 32-byte seed concatenated
    # with the 32-byte public key, giving 64 bytes total.
    return seed + public, public

encoding

consume(target, consumer)

Extracts fields from an object and applies the consumer function to each field. Mirrors the JS consume() function.

Source code in accumulate\utils\encoding.py
188
189
190
191
192
193
194
195
196
197
def consume(target: dict, consumer: callable):
    """
    Apply *consumer* to every field of *target*.

    Field numbers are assigned by insertion order starting at 1, mirroring
    the JS `consume()` helper; the consumer receives (field_number, value).

    Raises:
        TypeError: when *target* is not a dict.
    """
    if not isinstance(target, dict):
        raise TypeError("consume() expects a dictionary as input")

    field_num = 0
    for value in target.values():
        field_num += 1
        consumer(field_num, value)

decode_uvarint(buf)

Decodes an unsigned integer from a bytes object using varint encoding

Source code in accumulate\utils\encoding.py
40
41
42
43
44
45
46
47
48
49
def decode_uvarint(buf: bytes) -> tuple[int, int]:
    """Decode a LEB128-style unsigned varint from the front of *buf*.

    Returns:
        (value, bytes_consumed); (0, 0) when *buf* ends before a
        terminating byte (high bit clear) is seen.
    """
    value = 0
    consumed = 0
    for byte in buf:
        value |= (byte & 0x7F) << (7 * consumed)
        consumed += 1
        if not byte & 0x80:
            return value, consumed
    return 0, 0

encode(target)

Python equivalent of the JS encode() function. Encodes an object into a binary format using uvarint encoding.

Source code in accumulate\utils\encoding.py
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
def encode(target: dict) -> bytes:
    """
    Encode *target* into the binary format used by the JS encode() helper.

    Each field is written as its 1-based field number (uvarint) followed by
    the encoded value; fields are numbered in insertion order.

    Raises:
        TypeError: when *target* is not a dict.
    """
    if not isinstance(target, dict):
        raise TypeError("encode() expects a dictionary as input")

    chunks = []
    for field_num, value in enumerate(target.values(), start=1):
        chunks.append(encode_uvarint(field_num))  # field number
        chunks.append(encode_value(value))        # encoded value
    return b"".join(chunks)

encode_compact_int(value)

Encodes an integer in compact form: a one‑byte length followed by the big‑endian bytes

Source code in accumulate\utils\encoding.py
67
68
69
70
71
72
73
74
def encode_compact_int(value: int) -> bytes:
    """Encode *value* as a one-byte length followed by big-endian magnitude.

    Zero encodes as the single byte 0x00.
    """
    if value == 0:
        return b'\x00'
    width = (value.bit_length() + 7) // 8
    result = bytes([width]) + value.to_bytes(width, byteorder='big')
    logger.debug(f"encode_compact_int: {result.hex()}")
    return result

encode_uvarint(x)

Encodes an unsigned integer using varint encoding.

Source code in accumulate\utils\encoding.py
27
28
29
30
31
32
33
34
35
36
37
38
def encode_uvarint(x: int) -> bytes:
    """Encode a non-negative integer as a LEB128-style unsigned varint.

    Raises:
        ValueError: when *x* is negative.
    """
    if x < 0:
        raise ValueError("Cannot encode negative value as unsigned varint")
    out = bytearray()
    while x > 0x7F:
        out.append(0x80 | (x & 0x7F))
        x >>= 7
    out.append(x)  # final byte has the high bit clear
    result = bytes(out)
    logger.debug(f"encode_uvarint: {result.hex()}")
    return result

encode_value(value)

Encodes a single value based on its type. Mirrors the encoding rules from the JavaScript library.

Source code in accumulate\utils\encoding.py
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
def encode_value(value: Any) -> bytes:
    """
    Encode a single value according to its Python type.

    Mirrors the JS library's rules: ints are uvarints, str/bytes are
    length-prefixed, dicts recurse through encode(), lists concatenate the
    encodings of their items.

    Raises:
        TypeError: for unsupported types.
    """
    # NOTE(review): bool is a subclass of int and is therefore encoded as a
    # 0/1 uvarint — confirm that is intended.
    if isinstance(value, int):
        return encode_uvarint(value)  # Variable-length encoding for numbers
    elif isinstance(value, str):
        value_bytes = value.encode("utf-8")
        return encode_uvarint(len(value_bytes)) + value_bytes
    elif isinstance(value, bytes):
        return encode_uvarint(len(value)) + value
    elif isinstance(value, dict):
        return encode(value)  # Recursive encoding for nested objects
    elif isinstance(value, list):
        encoded_list = bytearray()
        for item in value:
            encoded_list.extend(encode_value(item))
        # BUG FIX: previously returned the bytearray itself; every other
        # branch (and the annotation) promises immutable bytes.
        return bytes(encoded_list)
    else:
        raise TypeError(f"Unsupported data type: {type(value)}")

field_marshal_binary(field, val)

Encodes a field by writing its field number as one byte, then appending the provided value (which itself may already be length‑prefixed)

Source code in accumulate\utils\encoding.py
77
78
79
80
81
82
83
84
85
86
def field_marshal_binary(field: int, val: bytes) -> bytes:
    """
    Prefix *val* with its one-byte field number.

    *val* may already be length-prefixed by the caller.

    Raises:
        ValueOutOfRangeException: when *field* is outside 1..32.
    """
    if not 1 <= field <= 32:
        raise ValueOutOfRangeException(field)
    result = bytes([field]) + val
    logger.debug(f"field_marshal_binary (field {field}): {result.hex()}")
    return result

read_uvarint(reader)

Reads an unsigned varint from a byte stream

Source code in accumulate\utils\encoding.py
51
52
53
54
55
56
57
58
59
60
61
62
63
64
def read_uvarint(reader: io.BytesIO) -> int:
    """Read one unsigned varint from *reader*, consuming its bytes.

    Raises:
        EOFError: if the stream ends before the final (high-bit-clear) byte.
    """
    value = 0
    shift = 0
    while True:
        chunk = reader.read(1)
        if not chunk:
            raise EOFError("Unexpected end of stream while reading uvarint")
        byte = chunk[0]
        value |= (byte & 0x7F) << shift
        if byte & 0x80 == 0:
            return value
        shift += 7

hash_functions

LiteAuthorityForHash(key_hash)

Generate a Lite Token Account suffix from a key hash

:param key_hash: The SHA-256 first 20 bytes :return: A valid Lite Token Account suffix

Source code in accumulate\utils\hash_functions.py
108
109
110
111
112
113
114
115
116
117
118
119
def LiteAuthorityForHash(key_hash: bytes) -> str:
    """
    Derive the lite-authority hex suffix for a key hash.

    :param key_hash: hash bytes; only the first 20 bytes are used
    :return: hex of the 20-byte prefix followed by a 4-byte checksum
    """
    prefix = key_hash[:20]
    # The checksum is computed over the *hex string* of the prefix, not the
    # raw bytes, and only its last 4 bytes are kept.
    checksum_tail = hashlib.sha256(prefix.hex().encode()).digest()[-4:]
    return (prefix + checksum_tail).hex()

LiteAuthorityForKey(pub_key, signature_type)

Generate a Lite Token Account (LTA) URL from a public key

:param pub_key: The public key in bytes :param signature_type: The signature type (e.g., "ED25519") :return: A valid Lite Token Account (LTA) URL

Source code in accumulate\utils\hash_functions.py
 96
 97
 98
 99
100
101
102
103
104
105
106
def LiteAuthorityForKey(pub_key: bytes, signature_type: str) -> str:
    """
    Build a Lite Token Account URL for *pub_key*.

    :param pub_key: the raw public key bytes
    :param signature_type: the signature type label (e.g. "ED25519")
    :return: an acc:// lite account URL
    """
    # NOTE(review): signature_type is accepted but never consulted — the key
    # is always SHA-256 hashed; confirm whether that is intentional.
    digest = hashlib.sha256(pub_key).digest()
    return "acc://" + LiteAuthorityForHash(digest)

btc_address(public_key)

Generate a BTC address from a public key

Source code in accumulate\utils\hash_functions.py
61
62
63
64
65
66
67
68
69
70
def btc_address(public_key: bytes) -> str:
    """
    Derive a Base58Check (version byte 0x00) BTC address from *public_key*.

    Raises:
        ValueError: when the key is not 33 (compressed) or 65 (uncompressed)
            bytes long.
    """
    if len(public_key) not in {33, 65}:
        raise ValueError("Invalid public key length for BTC")
    payload = b"\x00" + public_key_hash(public_key, SignatureType.BTC)
    checksum = hash_data(hash_data(payload))[:4]
    return base58.b58encode(payload + checksum).decode()

compute_hash(obj)

Compute a SHA-256 hash for an object implementing a marshal_binary() method. If raw bytes are provided, hash them directly.

Source code in accumulate\utils\hash_functions.py
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
def compute_hash(obj: Any) -> bytes:
    """
    SHA-256 hash of *obj*.

    Raw bytes are hashed directly; otherwise *obj* must provide a callable
    `marshal_binary()` whose output is hashed.

    Raises:
        EncodingError: when no usable `marshal_binary` exists or it fails.
    """
    if isinstance(obj, bytes):
        return hashlib.sha256(obj).digest()

    marshal = getattr(obj, "marshal_binary", None)
    if not callable(marshal):
        raise EncodingError("Object must implement a `marshal_binary` method")

    try:
        payload = marshal()
    except Exception as e:
        raise EncodingError("Failed to marshal object for hashing") from e

    return hashlib.sha256(payload).digest()

eth_address(public_key)

Generate an ETH address from a public key

Source code in accumulate\utils\hash_functions.py
73
74
75
76
77
78
79
80
81
82
def eth_address(public_key: bytes) -> str:
    """
    Derive a 0x-prefixed ETH address from *public_key*.

    Accepts a 65-byte uncompressed key (the leading format byte is dropped)
    or the bare 64-byte X||Y form.

    Raises:
        ValueError: for any other length.
    """
    key = public_key[1:] if len(public_key) == 65 else public_key
    if len(key) != 64:
        raise ValueError("Invalid public key length for ETH")
    return "0x" + public_key_hash(key, SignatureType.ETH).hex()

hash_data(data)

Computes the SHA-256 hash of the given data

Source code in accumulate\utils\hash_functions.py
85
86
87
88
89
90
91
def hash_data(data: bytes) -> bytes:
    """
    Compute the SHA-256 digest of *data*.

    Generalized (backward-compatibly) to accept any bytes-like object —
    bytes, bytearray, or memoryview — instead of bytes only.

    Raises:
        ValueError: when *data* is not bytes-like.
    """
    if not isinstance(data, (bytes, bytearray, memoryview)):
        raise ValueError("Input must be of type bytes")
    return hashlib.sha256(data).digest()

public_key_hash(public_key, signature_type)

Calculate the public key hash based on the signature type

Source code in accumulate\utils\hash_functions.py
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
def public_key_hash(public_key: bytes, signature_type: SignatureType) -> bytes:
    """
    Hash *public_key* according to the conventions of *signature_type*.

    Raises:
        ValueError: for signature types with no defined hashing rule.
    """
    sha256_types = (
        SignatureType.ED25519,
        SignatureType.LEGACY_ED25519,
        SignatureType.RSA_SHA256,
        SignatureType.ECDSA_SHA256,
    )
    if signature_type in sha256_types:
        return hashlib.sha256(public_key).digest()
    if signature_type == SignatureType.RCD1:
        # Factom RCD1 prepends the literal b"RCD" before hashing.
        return hashlib.sha256(b"RCD" + public_key).digest()
    if signature_type in (SignatureType.BTC, SignatureType.BTC_LEGACY):
        # Bitcoin hash160: RIPEMD-160 over SHA-256.
        # NOTE(review): hashlib.new("ripemd160") relies on OpenSSL providing
        # the algorithm — confirm availability on target platforms.
        inner = hashlib.sha256(public_key).digest()
        h160 = hashlib.new("ripemd160")
        h160.update(inner)
        return h160.digest()
    if signature_type == SignatureType.ETH:
        # Ethereum address material: last 20 bytes of the Keccak-256 digest.
        return keccak(public_key)[-20:]
    raise ValueError(f"Unsupported signature type for public key hash: {signature_type}")

Additional Utils

address_parse

is_wif_key(wif)

Check if a string is a valid WIF (Wallet Import Format) key.

:param wif: The WIF key string. :return: True if valid, False otherwise.

Source code in accumulate\utils\address_parse.py
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
def is_wif_key(wif: str) -> bool:
    """
    Check whether *wif* looks like a valid WIF (Wallet Import Format) key.

    Validates the leading character, string length, Base58 decodability,
    payload length, and the trailing double-SHA-256 checksum.

    :param wif: The WIF key string.
    :return: True if valid, False otherwise.
    """
    if len(wif) not in (51, 52) or not wif.startswith(("5", "K", "L")):
        return False
    try:
        decoded = base58.b58decode(wif)
    except Exception:
        return False
    # 32-byte key + 4-byte checksum, plus an optional compression flag.
    if len(decoded) not in (37, 38):
        return False
    body, checksum = decoded[:-4], decoded[-4:]
    return hashlib.sha256(hashlib.sha256(body).digest()).digest()[:4] == checksum

parse_ac_address(address)

Parse an Accumulate public key (AC) address.

Source code in accumulate\utils\address_parse.py
62
63
64
65
66
67
68
69
70
71
72
73
74
def parse_ac_address(address: str) -> PublicKeyHash:
    """Parse an Accumulate public key (AC) address into a PublicKeyHash.

    Raises:
        ValidationError: on an unknown AC prefix/version.
    """
    version = address[2:3]  # digit after "AC", e.g. "1"
    known_prefixes = {"1": "AC1", "2": "AC2", "3": "AC3"}
    key_types = {"1": "ED25519", "2": "EcdsaSha256", "3": "RsaSha256"}

    expected = known_prefixes.get(version)
    if expected is None or address[:3] != expected:
        raise ValidationError(f"Invalid AC address type: {address}")

    # Decode the body, verifying length and checksum for this prefix.
    hash_bytes = parse_with_prefix(address, 32, expected)
    return PublicKeyHash(key_types[version], hash_bytes)

parse_as_address(address)

Parse an Accumulate private key (AS) address.

Source code in accumulate\utils\address_parse.py
77
78
79
80
81
82
83
84
85
86
87
88
89
def parse_as_address(address: str) -> PrivateKey:
    """Parse an Accumulate private key (AS) address into a PrivateKey.

    Raises:
        ValidationError: on an unknown AS prefix/version.
    """
    version = address[2:3]  # digit after "AS", e.g. "1"
    known_prefixes = {"1": "AS1", "2": "AS2", "3": "AS3"}
    key_types = {"1": "ED25519", "2": "EcdsaSha256", "3": "RsaSha256"}

    expected = known_prefixes.get(version)
    if expected is None or address[:3] != expected:
        raise ValidationError(f"Invalid AS address type: {address}")

    # Decode the body, verifying length and checksum for this prefix.
    private_key_bytes = parse_with_prefix(address, 32, expected)
    return PrivateKey(private_key_bytes, key_types[version])

parse_btc_address(address)

Parse a Bitcoin public key (BT) address.

Source code in accumulate\utils\address_parse.py
114
115
116
117
def parse_btc_address(address: str) -> PublicKeyHash:
    """Parse a Bitcoin public key (BT) address into a 20-byte hash."""
    # Drop the 2-character readable prefix, then Base58Check-decode with the
    # 0x00 version byte.
    payload = parse_with_checksum(address[2:], 20, b"\x00")
    return PublicKeyHash("BTC", payload)

parse_eth_address(address)

Parse an Ethereum address.

Source code in accumulate\utils\address_parse.py
120
121
122
123
124
125
126
127
128
129
130
131
132
133
def parse_eth_address(address: str) -> PublicKeyHash:
    """Parse a 0x-prefixed Ethereum address into a 20-byte hash.

    Raises:
        ValidationError: on a missing 0x prefix, non-hex payload, or a
            payload that is not exactly 20 bytes.
    """
    if address.startswith("0x"):
        try:
            hash_bytes = bytes.fromhex(address[2:])
        except ValueError:
            raise ValidationError("Unknown address format")
        if len(hash_bytes) == 20:
            return PublicKeyHash("ETH", hash_bytes)
    raise ValidationError("Unknown address format")

parse_fa_address(address)

Parse a Factom public key (FA) address.

Source code in accumulate\utils\address_parse.py
 93
 94
 95
 96
 97
 98
 99
100
def parse_fa_address(address: str) -> PublicKeyHash:
    """Parse a Factom public key (FA) address into an RCD1 hash.

    Raises:
        ValidationError: when the readable prefix is not "FA".
    """
    if not address.startswith("FA"):
        raise ValidationError("Invalid FA address prefix")
    # Base58Check payload after the readable "FA" prefix, with the Factom
    # FA version bytes 0x5f 0xb1.
    hash_bytes = parse_with_checksum(address[2:], 32, b"\x5f\xb1")
    return PublicKeyHash("RCD1", hash_bytes)

parse_fs_address(address)

Parse a Factom private key (Fs) address.

Source code in accumulate\utils\address_parse.py
103
104
105
106
107
108
109
110
def parse_fs_address(address: str) -> PrivateKey:
    """Parse a Factom private key (Fs) address into an RCD1 private key.

    Raises:
        ValidationError: when the readable prefix is not "Fs".
    """
    if not address.startswith("Fs"):
        raise ValidationError("Invalid Fs address prefix")
    # Base58Check payload after the readable "Fs" prefix, with the Factom
    # Fs version bytes 0x64 0x78.
    secret = parse_with_checksum(address[2:], 32, b"\x64\x78")
    return PrivateKey(secret, "RCD1")

parse_mh_address(address)

Parse an unknown hash (as a multihash).

Source code in accumulate\utils\address_parse.py
138
139
140
141
142
143
144
145
146
def parse_mh_address(address: str) -> PublicKeyHash:
    """Parse an MH-prefixed address as an opaque multihash.

    Raises:
        ValidationError: on a bad prefix or a Base58 decoding failure.
    """
    if not address.startswith("MH"):
        raise ValidationError("Invalid MH address: bad prefix")
    try:
        payload = base58.b58decode(address[2:])
    except ValueError:
        raise ValidationError("Invalid MH address: decoding failed")
    # No checksum or length validation is applied to multihash payloads.
    return PublicKeyHash("Multihash", payload)

parse_wif(wif)

Parse a WIF (Wallet Import Format) encoded key.

Source code in accumulate\utils\address_parse.py
202
203
204
205
206
207
208
209
210
211
212
213
def parse_wif(wif: str) -> PrivateKey:
    """Parse a WIF (Wallet Import Format) encoded BTC private key.

    Raises:
        ValidationError: on a Base58 decoding failure or a bad payload length.
    """
    try:
        decoded = base58.b58decode(wif)
    except ValueError:
        raise ValidationError("Invalid WIF encoding")

    if len(decoded) not in (37, 38):
        raise ValidationError("Invalid WIF length")
    # Bytes 1..32 are the secret; byte 0 is the version byte and the tail
    # holds an optional compression flag plus the 4-byte checksum.  (The
    # original's extra `key[:32]` for compressed keys was a no-op on an
    # already-32-byte slice.)
    return PrivateKey(decoded[1:33], "BTC")

parse_with_checksum(address, length, prefix)

Parse an address with a binary prefix and checksum.

Source code in accumulate\utils\address_parse.py
164
165
166
167
168
169
170
171
172
173
174
def parse_with_checksum(address: str, length: int, prefix: bytes) -> bytes:
    """Base58-decode *address* and validate its binary prefix and checksum.

    Raises:
        ValidationError: on wrong length, wrong prefix, or bad checksum.
    """
    decoded = base58.b58decode(address)
    if len(decoded) != len(prefix) + length + 4:
        raise ValidationError("Invalid length")
    if decoded[:len(prefix)] != prefix:
        raise ValidationError("Invalid prefix")
    # The checksum covers prefix + data.
    verify_checksum(decoded[:-4], decoded[-4:])
    return decoded[len(prefix):-4]

parse_with_prefix(address, length, prefix)

Parse an address with a specific prefix and length.

Source code in accumulate\utils\address_parse.py
150
151
152
153
154
155
156
157
158
159
160
def parse_with_prefix(address: str, length: int, prefix: str) -> bytes:
    """Strip *prefix*, Base58-decode the remainder, and verify its checksum.

    Raises:
        ValidationError: on a missing prefix, wrong length, or bad checksum.
    """
    if not address.startswith(prefix):
        raise ValidationError(f"Invalid prefix for {prefix}")
    decoded = base58.b58decode(address[len(prefix):])
    if len(decoded) != length + 4:
        raise ValidationError(f"Invalid length for {prefix} address")
    data, checksum = decoded[:-4], decoded[-4:]
    # The checksum covers only the data portion (the readable prefix is
    # excluded here, unlike parse_with_checksum).
    verify_checksum(data, checksum)
    return data

verify_checksum(data, checksum)

Verify a double SHA-256 checksum.

Source code in accumulate\utils\address_parse.py
222
223
224
225
226
def verify_checksum(data: bytes, checksum: bytes):
    """Raise ValidationError unless *checksum* equals double-SHA-256(data)[:4]."""
    expected = hashlib.sha256(hashlib.sha256(data).digest()).digest()[:4]
    if expected != checksum:
        raise ValidationError("Invalid checksum")

config

Config

Configuration utility for managing environment-specific settings.

Source code in accumulate\utils\config.py
 5
 6
 7
 8
 9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
class Config:
    """
    Environment-driven configuration helpers for network selection.
    """

    @staticmethod
    def is_testnet() -> bool:
        """
        True when ACCUMULATE_IS_TESTNET is "true" (case-insensitive);
        any other or missing value means mainnet.
        """
        flag = os.getenv("ACCUMULATE_IS_TESTNET", "false")
        return flag.lower() == "true"

    @staticmethod
    def initial_acme_oracle() -> float:
        """
        Initial ACME oracle value: 5000 on testnet, 0.50 on mainnet.
        """
        if Config.is_testnet():
            return 5000
        return 0.50

    @staticmethod
    def get_network_type() -> str:
        """
        Get the current network type.
        :return: 'testnet' or 'mainnet'
        """
        if Config.is_testnet():
            return "testnet"
        return "mainnet"

    @staticmethod
    def get_initial_oracle_value() -> float:
        """
        Get the initial ACME oracle value for the configured network.
        :return: The oracle value as a float.
        """
        return Config.initial_acme_oracle()

get_initial_oracle_value() staticmethod

Get the initial ACME oracle value for the configured network. :return: The oracle value as a float.

Source code in accumulate\utils\config.py
31
32
33
34
35
36
37
@staticmethod
def get_initial_oracle_value() -> float:
    """
    Get the initial ACME oracle value for the configured network.
    :return: The oracle value as a float.
    """
    # Delegates to initial_acme_oracle(), which switches on is_testnet().
    return Config.initial_acme_oracle()

get_network_type() staticmethod

Get the current network type. :return: 'testnet' or 'mainnet'

Source code in accumulate\utils\config.py
23
24
25
26
27
28
29
@staticmethod
def get_network_type() -> str:
    """
    Get the current network type.
    :return: 'testnet' or 'mainnet'
    """
    # Derived from the ACCUMULATE_IS_TESTNET environment flag via is_testnet().
    return "testnet" if Config.is_testnet() else "mainnet"

initial_acme_oracle() staticmethod

Dynamically get the initial ACME oracle value based on the network type.

Source code in accumulate\utils\config.py
16
17
18
19
20
21
@staticmethod
def initial_acme_oracle() -> float:
    """
    Dynamically get the initial ACME oracle value based on the network type.
    """
    # 5000 on testnet, 0.50 on mainnet — NOTE(review): units are presumed
    # to be USD per ACME; confirm against protocol documentation.
    return 5000 if Config.is_testnet() else 0.50

is_testnet() staticmethod

Dynamically get the testnet status based on the environment variable.

Source code in accumulate\utils\config.py
 9
10
11
12
13
14
@staticmethod
def is_testnet() -> bool:
    """
    Dynamically get the testnet status based on the environment variable.
    """
    # Only the exact (case-insensitive) string "true" enables testnet mode;
    # any other or missing value selects mainnet.
    return os.getenv("ACCUMULATE_IS_TESTNET", "false").lower() == "true"

eip712

eth_chain_id(network_name)

Returns the Ethereum chain ID for an Accumulate network name

:param network_name: The name of the network (e.g., "mainnet") :return: The Ethereum chain ID

Source code in accumulate\utils\eip712.py
135
136
137
138
139
140
141
142
143
144
145
146
147
148
def eth_chain_id(network_name: str) -> int:
    """
    Map an Accumulate network name to an Ethereum chain ID.

    :param network_name: network name, case-insensitive (e.g. "mainnet")
    :return: 281 (0x119) for mainnet; otherwise 281 with the last two bytes
        of SHA-256(lowercased name) packed into bits 16..31
    """
    normalized = network_name.lower()
    if normalized == "mainnet":
        return 281  # 0x119

    digest = hashlib.sha256(normalized.encode()).digest()
    suffix_id = int.from_bytes(digest[-2:], "big")
    return 281 | (suffix_id << 16)

hash_eip712(transaction, signature)

Hashes an EIP-712 transaction and signature

:param transaction: The transaction object :param signature: The signature object :return: SHA-256 hash of the EIP-712 message

Source code in accumulate\utils\eip712.py
172
173
174
175
176
177
178
179
180
181
182
def hash_eip712(transaction: Dict[str, Any], signature: Dict[str, Any]) -> bytes:
    """
    SHA-256 hash of the compact-JSON EIP-712 message for a transaction.

    :param transaction: the transaction object
    :param signature: the signature object
    :return: 32-byte SHA-256 digest of the serialized message
    """
    # Compact separators give a canonical byte layout for hashing.
    message = marshal_eip712(transaction, signature)
    serialized = json.dumps(message, separators=(',', ':')).encode()
    return hashlib.sha256(serialized).digest()

marshal_eip712(transaction, signature)

Creates the EIP-712 JSON message for a transaction and signature

:param transaction: The transaction object :param signature: The signature object :return: Serialized EIP-712 JSON message

Source code in accumulate\utils\eip712.py
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
def marshal_eip712(transaction: Dict[str, Any], signature: Dict[str, Any]) -> Dict[str, Any]:
    """
    Build the EIP-712 JSON message for a transaction/signature pair.

    :param transaction: the transaction object
    :param signature: the signature object
    :return: dict with "header", "signature" (the signature metadata), and —
        when both are present and truthy — the body keyed by the transaction
        type name
    """
    message = {
        "header": transaction.get("header", {}),
        "signature": signature.get("metadata", {}),
    }

    body, body_type = transaction.get("body", {}), transaction.get("type")
    if body and body_type:
        # The body is nested under its type name, mirroring EIP-712 structs.
        message[body_type] = body

    return message

fields

BoolField

Bases: Field

Field for boolean values

Source code in accumulate\utils\fields.py
48
49
50
51
52
53
54
class BoolField(Field):
    """Field for boolean values."""

    def to_json(self, value: bool) -> Optional[bool]:
        """Serialize *value*; False is dropped (None) when omit_empty is set."""
        if value is False and self.omit_empty:
            return None
        return value

DateTimeField

Bases: Field

Field for datetime values

Source code in accumulate\utils\fields.py
57
58
59
60
61
62
63
64
65
66
67
68
69
70
class DateTimeField(Field):
    """Field for datetime values, serialized as ISO-8601 strings."""

    def to_json(self, value: datetime) -> Optional[str]:
        """Return the ISO string for *value*; None when omitted-as-empty or
        when *value* is not a datetime."""
        if value is None and self.omit_empty:
            return None
        if isinstance(value, datetime):
            return value.isoformat()
        return None

    def from_json(self, data: dict, instance: Any) -> None:
        """Parse the ISO string stored under this field's name onto *instance*."""
        if self.name not in data:
            return
        try:
            parsed = datetime.fromisoformat(data[self.name])
        except ValueError:
            raise ValueError(f"Invalid datetime format for field {self.name}: {data[self.name]}")
        setattr(instance, self.name, parsed)

DurationField

Bases: Field

Field for timedelta (duration) values.

Source code in accumulate\utils\fields.py
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
class DurationField(Field):
    """Field for timedelta (duration) values."""

    def to_json(self, value: timedelta) -> Optional[dict]:
        """Serialize a timedelta as ``{"seconds": ..., "nanoseconds": ...}``."""
        if self.omit_empty and self.is_empty(value):
            return None
        total_seconds = value.days * 86400 + value.seconds
        nanos = value.microseconds * 1000  # microseconds -> nanoseconds
        return {"seconds": total_seconds, "nanoseconds": nanos}

    def from_json(self, data: dict, instance: Any) -> None:
        """Rebuild a timedelta from its ``{"seconds", "nanoseconds"}`` encoding."""
        if self.name not in data:
            return
        encoded = data[self.name]
        secs = encoded.get("seconds", 0)
        nanos = encoded.get("nanoseconds", 0)
        # nanoseconds -> microseconds for the timedelta constructor
        setattr(instance, self.name, timedelta(seconds=secs, microseconds=nanos / 1000))

    def is_empty(self, value: timedelta) -> bool:
        """A zero-length duration counts as empty."""
        return value == timedelta(0)

from_json(data, instance)

Convert a JSON-compatible dictionary back to a timedelta

Source code in accumulate\utils\fields.py
151
152
153
154
155
156
157
158
159
160
161
def from_json(self, data: dict, instance: Any) -> None:
    """Convert a JSON-compatible dictionary back to a timedelta"""
    # Expects data[self.name] to be {"seconds": int, "nanoseconds": int};
    # missing keys default to 0.
    if self.name in data:
        fields = data[self.name]
        seconds = fields.get("seconds", 0)
        nanoseconds = fields.get("nanoseconds", 0)
        setattr(
            instance,
            self.name,
            # nanoseconds are converted to (fractional) microseconds.
            timedelta(seconds=seconds, microseconds=nanoseconds / 1000),
        )

is_empty(value)

Check if the timedelta is empty (default value)

Source code in accumulate\utils\fields.py
163
164
165
def is_empty(self, value: timedelta) -> bool:
    """Check if the timedelta is empty (default value)"""
    # A zero-length duration is treated as "empty" for omit_empty handling.
    return value == timedelta(0)

to_json(value)

Convert a timedelta to a JSON-compatible dictionary

Source code in accumulate\utils\fields.py
143
144
145
146
147
148
149
def to_json(self, value: timedelta) -> Optional[dict]:
    """Convert a timedelta to a JSON-compatible dictionary"""
    if self.omit_empty and value == timedelta(0):
        return None
    seconds = value.seconds + value.days * 86400  # Total seconds including days
    nanoseconds = value.microseconds * 1000  # Convert microseconds to nanoseconds
    return {"seconds": seconds, "nanoseconds": nanoseconds}

Field

Base class for field access and validation

Source code in accumulate\utils\fields.py
 6
 7
 8
 9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
class Field:
    """Base class for field access and validation"""

    def __init__(self, name: str, required: bool = False, omit_empty: bool = False):
        # name:       key used in JSON payloads and as the attribute name
        # required:   flag recorded for callers (not enforced here)
        # omit_empty: when True, to_json maps empty values to None
        self.name = name
        self.required = required
        self.omit_empty = omit_empty

    def is_empty(self, value: Any) -> bool:
        """Check if a field value is empty (None, or a zero-length str/list/dict)."""
        if value is None:
            return True
        return isinstance(value, (str, list, dict)) and len(value) == 0

    def to_json(self, value: Any) -> Optional[Any]:
        """Serialize the field to JSON, omitting empty values when configured."""
        should_omit = self.omit_empty and self.is_empty(value)
        return None if should_omit else value

    def from_json(self, data: dict, instance: Any) -> None:
        """Deserialize the field from JSON onto `instance` (no-op if absent)."""
        if self.name in data:
            setattr(instance, self.name, data[self.name])

from_json(data, instance)

Deserialize the field from JSON

Source code in accumulate\utils\fields.py
24
25
26
27
def from_json(self, data: dict, instance: Any) -> None:
    """Deserialize the field from JSON"""
    # Only assigns when the key is present; absent keys leave the instance untouched.
    if self.name in data:
        setattr(instance, self.name, data[self.name])

is_empty(value)

Check if a field value is empty

Source code in accumulate\utils\fields.py
14
15
16
def is_empty(self, value: Any) -> bool:
    """Check if a field value is empty"""
    # None, or a zero-length str/list/dict; other types (e.g. 0) are not empty.
    return value is None or (isinstance(value, (str, list, dict)) and len(value) == 0)

to_json(value)

Serialize the field to JSON

Source code in accumulate\utils\fields.py
18
19
20
21
22
def to_json(self, value: Any) -> Optional[Any]:
    """Serialize the field to JSON"""
    # Empty values are mapped to None (omitted) when omit_empty is set.
    if self.omit_empty and self.is_empty(value):
        return None
    return value

FloatField

Bases: Field

Field for float values

Source code in accumulate\utils\fields.py
73
74
75
76
77
78
79
class FloatField(Field):
    """Field for float values"""

    def to_json(self, value: float) -> Optional[float]:
        """Serialize the float; 0.0 counts as the empty value for omission."""
        omit = self.omit_empty and value == 0.0
        return None if omit else value

IntField

Bases: Field

Field for integer values

Source code in accumulate\utils\fields.py
30
31
32
33
34
35
36
class IntField(Field):
    """Field for integer values"""

    def to_json(self, value: int) -> Optional[int]:
        """Serialize the integer; 0 counts as the empty value for omission."""
        omit = self.omit_empty and value == 0
        return None if omit else value

ReadOnlyAccessor

Read-only accessor for managing field serialization and equality checks

Source code in accumulate\utils\fields.py
 82
 83
 84
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
class ReadOnlyAccessor:
    """Read-only accessor for managing field serialization and equality checks"""

    def __init__(self, accessor: Callable[[Any], Any]):
        """
        Initialize with a callable that provides access to the field value
        :param accessor: A callable that takes a parent object and returns the field value
        """
        self._accessor = accessor

    def is_empty(self, obj: Any) -> bool:
        """True when the accessed value is None, an empty string, or zero."""
        current = self._accessor(obj)
        return current is None or current == "" or current == 0

    def equal(self, obj1: Any, obj2: Any) -> bool:
        """Check if two objects have equal field values"""
        lhs = self._accessor(obj1)
        rhs = self._accessor(obj2)
        return lhs == rhs

    def to_json(self, obj: Any) -> Any:
        """Convert the field value to a JSON-compatible format.

        :raises ValueError: when the value has no JSON-compatible representation.
        """
        val = self._accessor(obj)
        # JSON-native types pass through unchanged.
        if isinstance(val, (int, float, str, dict, list)):
            return val
        # Objects exposing to_dict() serialize themselves; otherwise fall back
        # to the instance __dict__.
        if hasattr(val, "to_dict"):
            return val.to_dict()
        if hasattr(val, "__dict__"):
            return val.__dict__
        raise ValueError(f"Cannot serialize value of type {type(val).__name__}")

    def write_to(self, obj: Any) -> bytes:
        """
        Serialize the field value into binary
        For demonstration, this simply converts the value to bytes if possible
        """
        val = self._accessor(obj)
        if isinstance(val, bytes):
            return val
        if isinstance(val, str):
            return val.encode()
        if isinstance(val, (int, float)):
            # Numbers are written as their decimal string form.
            return str(val).encode()
        raise ValueError(f"Cannot write value of type {type(val).__name__} to binary")

    # The accessor is read-only: every mutating operation is rejected.
    def copy_to(self, dst: Any, src: Any):
        """Read-only accessor does not support copying"""
        raise NotImplementedError("ReadOnlyAccessor does not support copying values")

    def read_from(self, data: bytes, obj: Any):
        """Read-only accessor does not support deserialization"""
        raise NotImplementedError("ReadOnlyAccessor does not support deserialization")

    def from_json(self, json_data: Any, obj: Any):
        """Read-only accessor does not support deserialization from JSON"""
        raise NotImplementedError("ReadOnlyAccessor does not support deserialization from JSON")

__init__(accessor)

Initialize with a callable that provides access to the field value :param accessor: A callable that takes a parent object and returns the field value

Source code in accumulate\utils\fields.py
85
86
87
88
89
90
def __init__(self, accessor: Callable[[Any], Any]):
    """
    Initialize with a callable that provides access to the field value
    :param accessor: A callable that takes a parent object and returns the field value
    """
    # Stored once; every accessor method routes reads through this callable.
    self._accessor = accessor

copy_to(dst, src)

Read-only accessor does not support copying

Source code in accumulate\utils\fields.py
127
128
129
def copy_to(self, dst: Any, src: Any):
    """Read-only accessor does not support copying"""
    # Deliberately unimplemented: this accessor only reads values.
    raise NotImplementedError("ReadOnlyAccessor does not support copying values")

equal(obj1, obj2)

Check if two objects have equal field values

Source code in accumulate\utils\fields.py
97
98
99
def equal(self, obj1: Any, obj2: Any) -> bool:
    """Check if two objects have equal field values"""
    # Compares the accessed values with ==, not object identity.
    return self._accessor(obj1) == self._accessor(obj2)

from_json(json_data, obj)

Read-only accessor does not support deserialization from JSON

Source code in accumulate\utils\fields.py
135
136
137
def from_json(self, json_data: Any, obj: Any):
    """Read-only accessor does not support deserialization from JSON"""
    # Deliberately unimplemented: this accessor only reads values.
    raise NotImplementedError("ReadOnlyAccessor does not support deserialization from JSON")

is_empty(obj)

Check if the field is empty

Source code in accumulate\utils\fields.py
92
93
94
95
def is_empty(self, obj: Any) -> bool:
    """Check if the field is empty"""
    value = self._accessor(obj)
    # Empty means None, empty string, or (numeric) zero.
    return value is None or value == "" or value == 0

read_from(data, obj)

Read-only accessor does not support deserialization

Source code in accumulate\utils\fields.py
131
132
133
def read_from(self, data: bytes, obj: Any):
    """Read-only accessor does not support deserialization"""
    # Deliberately unimplemented: this accessor only reads values.
    raise NotImplementedError("ReadOnlyAccessor does not support deserialization")

to_json(obj)

Convert the field value to a JSON-compatible format

Source code in accumulate\utils\fields.py
101
102
103
104
105
106
107
108
109
110
def to_json(self, obj: Any) -> Any:
    """Convert the field value to a JSON-compatible format"""
    value = self._accessor(obj)
    # JSON-native types pass through unchanged.
    if isinstance(value, (int, float, str, dict, list)):
        return value
    # Objects that define to_dict() serialize themselves.
    if hasattr(value, "to_dict"):
        return value.to_dict()
    # Last resort: expose the instance attribute dictionary.
    if hasattr(value, "__dict__"):
        return value.__dict__
    raise ValueError(f"Cannot serialize value of type {type(value).__name__}")

write_to(obj)

Serialize the field value into binary For demonstration, this simply converts the value to bytes if possible

Source code in accumulate\utils\fields.py
112
113
114
115
116
117
118
119
120
121
122
123
124
def write_to(self, obj: Any) -> bytes:
    """
    Serialize the field value into binary
    For demonstration, this simply converts the value to bytes if possible
    """
    value = self._accessor(obj)
    # Numbers become their decimal string form, strings are UTF-8 encoded,
    # and bytes pass through unchanged.
    if isinstance(value, (int, float)):
        return str(value).encode()
    if isinstance(value, str):
        return value.encode()
    if isinstance(value, bytes):
        return value
    raise ValueError(f"Cannot write value of type {type(value).__name__} to binary")

StringField

Bases: Field

Field for string values

Source code in accumulate\utils\fields.py
39
40
41
42
43
44
45
class StringField(Field):
    """Field for string values"""

    def to_json(self, value: str) -> Optional[str]:
        """Serialize the string; falsy values are omitted when omit_empty is set."""
        return value if (value or not self.omit_empty) else None

TimeAccessor

Bases: ReadOnlyAccessor

Accessor for managing datetime fields

Source code in accumulate\utils\fields.py
168
169
170
171
172
173
174
175
176
177
class TimeAccessor(ReadOnlyAccessor):
    """Accessor for managing datetime fields"""

    def __init__(self, accessor: Callable[[Any], datetime]):
        # No extra state; delegate setup to the read-only base class.
        super().__init__(accessor)

    def to_json(self, obj: Any) -> Optional[str]:
        """Convert a datetime field to JSON-compatible ISO format"""
        moment = self._accessor(obj)
        if moment is None:
            return None
        return moment.isoformat()

to_json(obj)

Convert a datetime field to JSON-compatible ISO format

Source code in accumulate\utils\fields.py
174
175
176
177
def to_json(self, obj: Any) -> Optional[str]:
    """Convert a datetime field to JSON-compatible ISO format"""
    value = self._accessor(obj)
    # None stays None; datetimes are rendered via datetime.isoformat().
    return None if value is None else value.isoformat()

formatting

_calculate_checksum(data)

Calculates a double SHA-256 checksum

Source code in accumulate\utils\formatting.py
120
121
122
123
def _calculate_checksum(data: bytes) -> bytes:
    """Calculates a double SHA-256 checksum"""
    checksum = hashlib.sha256(data).digest()
    return hashlib.sha256(checksum).digest()[:4]

_format_with_checksum(hash_bytes, prefix)

Formats the address with a checksum

Source code in accumulate\utils\formatting.py
110
111
112
113
114
115
116
117
118
def _format_with_checksum(hash_bytes: bytes, prefix: bytes) -> str:
    """Formats the address with a checksum.

    :param hash_bytes: raw hash payload (must be non-empty)
    :param prefix: version/prefix bytes prepended before checksumming
    :return: base58 encoding of prefix + hash + 4-byte checksum
    :raises ValueError: if hash_bytes is empty
    """
    if not hash_bytes:
        raise ValueError("Hash bytes cannot be empty")

    payload = prefix + hash_bytes
    payload += _calculate_checksum(payload)
    return base58.b58encode(payload).decode()

_format_with_prefix(hash_bytes, prefix)

Formats the address with a prefix and checksum

Source code in accumulate\utils\formatting.py
100
101
102
103
104
105
106
107
108
def _format_with_prefix(hash_bytes: bytes, prefix: str) -> str:
    """Formats the address with a prefix and checksum.

    :param hash_bytes: raw hash payload (must be non-empty)
    :param prefix: textual prefix, e.g. "AC1"
    :return: the literal prefix followed by base58(hash + checksum)
    :raises ValueError: if hash_bytes is empty
    """
    if not hash_bytes:
        raise ValueError("Hash bytes cannot be empty")

    raw = prefix.encode() + hash_bytes
    raw += _calculate_checksum(raw)
    # The checksum covers prefix+hash, but the textual prefix stays literal:
    # only the bytes after the prefix are base58-encoded.
    encoded_tail = base58.b58encode(raw[len(prefix):]).decode()
    return prefix + encoded_tail

_hash_with_algorithm(data, algorithm)

Hashes data using the specified algorithm

:param data: Data to hash :param algorithm: Hashing algorithm (e.g., 'sha256', 'sha512'). :return: Hashed bytes

Source code in accumulate\utils\formatting.py
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
def _hash_with_algorithm(data: bytes, algorithm: str) -> bytes:
    """
    Hashes data using the specified algorithm

    :param data: Data to hash
    :param algorithm: Hashing algorithm (e.g., 'sha256', 'sha512').
    :return: Hashed bytes
    """
    algorithms = {
        "sha256": hashes.SHA256,
        "sha512": hashes.SHA512,
    }
    if algorithm not in algorithms:
        raise ValueError(f"Unsupported hash algorithm: {algorithm}")

    digest = hashes.Hash(algorithms[algorithm](), backend=default_backend())
    digest.update(data)
    return digest.finalize()

format_ac1(hash_bytes)

Formats an Accumulate AC1 (ed25519) public key hash

Source code in accumulate\utils\formatting.py
11
12
13
def format_ac1(hash_bytes: bytes) -> str:
    """Formats an Accumulate AC1 (ed25519) public key hash"""
    # Delegates to the shared prefix+checksum formatter with the "AC1" tag.
    return _format_with_prefix(hash_bytes, "AC1")

format_ac2(hash_bytes)

Formats an Accumulate AC2 (ecdsa) public key hash

Source code in accumulate\utils\formatting.py
19
20
21
def format_ac2(hash_bytes: bytes) -> str:
    """Formats an Accumulate AC2 (ecdsa) public key hash"""
    # Delegates to the shared prefix+checksum formatter with the "AC2" tag.
    return _format_with_prefix(hash_bytes, "AC2")

format_ac3(hash_bytes)

Formats an Accumulate AC3 (rsa) public key hash

Source code in accumulate\utils\formatting.py
27
28
29
def format_ac3(hash_bytes: bytes) -> str:
    """Formats an Accumulate AC3 (rsa) public key hash"""
    # Delegates to the shared prefix+checksum formatter with the "AC3" tag.
    return _format_with_prefix(hash_bytes, "AC3")

format_as1(seed)

Formats an Accumulate AS1 (ed25519) private key

Source code in accumulate\utils\formatting.py
15
16
17
def format_as1(seed: bytes) -> str:
    """Formats an Accumulate AS1 (ed25519) private key"""
    # Delegates to the shared prefix+checksum formatter with the "AS1" tag.
    return _format_with_prefix(seed, "AS1")

format_as2(seed)

Formats an Accumulate AS2 (ecdsa) private key

Source code in accumulate\utils\formatting.py
23
24
25
def format_as2(seed: bytes) -> str:
    """Formats an Accumulate AS2 (ecdsa) private key"""
    # Delegates to the shared prefix+checksum formatter with the "AS2" tag.
    return _format_with_prefix(seed, "AS2")

format_as3(seed)

Formats an Accumulate AS3 (rsa) private key

Source code in accumulate\utils\formatting.py
31
32
33
def format_as3(seed: bytes) -> str:
    """Formats an Accumulate AS3 (rsa) private key"""
    # Delegates to the shared prefix+checksum formatter with the "AS3" tag.
    return _format_with_prefix(seed, "AS3")

format_btc(hash_bytes)

Formats a Bitcoin P2PKH address prefixed with 'BT'

Source code in accumulate\utils\formatting.py
43
44
45
def format_btc(hash_bytes: bytes) -> str:
    """Formats a Bitcoin P2PKH address prefixed with 'BT'"""
    # b'\x00' is the Bitcoin mainnet P2PKH version byte; the literal "BT"
    # marks the result as a Bitcoin-style address within Accumulate.
    return "BT" + _format_with_checksum(hash_bytes, b'\x00')

format_eth(hash_bytes)

Formats an Ethereum address

Source code in accumulate\utils\formatting.py
47
48
49
50
51
52
53
54
55
56
def format_eth(hash_bytes: bytes) -> str:
    """Formats an Ethereum address (0x + 40 hex chars).

    Input is normalized to exactly 20 bytes: longer inputs keep their
    trailing 20 bytes, shorter inputs are left-padded with zero bytes.
    """
    if len(hash_bytes) > 20:
        hash_bytes = hash_bytes[-20:]
    else:
        # rjust is a no-op when the input is already exactly 20 bytes.
        hash_bytes = hash_bytes.rjust(20, b"\x00")
    return "0x" + hash_bytes.hex()

format_fa(hash_bytes)

Formats a Factom FA public key hash

Source code in accumulate\utils\formatting.py
35
36
37
def format_fa(hash_bytes: bytes) -> str:
    """Formats a Factom FA public key hash"""
    # b'\x5f\xb1' is the version prefix used for Factom FA addresses.
    return _format_with_checksum(hash_bytes, b'\x5f\xb1')

format_fs(seed)

Formats a Factom Fs private key

Source code in accumulate\utils\formatting.py
39
40
41
def format_fs(seed: bytes) -> str:
    """Formats a Factom Fs private key"""
    # b'\x64\x78' is the version prefix used for Factom Fs private keys.
    return _format_with_checksum(seed, b'\x64\x78')

format_mh(hash_bytes, code='sha256')

Formats a hash using a specified hashing algorithm and appends a checksum

:param hash_bytes: Input data to be hashed :param code: Hashing algorithm (e.g., 'sha256', 'sha512') :return: Multihash-formatted string

Source code in accumulate\utils\formatting.py
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
def format_mh(hash_bytes: bytes, code: Optional[str] = "sha256") -> str:
    """
    Formats a hash using a specified hashing algorithm and appends a checksum

    :param hash_bytes: Input data to be hashed
    :param code: Hashing algorithm (e.g., 'sha256', 'sha512'); None falls back to 'sha256'
    :return: Multihash-formatted string
    :raises ValueError: if hash_bytes is empty or the algorithm is unsupported
    """
    if not hash_bytes:
        raise ValueError("Hash bytes cannot be empty")

    # The annotation allows None; honor it here instead of letting
    # _hash_with_algorithm fail on a None lookup.
    algorithm = code or "sha256"

    # Hash the input using the specified algorithm
    hashed_data = _hash_with_algorithm(hash_bytes, algorithm)

    # Add checksum (computed over the "MH"-prefixed digest)
    checksum = _calculate_checksum(b"MH" + hashed_data)
    hashed_data += checksum

    # Encode with base58 and add 'MH' prefix
    return "MH" + base58.b58encode(hashed_data).decode()

import_helpers

get_signer()

Dynamically import Signer to prevent circular imports.

Source code in accumulate\utils\import_helpers.py
 9
10
11
12
def get_signer():
    """Dynamically import `Signer` to prevent circular imports."""
    # Deferred to call time: importing at module load would create a cycle
    # between this module and accumulate.signing.signer.
    from accumulate.signing.signer import Signer
    return Signer

is_lite_account_lazy(url)

Lazy-load is_lite_account() to prevent circular imports.

Source code in accumulate\utils\import_helpers.py
14
15
16
17
def is_lite_account_lazy(url: URL) -> bool:
    """Lazy-load `is_lite_account()` to prevent circular imports."""
    # Deferred to call time: importing at module load would create a cycle
    # with accumulate.utils.validation.
    from accumulate.utils.validation import is_lite_account
    return is_lite_account(url)

query_signer_version(account_url, client=None) async

Fetch the signer version from the network API using AccumulateClient.

Source code in accumulate\utils\import_helpers.py
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
async def query_signer_version(account_url: URL, client: Optional["AccumulateClient"] = None) -> Optional[int]:
    """Fetch the signer version from the network API using AccumulateClient.

    :param account_url: URL of the account whose signer version is queried
    :param client: optional pre-built client; when omitted, one is created
        (and closed) internally
    :return: the signer version, or None on any failure
    """
    owns_client = client is None
    if owns_client:
        from accumulate.api.client import AccumulateClient
        from accumulate.config import get_accumulate_rpc_url
        client = AccumulateClient(get_accumulate_rpc_url())

    try:
        logger.info(f" Querying signer version for {account_url}...")

        # BUG FIX: is_lite_account_lazy takes only the URL. Passing the client
        # as a second argument raised TypeError, which the broad except below
        # swallowed — so this function always returned None.
        query_type = "liteIdentity" if is_lite_account_lazy(account_url) else "default"

        params = {"scope": str(account_url), "query": {"queryType": query_type}}
        response = await client.json_rpc_request("query", params)

        signer_version = response.get("result", {}).get("account", {}).get("signerVersion", 1)

        logger.info(f" Signer version for {account_url}: {signer_version}")
        return signer_version

    except Exception as e:
        logger.error(f" Failed to fetch signer version for {account_url}: {e}")
        return None  # Return None instead of 1 to indicate an error

    finally:
        # Only close clients we created; a caller-supplied client stays open
        # for the caller to reuse.
        if owns_client:
            await client.close()

protocols

BinaryValue

Bases: Protocol

Protocol for objects supporting binary serialization and deserialization.

Source code in accumulate\utils\protocols.py
 5
 6
 7
 8
 9
10
11
12
13
14
15
16
17
18
19
20
21
22
@runtime_checkable
class BinaryValue(Protocol):
    """Protocol for objects supporting binary serialization and deserialization.

    @runtime_checkable allows isinstance() checks, which verify only that the
    methods exist — not their signatures.
    """
    def marshal_binary(self) -> bytes:
        """Serialize to binary format."""
        raise NotImplementedError("marshal_binary must be implemented")

    def unmarshal_binary(self, data: bytes) -> None:
        """Deserialize from binary format."""
        raise NotImplementedError("unmarshal_binary must be implemented")

    def copy_as_interface(self) -> Any:
        """Create a copy of the instance."""
        raise NotImplementedError("copy_as_interface must be implemented")

    def unmarshal_binary_from(self, reader: BinaryIO) -> None:
        """Unmarshal binary data from a stream."""
        raise NotImplementedError("unmarshal_binary_from must be implemented")

copy_as_interface()

Create a copy of the instance.

Source code in accumulate\utils\protocols.py
16
17
18
def copy_as_interface(self) -> Any:
    """Create a copy of the instance."""
    # Protocol stub: concrete implementations must override.
    raise NotImplementedError("copy_as_interface must be implemented")

marshal_binary()

Serialize to binary format.

Source code in accumulate\utils\protocols.py
 8
 9
10
def marshal_binary(self) -> bytes:
    """Serialize to binary format."""
    # Protocol stub: concrete implementations must override.
    raise NotImplementedError("marshal_binary must be implemented")

unmarshal_binary(data)

Deserialize from binary format.

Source code in accumulate\utils\protocols.py
12
13
14
def unmarshal_binary(self, data: bytes) -> None:
    """Deserialize from binary format."""
    # Protocol stub: concrete implementations must override.
    raise NotImplementedError("unmarshal_binary must be implemented")

unmarshal_binary_from(reader)

Unmarshal binary data from a stream.

Source code in accumulate\utils\protocols.py
20
21
22
def unmarshal_binary_from(self, reader: BinaryIO) -> None:
    """Unmarshal binary data from a stream."""
    # Protocol stub: concrete implementations must override.
    raise NotImplementedError("unmarshal_binary_from must be implemented")

UnionValue

Bases: BinaryValue, Protocol

Protocol for objects supporting field unmarshaling.

Source code in accumulate\utils\protocols.py
24
25
26
27
28
29
@runtime_checkable
class UnionValue(BinaryValue, Protocol):
    """Protocol for objects supporting field unmarshaling.

    Extends BinaryValue with streamed field-level deserialization.
    """
    def unmarshal_fields_from(self, reader: BinaryIO) -> None:
        """Unmarshal fields from a binary stream."""
        raise NotImplementedError("unmarshal_fields_from must be implemented")

unmarshal_fields_from(reader)

Unmarshal fields from a binary stream.

Source code in accumulate\utils\protocols.py
27
28
29
def unmarshal_fields_from(self, reader: BinaryIO) -> None:
    """Unmarshal fields from a binary stream."""
    # Protocol stub: concrete implementations must override.
    raise NotImplementedError("unmarshal_fields_from must be implemented")

rational

Rational

Source code in accumulate\utils\rational.py
 6
 7
 8
 9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
class Rational:
    """A numerator/denominator ratio used for threshold calculations."""

    def __init__(self, numerator: Union[int, float], denominator: Union[int, float]):
        # Delegate validation and assignment to set() so both paths agree.
        self.set(numerator, denominator)

    def set(self, numerator: Union[int, float], denominator: Union[int, float]):
        """Set the numerator and denominator of the rational value"""
        if denominator == 0:
            raise ValueError("Denominator cannot be zero")
        self.numerator = numerator
        self.denominator = denominator

    def threshold(self, key_count: int) -> int:
        """
        Calculate the threshold based on the ratio and key count
        Equivalent to keyCount * numerator / denominator, rounded up
        """
        if key_count < 0:
            raise ValueError("Key count cannot be negative")
        return math.ceil(key_count * self.numerator / self.denominator)

    def __repr__(self):
        return f"Rational({self.numerator}, {self.denominator})"

set(numerator, denominator)

Set the numerator and denominator of the rational value

Source code in accumulate\utils\rational.py
13
14
15
16
17
18
def set(self, numerator: Union[int, float], denominator: Union[int, float]):
    """Set the numerator and denominator of the rational value"""
    # Reject zero denominators up front so threshold() can never divide by zero.
    if denominator == 0:
        raise ValueError("Denominator cannot be zero")
    self.numerator = numerator
    self.denominator = denominator

threshold(key_count)

Calculate the threshold based on the ratio and key count Equivalent to keyCount * numerator / denominator, rounded up

Source code in accumulate\utils\rational.py
20
21
22
23
24
25
26
27
28
def threshold(self, key_count: int) -> int:
    """
    Calculate the threshold based on the ratio and key count
    Equivalent to keyCount * numerator / denominator, rounded up
    """
    if key_count < 0:
        raise ValueError("Key count cannot be negative")
    # Float division then ceil: e.g. 3 keys at 2/3 -> ceil(2.0) == 2.
    value = key_count * self.numerator / self.denominator
    return math.ceil(value)

union

UnionValue

A Pythonic implementation for managing values with multiple representations, inspired by Go's UnionValue interface

Source code in accumulate\utils\union.py
 7
 8
 9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
class UnionValue:
    """
    A Pythonic implementation for managing values with multiple representations,
    inspired by Go's UnionValue interface
    """

    def __init__(self, value: Union[bytes, str, int, float, None] = None):
        self.value = value

    def marshal_binary(self) -> bytes:
        """Convert the value to its binary representation.

        :raises ValueError: when the held value has no binary form.
        """
        held = self.value
        if isinstance(held, bytes):
            return held
        if isinstance(held, str):
            return held.encode("utf-8")
        if isinstance(held, (int, float)):
            # Numbers serialize as their UTF-8 decimal string.
            return str(held).encode("utf-8")
        raise ValueError("Cannot marshal value to binary")

    def unmarshal_binary(self, data: bytes):
        """Set the value from its binary representation"""
        self.value = data

    def marshal_json(self) -> str:
        """Convert the value to its JSON representation"""
        try:
            return json.dumps(self.value)
        except TypeError:
            raise ValueError("Value cannot be converted to JSON")

    def unmarshal_json(self, data: str):
        """Set the value from its JSON representation"""
        try:
            parsed = json.loads(data)
        except json.JSONDecodeError:
            raise ValueError("Invalid JSON representation")
        self.value = parsed

    def copy(self) -> "UnionValue":
        """Create a copy of the current UnionValue"""
        return UnionValue(self.value)

    def __eq__(self, other: Any) -> bool:
        """Check equality between two UnionValue instances"""
        return isinstance(other, UnionValue) and self.value == other.value

    def __hash__(self):
        """Allow the UnionValue to be used in hashable collections"""
        return hash(self.value)

    def __repr__(self):
        """Human-readable representation"""
        return f"UnionValue(value={repr(self.value)})"

__eq__(other)

Check equality between two UnionValue instances

Source code in accumulate\utils\union.py
49
50
51
52
53
def __eq__(self, other: Any) -> bool:
    """Check equality between two UnionValue instances"""
    # Non-UnionValue operands are never equal (no implicit unwrapping).
    if not isinstance(other, UnionValue):
        return False
    return self.value == other.value

__hash__()

Allow the UnionValue to be used in hashable collections

Source code in accumulate\utils\union.py
55
56
57
def __hash__(self):
    """Allow the UnionValue to be used in hashable collections"""
    # Delegates to the held value; unhashable values (e.g. lists) will raise.
    return hash(self.value)

__repr__()

Human-readable representation

Source code in accumulate\utils\union.py
59
60
61
def __repr__(self):
    """Human-readable representation"""
    return f"UnionValue(value={repr(self.value)})"

copy()

Create a copy of the current UnionValue

Source code in accumulate\utils\union.py
45
46
47
def copy(self) -> "UnionValue":
    """Create a copy of the current UnionValue"""
    # Shallow copy: the new instance shares the same value object.
    return UnionValue(self.value)

marshal_binary()

Convert the value to its binary representation

Source code in accumulate\utils\union.py
16
17
18
19
20
21
22
23
24
25
def marshal_binary(self) -> bytes:
    """Convert the value to its binary representation"""
    if isinstance(self.value, bytes):
        return self.value
    elif isinstance(self.value, str):
        return self.value.encode("utf-8")
    elif isinstance(self.value, (int, float)):
        # Numbers serialize as their UTF-8 decimal string.
        return str(self.value).encode("utf-8")
    else:
        # None (and anything else) has no binary form.
        raise ValueError("Cannot marshal value to binary")

marshal_json()

Convert the value to its JSON representation

Source code in accumulate\utils\union.py
31
32
33
34
35
36
def marshal_json(self) -> str:
    """Convert the value to its JSON representation"""
    try:
        return json.dumps(self.value)
    except TypeError:
        # Normalize json.dumps' TypeError to the module's ValueError convention.
        raise ValueError("Value cannot be converted to JSON")

unmarshal_binary(data)

Set the value from its binary representation

Source code in accumulate\utils\union.py
27
28
29
def unmarshal_binary(self, data: bytes):
    """Set the value from its binary representation"""
    # Stored as raw bytes; no decoding is attempted.
    self.value = data

unmarshal_json(data)

Set the value from its JSON representation

Source code in accumulate\utils\union.py
38
39
40
41
42
43
def unmarshal_json(self, data: str):
    """Set the value from its JSON representation"""
    try:
        self.value = json.loads(data)
    except json.JSONDecodeError:
        # Normalize the decode error to the module's ValueError convention.
        raise ValueError("Invalid JSON representation")

url

InvalidHashError

Bases: URLParseError

Raised when a transaction ID includes an invalid hash

Source code in accumulate\utils\url.py
28
29
class InvalidHashError(URLParseError):
    """Raised when a transaction ID includes an invalid hash"""
    # Marker subclass: callers can catch URLParseError to handle all parse failures.

MissingHashError

Bases: URLParseError

Raised when a transaction ID does not include a hash

Source code in accumulate\utils\url.py
24
25
class MissingHashError(URLParseError):
    """Raised when a transaction ID does not include a hash"""
    # Marker subclass: callers can catch URLParseError to handle all parse failures.

MissingHostError

Bases: URLParseError

Raised when a URL does not include a hostname

Source code in accumulate\utils\url.py
16
17
class MissingHostError(URLParseError):
    """Raised when a URL does not include a hostname"""
    # Marker subclass: callers can catch URLParseError to handle all parse failures.

URL

Source code in accumulate\utils\url.py
 49
 50
 51
 52
 53
 54
 55
 56
 57
 58
 59
 60
 61
 62
 63
 64
 65
 66
 67
 68
 69
 70
 71
 72
 73
 74
 75
 76
 77
 78
 79
 80
 81
 82
 83
 84
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
class URL:
    """Represents an Accumulate URL (``acc://<authority><path>``).

    Components are stored individually; the string form and derived
    digests are memoized lazily on first access, so instances should be
    treated as effectively immutable once used.

    Fixes vs. previous revision: removed print-based debug output from
    library code, removed the unused ``_lock`` attribute, and added
    ``__hash__`` (defining ``__eq__`` alone made instances unhashable).
    """

    def __init__(self, user_info: str = "", authority: Optional[str] = None, path: Optional[str] = "", query: str = "", fragment: str = ""):
        self.user_info = user_info
        self.authority = authority or ""
        self.path = self._normalize_path(path or "")
        self.query = query
        self.fragment = fragment

        # Memoized values, computed lazily on first access.
        self._str_cache = None
        self._hash_cache = None
        self._account_id_cache = None
        self._identity_id_cache = None

    @staticmethod
    def _normalize_path(path: str) -> str:
        """Normalize a path to ensure it is clean and starts with a '/'."""
        path = path.strip("/")
        return f"/{path}" if path else ""

    @staticmethod
    def parse(url_str: str) -> "URL":
        """Parse a string into an Accumulate URL.

        Normalizes redundant ``acc://`` prefixes and a trailing '@',
        then validates scheme, authority and user info.

        Raises:
            ValueError: empty input, missing/empty authority, malformed
                '@' separator, or an authority ending in '.com'.
            WrongSchemeError: via wrong_scheme() when the scheme is not 'acc'.
        """
        if not url_str:
            raise ValueError("URL string cannot be empty")

        if not url_str.startswith("acc://"):
            raise wrong_scheme(url_str)

        # Collapse accidental double prefixes such as "acc://acc://foo".
        while url_str.startswith("acc://acc://"):
            url_str = url_str.replace("acc://acc://", "acc://")

        # A trailing '@' would leave an empty authority; drop it.
        if url_str.endswith("@"):
            url_str = url_str.rstrip("@")

        parsed = urllib.parse.urlparse(url_str)

        if parsed.scheme != "acc":
            raise wrong_scheme(url_str)

        if not parsed.netloc:
            raise ValueError("Invalid URL: Authority cannot be empty")

        user_info, authority = "", parsed.netloc
        if "@" in parsed.netloc:
            parts = parsed.netloc.split("@", 1)
            if len(parts) != 2 or not parts[0] or not parts[1]:
                raise ValueError("Invalid URL: '@' must separate valid user info and authority.")
            user_info, authority = parts

        if not authority:
            raise ValueError("Invalid URL: Authority cannot be empty.")

        # Reject .com domains in the authority.
        if authority.endswith(".com"):
            raise ValueError(f"Invalid authority domain: {authority}. Domains ending with '.com' are not allowed.")

        # Store the authority WITH its scheme prefix, never duplicated.
        if not authority.startswith("acc://"):
            authority = f"acc://{authority}"

        return URL(
            user_info=user_info,
            authority=authority,
            path=parsed.path,
            query=parsed.query,
            fragment=parsed.fragment,
        )

    def marshal(self) -> bytes:
        """Serialize to UTF-8 bytes.

        NOTE(review): the "acc://" literal is prepended even though the
        stored authority usually already carries it; parse() collapses the
        doubled prefix on round-trip, but confirm the wire format is intended.
        """
        url_str = f"acc://{self.user_info + '@' if self.user_info else ''}{self.authority}{self.path or ''}"
        return url_str.encode('utf-8')

    @staticmethod
    def unmarshal(data: bytes) -> "URL":
        """Deserialize UTF-8 bytes back into a URL via parse()."""
        return URL.parse(data.decode('utf-8'))

    def __str__(self) -> str:
        """Canonical string form, memoized after the first call."""
        if self._str_cache is None:
            components = []

            # Preserve user_info if present.
            if self.user_info:
                components.append(self.user_info + "@")

            # Ensure authority always starts with "acc://", without duplication.
            authority = self.authority
            if not authority.startswith("acc://"):
                authority = f"acc://{authority}"
            components.append(authority)

            if self.path:
                components.append(self.path)
            if self.query:
                components.append(f"?{self.query}")
            if self.fragment:
                components.append(f"#{self.fragment}")

            self._str_cache = "".join(components)

        return self._str_cache

    def is_key_page_url(self) -> bool:
        """True when the path has exactly three segments and the last is numeric."""
        path_parts = self.path.strip("/").split("/")
        return len(path_parts) == 3 and path_parts[-1].isdigit()

    def __eq__(self, other: Any) -> bool:
        """Equality operator for URLs (case-insensitive)."""
        return isinstance(other, URL) and str(self).lower() == str(other).lower()

    def __hash__(self) -> int:
        # Defining __eq__ alone sets __hash__ to None and makes instances
        # unhashable; hash consistently with the case-insensitive __eq__.
        return hash(str(self).lower())

    def __lt__(self, other: "URL") -> bool:
        """Comparison operator for URLs (case-insensitive)."""
        return str(self).lower() < str(other).lower()

    def with_user_info(self, user_info: str) -> "URL":
        """Return a new URL with modified user info."""
        return URL(user_info=user_info, authority=self.authority, path=self.path, query=self.query, fragment=self.fragment)

    def with_path(self, path: str) -> "URL":
        """Return a new URL with modified path."""
        return URL(user_info=self.user_info, authority=self.authority, path=path, query=self.query, fragment=self.fragment)

    def with_query(self, query: str) -> "URL":
        """Return a new URL with modified query."""
        return URL(user_info=self.user_info, authority=self.authority, path=self.path, query=query, fragment=self.fragment)

    def strip_extras(self) -> "URL":
        """Return a URL with only the authority and path."""
        return URL(authority=self.authority, path=self.path)

    def root_identity(self) -> "URL":
        """Return the root identity (authority only)."""
        return URL(authority=self.authority)

    def identity(self) -> "URL":
        """Return the Accumulate Digital Identity (ADI): authority, no path."""
        return URL(authority=self.authority, path="")

    def account_id(self) -> bytes:
        """SHA-256 digest of the lower-cased authority+path, memoized."""
        if not self._account_id_cache:
            normalized = f"{self.authority}{self.path}".lower()
            self._account_id_cache = hashlib.sha256(normalized.encode()).digest()
        return self._account_id_cache

    def identity_id(self) -> bytes:
        """SHA-256 digest of the authority up to the first ':', memoized.

        NOTE(review): when the stored authority includes the "acc://"
        prefix, split(":")[0] yields just "acc" -- confirm this is the
        intended input before relying on the digest.
        """
        if not self._identity_id_cache:
            normalized = self.authority.split(":")[0].lower()
            self._identity_id_cache = hashlib.sha256(normalized.encode()).digest()
        return self._identity_id_cache

    def hash(self) -> bytes:
        """SHA-256 over account ID plus hashed query/fragment, memoized."""
        if not self._hash_cache:
            account_hash = self.account_id()
            query_hash = hashlib.sha256(self.query.encode()).digest() if self.query else b""
            fragment_hash = hashlib.sha256(self.fragment.encode()).digest() if self.fragment else b""
            self._hash_cache = hashlib.sha256(account_hash + query_hash + fragment_hash).digest()
        return self._hash_cache

    def valid_utf8(self) -> bool:
        """Validate that all components are UTF-8."""
        components = [self.user_info, self.authority, self.path, self.query, self.fragment]
        try:
            for comp in components:
                if comp:
                    comp.encode("utf-8", "strict")
            return True
        except UnicodeEncodeError:
            return False

__eq__(other)

Equality operator for URLs (case-insensitive).

Source code in accumulate\utils\url.py
209
210
211
def __eq__(self, other: Any) -> bool:
    """Case-insensitive equality: URLs match when their string forms differ only by case."""
    if not isinstance(other, URL):
        return False
    return str(self).lower() == str(other).lower()

__lt__(other)

Comparison operator for URLs.

Source code in accumulate\utils\url.py
213
214
215
def __lt__(self, other: "URL") -> bool:
    """Comparison operator for URLs."""
    return str(self).lower() < str(other).lower()

_normalize_path(path) staticmethod

Normalize a path to ensure it is clean and starts with a '/'.

Source code in accumulate\utils\url.py
65
66
67
68
69
@staticmethod
def _normalize_path(path: str) -> str:
    """Normalize a path to ensure it is clean and starts with a '/'."""
    path = path.strip("/")
    return f"/{path}" if path else ""

account_id()

Generate the Account ID hash.

Source code in accumulate\utils\url.py
247
248
249
250
251
252
def account_id(self) -> bytes:
    """Return the SHA-256 digest of the lower-cased authority+path, memoized on the instance."""
    cached = self._account_id_cache
    if cached:
        return cached
    key = (self.authority + self.path).lower().encode()
    digest = hashlib.sha256(key).digest()
    self._account_id_cache = digest
    return digest

hash()

Generate a hash of the entire URL.

Source code in accumulate\utils\url.py
261
262
263
264
265
266
267
268
def hash(self) -> bytes:
    """Return a SHA-256 digest covering the account ID plus hashed query and fragment, memoized."""
    if self._hash_cache:
        return self._hash_cache
    parts = [self.account_id()]
    if self.query:
        parts.append(hashlib.sha256(self.query.encode()).digest())
    if self.fragment:
        parts.append(hashlib.sha256(self.fragment.encode()).digest())
    self._hash_cache = hashlib.sha256(b"".join(parts)).digest()
    return self._hash_cache

identity()

Return the Accumulate Digital Identity (ADI), which is the root authority.

Source code in accumulate\utils\url.py
237
238
239
240
241
242
243
244
245
def identity(self) -> "URL":
    """Return the Accumulate Digital Identity (ADI), which is the root authority."""
    # NOTE(review): print-based debug output in library code; consider logging.
    print(f"DEBUG: Original path: {self.path}")

    # The ADI is just the authority, no path.
    result = URL(authority=self.authority, path="")

    print(f"DEBUG: Returning identity URL: {result.authority} with path: {result.path}")
    return result

identity_id()

Generate the Identity ID hash.

Source code in accumulate\utils\url.py
254
255
256
257
258
259
def identity_id(self) -> bytes:
    """Return the SHA-256 digest of the authority up to the first ':', memoized.

    NOTE(review): when the authority carries an "acc://" prefix, the
    portion before the first ':' is just "acc" -- confirm intent.
    """
    cached = self._identity_id_cache
    if cached:
        return cached
    host, _, _ = self.authority.partition(":")
    digest = hashlib.sha256(host.lower().encode()).digest()
    self._identity_id_cache = digest
    return digest

is_key_page_url()

Check if the URL represents a valid key page.

Source code in accumulate\utils\url.py
201
202
203
204
205
206
def is_key_page_url(self) -> bool:
    """Return True when the path has exactly three segments and the last one is numeric."""
    segments = self.path.strip("/").split("/")
    return len(segments) == 3 and segments[-1].isdigit()

parse(url_str) staticmethod

Parse a string into an Accumulate URL

Source code in accumulate\utils\url.py
 73
 74
 75
 76
 77
 78
 79
 80
 81
 82
 83
 84
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
@staticmethod
def parse(url_str: str) -> "URL":
    """Parse a string into an Accumulate URL.

    Normalizes redundant "acc://" prefixes and a trailing '@', then
    validates scheme, authority and user info. (Debug prints removed:
    library code should not write to stdout.)

    Raises:
        ValueError: empty input, missing/empty authority, malformed '@'
            separator, or an authority ending in '.com'.
        WrongSchemeError: via wrong_scheme() when the scheme is not 'acc'.
    """
    if not url_str:
        raise ValueError("URL string cannot be empty")

    if not url_str.startswith("acc://"):
        raise wrong_scheme(url_str)

    # Collapse accidental double prefixes such as "acc://acc://foo".
    while url_str.startswith("acc://acc://"):
        url_str = url_str.replace("acc://acc://", "acc://")

    # A trailing '@' would leave an empty authority; drop it.
    if url_str.endswith("@"):
        url_str = url_str.rstrip("@")

    parsed = urllib.parse.urlparse(url_str)

    if parsed.scheme != "acc":
        raise wrong_scheme(url_str)

    if not parsed.netloc:
        raise ValueError("Invalid URL: Authority cannot be empty")

    user_info, authority = "", parsed.netloc
    if "@" in parsed.netloc:
        parts = parsed.netloc.split("@", 1)
        if len(parts) != 2 or not parts[0] or not parts[1]:
            raise ValueError("Invalid URL: '@' must separate valid user info and authority.")
        user_info, authority = parts

    if not authority:
        raise ValueError("Invalid URL: Authority cannot be empty.")

    # Reject .com domains in the authority.
    if authority.endswith(".com"):
        raise ValueError(f"Invalid authority domain: {authority}. Domains ending with '.com' are not allowed.")

    # Store the authority WITH its scheme prefix, never duplicated.
    if not authority.startswith("acc://"):
        authority = f"acc://{authority}"

    return URL(
        user_info=user_info,
        authority=authority,
        path=parsed.path,
        query=parsed.query,
        fragment=parsed.fragment,
    )

root_identity()

Return the root identity (authority only).

Source code in accumulate\utils\url.py
233
234
235
def root_identity(self) -> "URL":
    """Return the root identity (authority only).

    Path, query, fragment and user info are all dropped.
    """
    return URL(authority=self.authority)

strip_extras()

Return a URL with only the authority and path.

Source code in accumulate\utils\url.py
229
230
231
def strip_extras(self) -> "URL":
    """Return a URL with only the authority and path.

    User info, query and fragment are dropped; the original is untouched.
    """
    return URL(authority=self.authority, path=self.path)

valid_utf8()

Validate that all components are UTF-8.

Source code in accumulate\utils\url.py
270
271
272
273
274
275
276
277
278
279
280
def valid_utf8(self) -> bool:
    """Return True when every populated URL component encodes cleanly as strict UTF-8."""
    fields = (self.user_info, self.authority, self.path, self.query, self.fragment)
    try:
        for field in fields:
            if field:
                field.encode("utf-8", "strict")
    except UnicodeEncodeError:
        return False
    return True

with_path(path)

Return a new URL with modified path.

Source code in accumulate\utils\url.py
221
222
223
def with_path(self, path: str) -> "URL":
    """Return a new URL with modified path.

    All other components are copied; the original is untouched.
    """
    return URL(user_info=self.user_info, authority=self.authority, path=path, query=self.query, fragment=self.fragment)

with_query(query)

Return a new URL with modified query.

Source code in accumulate\utils\url.py
225
226
227
def with_query(self, query: str) -> "URL":
    """Return a new URL with modified query.

    All other components are copied; the original is untouched.
    """
    return URL(user_info=self.user_info, authority=self.authority, path=self.path, query=query, fragment=self.fragment)

with_user_info(user_info)

Return a new URL with modified user info.

Source code in accumulate\utils\url.py
217
218
219
def with_user_info(self, user_info: str) -> "URL":
    """Return a new URL with modified user info.

    All other components are copied; the original is untouched.
    """
    return URL(user_info=user_info, authority=self.authority, path=self.path, query=self.query, fragment=self.fragment)

URLParseError

Bases: Exception

Base class for URL parsing errors

Source code in accumulate\utils\url.py
12
13
# Root of the URL-parsing exception hierarchy; the more specific
# scheme/host/hash errors subclass this.
class URLParseError(Exception):
    """Base class for URL parsing errors"""

WrongSchemeError

Bases: URLParseError

Raised when a URL includes an invalid scheme

Source code in accumulate\utils\url.py
20
21
# Part of the URLParseError hierarchy, so callers can catch every URL
# parsing failure with a single except clause.
class WrongSchemeError(URLParseError):
    """Raised when a URL includes an invalid scheme"""

validation

is_lite_account(account_type)

Returns True if the account type is a Lite Account.

Source code in accumulate\utils\validation.py
125
126
127
def is_lite_account(account_type: str) -> bool:
    """Return True when the (case-insensitive) account type names a Lite Account."""
    return account_type.lower() in {"liteidentity", "litetokenaccount"}

is_reserved_url(url)

Checks if a URL object or string is reserved.

Source code in accumulate\utils\validation.py
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
def is_reserved_url(url: URL | str) -> bool:
    """Checks if a URL object or string is reserved.

    Strings without the "acc://" prefix are treated as bare authorities
    (everything before the first '.'); unparseable URLs return False.
    """
    try:
        if isinstance(url, str):
            if not url.startswith("acc://"):
                # Bare string: take the portion before the first '.' as the authority.
                authority = url.split(".")[0].lower()
            else:
                url = URL.parse(url)
                authority = url.authority.lower()
        else:
            authority = url.authority.lower()
    except ValueError:
        # Unparseable input is treated as not reserved.
        return False

    # Strip the scheme prefix if present
    if authority.startswith("acc://"):
        authority = authority[len("acc://"):]

    # NOTE(review): presumably "dn" = directory network and "bvn-" = block
    # validator network prefixes reserved by the protocol -- confirm.
    reserved_keywords = {"unknown", "dn", "bvn-"}
    return any(authority.startswith(keyword) for keyword in reserved_keywords)

is_valid_adi_url(url, allow_reserved=False)

Validates an ADI URL according to protocol rules.

Source code in accumulate\utils\validation.py
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
def is_valid_adi_url(url: str, allow_reserved=False) -> bool:
    """Validate an ADI URL against protocol rules.

    Checks, in order: non-empty and at most 500 characters, not reserved
    (unless allow_reserved), ends with ".acme", authority neither empty
    nor all digits, not exactly 48 hex characters, no subdomains, and
    only letters/digits/hyphen/underscore.
    """
    if not url or len(url) > 500:  # Max length
        return False

    # Reserved authorities are rejected unless explicitly allowed.
    if is_reserved_url(url) and not allow_reserved:
        return False

    suffix = ".acme"
    if not url.endswith(suffix):
        return False

    authority = url[: -len(suffix)]
    if not authority:
        return False
    if re.fullmatch(r"\d+", authority):
        # All-digit authorities are forbidden.
        return False
    if re.fullmatch(r"[a-fA-F0-9]{48}", authority):
        # Exactly 48 hexadecimal characters are not allowed.
        return False
    if "." in authority:
        # Subdomains are not allowed.
        return False

    # Only letters, digits, hyphen and underscore are permitted.
    return re.fullmatch(r"[A-Za-z0-9_-]+", authority) is not None

process_signer_url(url, client=None) async

Determines if a signer is a Lite Identity, Key Page, or ADI and fetches signer version

Returns:

Name Type Description
dict dict

{ "url": str (Processed signer URL), "signer_type": str ("liteIdentity", "keyPage", "adi"), "signer_version": int (1 for Lite, actual version for Key Page)

dict

}

Source code in accumulate\utils\validation.py
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
async def process_signer_url(url: URL, client: Optional["AccumulateClient"] = None) -> dict:
    """
    Determines if a signer is a Lite Identity, Key Page, or ADI and fetches signer version

    Returns:
        dict: {
            "url": str (Processed signer URL),
            "signer_type": str ("liteIdentity", "liteTokenAccount", "keyPage", "adi", or "unknown"),
            "signer_version": int (1 for Lite, actual version for Key Page)
        }
    """

    # Lazily build a client so callers without one still work; local imports
    # avoid a circular dependency with accumulate.api.client.
    if client is None:
        from accumulate.api.client import AccumulateClient  
        from accumulate.config import get_accumulate_rpc_url  
        client = AccumulateClient(get_accumulate_rpc_url())

    logger.info(f" Querying Accumulate API for signer account type and version: {url}")

    try:
        #  Query the Accumulate API
        query = Query(query_type=QueryType.DEFAULT)
        response = await client.query(str(url), query)

        if not isinstance(response, AccountRecord) or "account" not in response.__dict__:
            logger.warning(f" Unexpected response format for {url}: {response}")
            return {"url": str(url), "signer_type": "unknown", "signer_version": 1}

        #  Extract account type from response
        account_type = response.account.get("type", "unknown").strip().lower()
        logger.info(f" Retrieved Account Type: {account_type} for {url}")

        #  Handle Lite Identity (Always version 1)
        if account_type == "liteidentity":
            return {"url": str(url), "signer_type": "liteIdentity", "signer_version": 1}

        #  Handle Lite Token Accounts (Also version 1)
        elif account_type == "litetokenaccount":
            # Drop the trailing "/ACME" token segment so the identity itself signs.
            processed_url = str(url).rsplit("/ACME", 1)[0]
            return {"url": processed_url, "signer_type": "liteTokenAccount", "signer_version": 1}

        #  Handle Key Pages (Extract version from response)
        elif account_type == "keypage":
            signer_version = response.account.get("version", 1)  #  Extract signer version
            logger.info(f" Using Key Page signer. Version: {signer_version}")
            return {"url": str(url), "signer_type": "keyPage", "signer_version": signer_version}

        #  Handle ADI (Assume version 1)
        # NOTE(review): this tests the account *type* string for ".acme";
        # presumably the URL was meant -- confirm this branch is reachable.
        elif account_type.endswith(".acme"):
            return {"url": str(url), "signer_type": "adi", "signer_version": 1}

        else:
            logger.error(f" Unknown signer type for {url}: {account_type}")
            return {"url": str(url), "signer_type": "unknown", "signer_version": 1}

    except Exception as e:
        # Best-effort fallback: any failure yields an "unknown" signer record.
        logger.error(f" Error processing signer URL for {url}: {e}")
        return {"url": str(url), "signer_type": "unknown", "signer_version": 1}

validate_accumulate_url(url)

Validate if a URL object or string is a valid Accumulate URL.

Source code in accumulate\utils\validation.py
87
88
89
90
91
92
93
94
95
96
97
98
99
def validate_accumulate_url(url: URL | str) -> bool:
    """Return True when the URL object or string is a usable Accumulate URL.

    Strings must start with "acc://" and parse cleanly; the resulting
    (or given) URL object must have a non-empty authority.
    """
    if isinstance(url, str):
        if not url.startswith("acc://"):
            return False  # Only the acc:// scheme is accepted.
        try:
            url = URL.parse(url)
        except ValueError:
            return False
    return bool(url.authority)

Signing

builder

Builder

Source code in accumulate\signing\builder.py
 36
 37
 38
 39
 40
 41
 42
 43
 44
 45
 46
 47
 48
 49
 50
 51
 52
 53
 54
 55
 56
 57
 58
 59
 60
 61
 62
 63
 64
 65
 66
 67
 68
 69
 70
 71
 72
 73
 74
 75
 76
 77
 78
 79
 80
 81
 82
 83
 84
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
class Builder:
    """Fluent builder that assembles and signs Accumulate transaction signatures.

    Collects the signer URL, signer object, version, timestamp, memo and
    data, then produces Signature objects via prepare()/sign()/initiate().
    """

    def __init__(self):
        # NOTE(review): annotated as str but assigned an InitHashMode member
        # -- confirm the intended type.
        self.init_mode: str = InitHashMode.INIT_WITH_SIMPLE_HASH
        self.type: SignatureType = SignatureType.UNKNOWN
        self.url: Optional[URL] = None
        self.delegators: List[URL] = []
        self.signer: Optional[object] = None
        self.version: int = 1  # Set signer version to 1 by default
        self.timestamp = timestamp_generator.get()
        self.memo: str = ""
        self.data: bytes = b""
        self.ignore_64_byte: bool = False

    def set_type(self, signature_type: SignatureType) -> "Builder":
        """Set the signature type; returns self for chaining."""
        self.type = signature_type
        return self

    def get_type(self) -> str:
        """Return the signature type in its RPC wire format."""
        return self.type.to_rpc_format()

    def set_url(self, url: URL) -> "Builder":
        """Set the signer URL, rejecting reserved or invalid URLs."""
        if is_reserved_url(url):
            raise ValueError("Reserved URL cannot be used as a signer URL")
        if not validate_accumulate_url(url):
            raise ValueError("Invalid Accumulate URL")
        self.url = url
        return self

    def set_signer(self, signer: Optional[object]) -> "Builder":
        """Sets the signer and assigns it to the builder."""
        Signer = get_signer()  # Dynamically import Signer
        if not isinstance(signer, Signer):
            raise TypeError("Expected an instance of Signer")

        self.signer = signer
        return self

    def set_version(self, version: int) -> "Builder":
        """Set the signer version; returns self for chaining."""
        self.version = version
        return self

    def set_timestamp(self, timestamp: int) -> "Builder":
        """Set an explicit timestamp; returns self for chaining."""
        self.timestamp = timestamp
        return self

    def set_timestamp_to_now(self) -> "Builder":
        """Refresh the timestamp from the shared generator."""
        self.timestamp = timestamp_generator.get()  # Ensure consistent timestamping
        return self

    def set_memo(self, memo: str) -> "Builder":
        """Set the signature memo; returns self for chaining."""
        self.memo = memo
        return self

    def set_data(self, data: bytes) -> "Builder":
        """Attach arbitrary data to the signature; returns self for chaining."""
        self.data = data
        return self

    def add_delegator(self, delegator: URL) -> "Builder":
        """Append a validated delegator URL for delegated signing."""
        if not validate_accumulate_url(delegator):
            raise ValueError("Invalid delegator URL")
        self.delegators.append(delegator)
        return self

    def _validate_signature_requirements(self, init: bool):
        """Raise ValueError when required fields are missing (version/timestamp only when init)."""
        if not self.url:
            raise ValueError("Missing signer URL")
        if not self.signer:
            raise ValueError("Missing signer")
        if init and not self.version:
            raise ValueError("Missing version")
        if init and self.timestamp is None:
            raise ValueError("Missing timestamp")

    def _create_signature(self, transaction_data: bytes) -> Signature:
        """Create a signature object based on the specified type."""
        # Map each supported SignatureType to its concrete signature class.
        signature_map = {
            SignatureType.ED25519: ED25519Signature,
            SignatureType.LEGACY_ED25519: LegacyED25519Signature,
            SignatureType.RCD1: RCD1Signature,
            SignatureType.BTC: BTCSignature,
            SignatureType.ETH: ETHSignature,
            SignatureType.RSA_SHA256: RSASignature,
            SignatureType.ECDSA_SHA256: ECDSA_SHA256Signature,
        }
        sig_class = signature_map.get(self.type)
        if not sig_class:
            raise ValueError(f"Unsupported signature type: {self.type}")

        signature = sig_class(
            signer=self.url,
            publicKey=self.signer.get_public_key() if self.signer else None, 
            signature=None,  # Placeholder; set after signing
            transaction_data=transaction_data
        )
        signature.memo = self.memo
        signature.data = self.data
        signature.type = self.get_type().lower()  # Ensure lowercase
        signature.timestamp = self.timestamp
        signature.signerVersion = self.version
        return signature

    def prepare(self, init: bool, transaction_data: bytes) -> Signature:
        """Prepare a signature ensuring transaction data is included."""
        self._validate_signature_requirements(init)
        # Default to ED25519 when no explicit type was chosen.
        if self.type == SignatureType.UNKNOWN:
            self.type = SignatureType.ED25519
        return self._create_signature(transaction_data)

    async def sign(self, message: bytes) -> dict:
        """Sign the provided message and return a dictionary."""
        transaction_data = message  # Use the message as transaction data
        signature = self.prepare(init=False, transaction_data=transaction_data)

        # Wrap the signature once per registered delegator.
        for delegator in self.delegators:
            signature = DelegatedSignature(
                delegator=delegator,
                metadata_hash=None,
                signature=signature,
            )

        # Use consistent camelCase for the field name
        # NOTE(review): here transactionHash is a hex string, while initiate()
        # stores raw bytes -- confirm which form consumers expect.
        signature.transactionHash = hash_data(transaction_data).hex()
        signature_data = await self.signer.sign_transaction(self.type, transaction_data)

        if isinstance(signature_data["signature"], bytes):
            signature_data["signature"] = signature_data["signature"].hex()

        logger.info(f"Debug: Signature Created - {signature_data['signature']}")

        return dict(signature_data)

    async def initiate(self, txn: Transaction) -> Signature:
        """Initiate a transaction and prepare the signature."""
        txn_hash = txn.get_hash()
        logger.info(f"Transaction Hash from txn.get_hash(): {txn_hash.hex()}")

        # Prepare the signature using the txn_hash as the signing data
        signature = self.prepare(init=True, transaction_data=txn_hash)

        # Wrap the signature once per registered delegator.
        for delegator in self.delegators:
            signature = DelegatedSignature(
                delegator=delegator,
                metadata_hash=None,
                signature=signature,
            )

        # Choose the transaction initiator hash per the configured init mode.
        if self.init_mode == InitHashMode.INIT_WITH_SIMPLE_HASH:
            txn.header.initiator = txn_hash
        else:
            txn.header.initiator = self.calculate_merkle_hash(txn_hash)

        signature.transactionHash = txn_hash

        # Log the value of txn_hash being passed to the signer
        logger.info(f"Passing Transaction Hash to signer.sign_transaction(): {txn_hash.hex()}")

        signature_data = await self.signer.sign_transaction(self.type, txn_hash)
        signature.signature = signature_data["signature"]
        signature.signerVersion = signature_data["signerVersion"]

        return signature

_create_signature(transaction_data)

Create a signature object based on the specified type.

Source code in accumulate\signing\builder.py
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
def _create_signature(self, transaction_data: bytes) -> Signature:
    """Create a signature object based on the specified type.

    Args:
        transaction_data: Raw bytes the signature will cover.

    Returns:
        Signature: A new, unsigned signature instance of the class matching
        ``self.type``; the ``signature`` field is populated after signing.

    Raises:
        ValueError: If ``self.type`` has no registered signature class.
    """
    # Map each supported SignatureType to its concrete signature class.
    signature_map = {
        SignatureType.ED25519: ED25519Signature,
        SignatureType.LEGACY_ED25519: LegacyED25519Signature,
        SignatureType.RCD1: RCD1Signature,
        SignatureType.BTC: BTCSignature,
        SignatureType.ETH: ETHSignature,
        SignatureType.RSA_SHA256: RSASignature,
        SignatureType.ECDSA_SHA256: ECDSA_SHA256Signature,
    }
    sig_class = signature_map.get(self.type)
    if not sig_class:
        raise ValueError(f"Unsupported signature type: {self.type}")

    signature = sig_class(
        signer=self.url,
        # publicKey is None when no signer has been attached yet.
        publicKey=self.signer.get_public_key() if self.signer else None, 
        signature=None,  # Placeholder; set after signing
        transaction_data=transaction_data
    )
    # Copy the builder's metadata onto the newly created signature object.
    signature.memo = self.memo
    signature.data = self.data
    signature.type = self.get_type().lower()  # Ensure lowercase
    signature.timestamp = self.timestamp
    signature.signerVersion = self.version
    return signature

initiate(txn) async

Initiate a transaction and prepare the signature.

Source code in accumulate\signing\builder.py
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
async def initiate(self, txn: Transaction) -> Signature:
    """Initiate a transaction and prepare the signature.

    Prepares a signature over the transaction hash, wraps it in one
    DelegatedSignature layer per configured delegator, sets the
    transaction's initiator on its header, then asks the signer for the
    actual signature bytes.

    Args:
        txn: Transaction to initiate; its ``header.initiator`` is set here.

    Returns:
        Signature: The prepared (possibly delegated) signature with
        ``signature`` and ``signerVersion`` filled in.
    """
    txn_hash = txn.get_hash()
    logger.info(f"Transaction Hash from txn.get_hash(): {txn_hash.hex()}")

    # Prepare the signature using the txn_hash as the signing data
    signature = self.prepare(init=True, transaction_data=txn_hash)

    # Wrap the signature once per configured delegator.
    for delegator in self.delegators:
        signature = DelegatedSignature(
            delegator=delegator,
            metadata_hash=None,
            signature=signature,
        )

    # The initiator is either the plain txn hash or a Merkle-derived hash,
    # depending on the builder's init mode.
    if self.init_mode == InitHashMode.INIT_WITH_SIMPLE_HASH:
        txn.header.initiator = txn_hash
    else:
        txn.header.initiator = self.calculate_merkle_hash(txn_hash)

    # NOTE(review): raw bytes are stored here, whereas sign() stores a hex
    # string in the same field — confirm this asymmetry is intended.
    signature.transactionHash = txn_hash

    # Log the value of txn_hash being passed to the signer
    logger.info(f"Passing Transaction Hash to signer.sign_transaction(): {txn_hash.hex()}")

    signature_data = await self.signer.sign_transaction(self.type, txn_hash)
    signature.signature = signature_data["signature"]
    signature.signerVersion = signature_data["signerVersion"]

    return signature

prepare(init, transaction_data)

Prepare a signature ensuring transaction data is included.

Source code in accumulate\signing\builder.py
137
138
139
140
141
142
def prepare(self, init: bool, transaction_data: bytes) -> Signature:
    """Validate builder state and construct a signature over *transaction_data*.

    Args:
        init: Whether this signature initiates the transaction.
        transaction_data: Raw bytes the signature will cover.

    Returns:
        Signature: The unsigned signature object for the builder's type.
    """
    # Enforce builder invariants before constructing anything.
    self._validate_signature_requirements(init)
    # An unspecified signature type defaults to ED25519.
    if self.type == SignatureType.UNKNOWN:
        self.type = SignatureType.ED25519
    return self._create_signature(transaction_data)

set_signer(signer)

Sets the signer and assigns it to the builder.

Source code in accumulate\signing\builder.py
64
65
66
67
68
69
70
71
def set_signer(self, signer: Optional[object]) -> "Builder":
    """Attach *signer* to the builder and return the builder for chaining.

    Raises:
        TypeError: If *signer* is not an instance of the Signer class.
    """
    # Signer is resolved dynamically via get_signer() rather than imported
    # at module level.
    signer_cls = get_signer()
    if not isinstance(signer, signer_cls):
        raise TypeError("Expected an instance of Signer")

    self.signer = signer
    return self

sign(message) async

Sign the provided message and return a dictionary.

Source code in accumulate\signing\builder.py
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
async def sign(self, message: bytes) -> dict:
    """Sign the provided message and return a dictionary.

    Prepares a (possibly delegated) signature over *message*, then asks the
    signer for the raw signature bytes and returns them as a dict.
    """
    payload = message  # the raw bytes being signed
    sig = self.prepare(init=False, transaction_data=payload)

    # Wrap the prepared signature once per configured delegator.
    for delegator in self.delegators:
        sig = DelegatedSignature(
            delegator=delegator,
            metadata_hash=None,
            signature=sig,
        )

    # Use consistent camelCase for the field name
    sig.transactionHash = hash_data(payload).hex()

    signature_data = await self.signer.sign_transaction(self.type, payload)

    # Normalize the signature bytes to a hex string for the returned dict.
    if isinstance(signature_data["signature"], bytes):
        signature_data["signature"] = signature_data["signature"].hex()

    logger.info(f"Debug: Signature Created - {signature_data['signature']}")

    return dict(signature_data)

signature_handler

SignatureHandler

Source code in accumulate\signing\signature_handler.py
 26
 27
 28
 29
 30
 31
 32
 33
 34
 35
 36
 37
 38
 39
 40
 41
 42
 43
 44
 45
 46
 47
 48
 49
 50
 51
 52
 53
 54
 55
 56
 57
 58
 59
 60
 61
 62
 63
 64
 65
 66
 67
 68
 69
 70
 71
 72
 73
 74
 75
 76
 77
 78
 79
 80
 81
 82
 83
 84
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
class SignatureHandler:
    """Stateless helpers for creating and verifying Accumulate signatures.

    Groups the per-algorithm primitives (ED25519, secp256k1 for BTC/ETH,
    RSA-SHA256, ECDSA-SHA256) together with Accumulate-specific constructs
    (authority signatures, delegated signatures, Merkle-hash checks).
    Every method is a staticmethod; the class is used purely as a namespace.
    """

    @staticmethod
    def btc_address(public_key: bytes) -> str:
        """Generate a BTC address from a public key"""
        # Delegates to the module-level btc_address helper.
        return btc_address(public_key)

    @staticmethod
    def eth_address(public_key: bytes) -> str:
        """Generate an ETH address from a public key"""
        # Delegates to the module-level eth_address helper.
        return eth_address(public_key)

    @staticmethod
    def verify_merkle_hash(metadata_hash: bytes, txn_hash: bytes, signature: Signature) -> bool:
        """Verify if a Merkle hash is valid.

        Recomputes sha256(metadata_hash + txn_hash) and compares it with
        ``signature.transactionHash``. Returns False on any error.
        """
        # NOTE(review): the comparison assumes transactionHash holds raw
        # digest bytes; some call sites store a hex string in this field —
        # confirm which representation is expected here.
        try:
            calculated_merkle_hash = hashlib.sha256(metadata_hash + txn_hash).digest()
            return calculated_merkle_hash == signature.transactionHash
        except Exception:
            return False

    @staticmethod
    def create_authority_signature(origin: URL, authority: URL, vote: Optional[str], txid: Optional[str]) -> bytes:
        """Create a signature for an authority.

        SHA-256 over origin + authority, with vote and txid appended when
        provided. Note that empty strings are treated the same as None
        because of the truthiness checks.
        """
        data = str(origin).encode() + str(authority).encode()
        if vote:
            data += vote.encode()
        if txid:
            data += txid.encode()
        return hashlib.sha256(data).digest()

    @staticmethod
    def verify_authority_signature(authority_signature: bytes, origin: URL, authority: URL, vote: Optional[str], txid: Optional[str]) -> bool:
        """Verify an authority signature by recomputing the expected hash."""
        expected_hash = SignatureHandler.create_authority_signature(origin, authority, vote, txid)
        return expected_hash == authority_signature

    # ========== ED25519 ==========
    @staticmethod
    def sign_ed25519(private_key: bytes, message: bytes) -> bytes:
        """Sign a message using ED25519."""
        private_key_obj = ed25519.Ed25519PrivateKey.from_private_bytes(private_key)
        return private_key_obj.sign(message)

    @staticmethod
    def verify_ed25519(public_key: bytes, message: bytes, signature: bytes) -> bool:
        """Verify an ED25519 signature; returns False on any failure."""
        try:
            vk = ed25519.Ed25519PublicKey.from_public_bytes(public_key)
            vk.verify(signature, message)
            return True
        except Exception:
            return False

    # ========== BTC (ECDSA SECP256k1) ==========
    @staticmethod
    def sign_btc(private_key: bytes, message: bytes) -> bytes:
        """Sign a message using Bitcoin ECDSA SECP256k1"""
        # The private key is a big-endian scalar on the secp256k1 curve.
        private_key_obj = ec.derive_private_key(int.from_bytes(private_key, "big"), ec.SECP256K1())
        return private_key_obj.sign(message, ec.ECDSA(SHA256()))

    @staticmethod
    def verify_btc(public_key: bytes, message: bytes, signature: bytes) -> bool:
        """Verify a BTC (ECDSA SECP256k1) signature; False on any failure."""
        try:
            vk = ec.EllipticCurvePublicKey.from_encoded_point(ec.SECP256K1(), public_key)
            vk.verify(signature, message, ec.ECDSA(SHA256()))
            return True
        except Exception:
            return False

    # ========== ETH (EIP-712) ==========
    @staticmethod
    def sign_eth(private_key: bytes, message_hash: bytes) -> bytes:
        """Sign an Ethereum message

        Raises:
            ValueError: If the key is invalid or signing fails; the original
                error is attached as the cause.
        """
        try:
            eth_key = eth_keys.PrivateKey(private_key)
            return eth_key.sign_msg_hash(message_hash).to_bytes()
        except Exception as exc:
            # Chain the underlying error so the root cause is not lost.
            raise ValueError("Failed to sign Ethereum message") from exc

    @staticmethod
    def verify_eth(public_key: bytes, message_hash: bytes, signature: bytes) -> bool:
        """Verify an Ethereum (EIP-712) signature"""
        try:
            eth_key = eth_keys.PublicKey(public_key)
            eth_signature = eth_keys.Signature(signature)
            return eth_key.verify_msg_hash(message_hash, eth_signature)
        except (ValidationError, BadSignature):
            return False

    # ========== RSA SHA256 ==========
    @staticmethod
    def sign_rsa_sha256(private_key: bytes, message: bytes) -> bytes:
        """Sign a message with RSA SHA-256 (PKCS#1 v1.5 padding).

        The private key must be PEM-encoded and unencrypted.
        """
        private_key_obj = serialization.load_pem_private_key(
            private_key, password=None, backend=default_backend()
        )
        return private_key_obj.sign(
            message,
            PKCS1v15(),
            SHA256(),
        )

    @staticmethod
    def verify_rsa_sha256(public_key: bytes, message: bytes, signature: bytes) -> bool:
        """Verify an RSA SHA-256 (PKCS#1 v1.5) signature; False on failure.

        The public key must be PEM-encoded.
        """
        try:
            public_key_obj = serialization.load_pem_public_key(
                public_key, backend=default_backend()
            )
            public_key_obj.verify(
                signature,
                message,
                PKCS1v15(),
                SHA256(),
            )
            return True
        except Exception:
            return False

    # ========== ECDSA SHA256 ==========
    @staticmethod
    def sign_ecdsa_sha256(private_key: bytes, message: bytes) -> bytes:
        """Sign a message using ECDSA SHA256"""
        # Same curve as sign_btc: a big-endian secp256k1 scalar.
        private_key_obj = ec.derive_private_key(int.from_bytes(private_key, "big"), ec.SECP256K1())
        return private_key_obj.sign(message, ec.ECDSA(SHA256()))

    @staticmethod
    def verify_ecdsa_sha256(public_key: bytes, message: bytes, signature: bytes) -> bool:
        """Verify an ECDSA SHA256 signature; returns False on any failure."""
        try:
            public_key_obj = ec.EllipticCurvePublicKey.from_encoded_point(ec.SECP256K1(), public_key)
            public_key_obj.verify(signature, message, ec.ECDSA(SHA256()))
            return True
        except Exception:
            return False

    # ========== TypedData (EIP-712 Compliant) ==========
    @staticmethod
    def sign_typed_data(private_key: bytes, message_hash: bytes) -> bytes:
        """Sign an Ethereum message using EIP-712 Typed Data"""
        # Identical to sign_eth; the EIP-712 hashing happens upstream.
        return SignatureHandler.sign_eth(private_key, message_hash)

    # ========== Delegated Signature ==========
    @staticmethod
    def sign_delegated_signature(inner_signature: bytes, delegator: URL) -> bytes:
        """Create a delegated signature: sha256(inner_signature + delegator)."""
        return hashlib.sha256(inner_signature + str(delegator).encode()).digest()

    @staticmethod
    def verify_delegated_signature(delegated_signature: bytes, inner_signature: bytes, delegator: URL) -> bool:
        """Verify a delegated signature by recomputing the expected hash."""
        expected_hash = hashlib.sha256(inner_signature + str(delegator).encode()).digest()
        return expected_hash == delegated_signature

    # ========== General Signature Verification ==========
    @staticmethod
    def verify_signature_with_timestamp(public_key: bytes, message: bytes, signature: Signature, sig_type: SignatureType) -> bool:
        """Dispatch verification of *signature* to the verifier for *sig_type*.

        Raises:
            ValueError: If *sig_type* has no registered verifier.
        """
        # NOTE(review): despite the name, no timestamp is validated here —
        # confirm whether timestamp checking was intended.
        verification_methods = {
            SignatureType.ED25519: SignatureHandler.verify_ed25519,
            SignatureType.ECDSA_SHA256: SignatureHandler.verify_ecdsa_sha256,
            SignatureType.BTC: SignatureHandler.verify_btc,
            SignatureType.ETH: SignatureHandler.verify_eth,
            SignatureType.RSA_SHA256: SignatureHandler.verify_rsa_sha256,
        }

        verify_func = verification_methods.get(sig_type)
        if verify_func:
            return verify_func(public_key, message, signature.signature)
        else:
            raise ValueError(f"Unsupported signature type: {sig_type}")

btc_address(public_key) staticmethod

Generate a BTC address from a public key

Source code in accumulate\signing\signature_handler.py
27
28
29
30
@staticmethod
def btc_address(public_key: bytes) -> str:
    """Generate a BTC address from a public key"""
    return btc_address(public_key)

create_authority_signature(origin, authority, vote, txid) staticmethod

Create a signature for an authority.

Source code in accumulate\signing\signature_handler.py
46
47
48
49
50
51
52
53
54
@staticmethod
def create_authority_signature(origin: URL, authority: URL, vote: Optional[str], txid: Optional[str]) -> bytes:
    """Create a signature for an authority."""
    data = str(origin).encode() + str(authority).encode()
    if vote:
        data += vote.encode()
    if txid:
        data += txid.encode()
    return hashlib.sha256(data).digest()

eth_address(public_key) staticmethod

Generate an ETH address from a public key

Source code in accumulate\signing\signature_handler.py
32
33
34
35
@staticmethod
def eth_address(public_key: bytes) -> str:
    """Generate an ETH address from a public key"""
    return eth_address(public_key)

sign_btc(private_key, message) staticmethod

Sign a message using Bitcoin ECDSA SECP256k1

Source code in accumulate\signing\signature_handler.py
80
81
82
83
84
@staticmethod
def sign_btc(private_key: bytes, message: bytes) -> bytes:
    """Sign a message using Bitcoin ECDSA SECP256k1"""
    private_key_obj = ec.derive_private_key(int.from_bytes(private_key, "big"), ec.SECP256K1())
    return private_key_obj.sign(message, ec.ECDSA(SHA256()))

sign_delegated_signature(inner_signature, delegator) staticmethod

Create a delegated signature

Source code in accumulate\signing\signature_handler.py
170
171
172
173
@staticmethod
def sign_delegated_signature(inner_signature: bytes, delegator: URL) -> bytes:
    """Create a delegated signature"""
    return hashlib.sha256(inner_signature + str(delegator).encode()).digest()

sign_ecdsa_sha256(private_key, message) staticmethod

Sign a message using ECDSA SHA256

Source code in accumulate\signing\signature_handler.py
147
148
149
150
151
@staticmethod
def sign_ecdsa_sha256(private_key: bytes, message: bytes) -> bytes:
    """Sign a message using ECDSA SHA256"""
    private_key_obj = ec.derive_private_key(int.from_bytes(private_key, "big"), ec.SECP256K1())
    return private_key_obj.sign(message, ec.ECDSA(SHA256()))

sign_ed25519(private_key, message) staticmethod

Sign a message using ED25519.

Source code in accumulate\signing\signature_handler.py
63
64
65
66
67
@staticmethod
def sign_ed25519(private_key: bytes, message: bytes) -> bytes:
    """Sign a message using ED25519."""
    private_key_obj = ed25519.Ed25519PrivateKey.from_private_bytes(private_key)
    return private_key_obj.sign(message)

sign_eth(private_key, message_hash) staticmethod

Sign an Ethereum message

Source code in accumulate\signing\signature_handler.py
 97
 98
 99
100
101
102
103
104
@staticmethod
def sign_eth(private_key: bytes, message_hash: bytes) -> bytes:
    """Sign an Ethereum message"""
    try:
        eth_key = eth_keys.PrivateKey(private_key)
        return eth_key.sign_msg_hash(message_hash).to_bytes()
    except Exception:
        raise ValueError("Failed to sign Ethereum message")

sign_rsa_sha256(private_key, message) staticmethod

Sign a message with RSA SHA-256

Source code in accumulate\signing\signature_handler.py
117
118
119
120
121
122
123
124
125
126
127
@staticmethod
def sign_rsa_sha256(private_key: bytes, message: bytes) -> bytes:
    """Sign a message with RSA SHA-256"""
    private_key_obj = serialization.load_pem_private_key(
        private_key, password=None, backend=default_backend()
    )
    return private_key_obj.sign(
        message,
        PKCS1v15(),
        SHA256(),
    )

sign_typed_data(private_key, message_hash) staticmethod

Sign an Ethereum message using EIP-712 Typed Data

Source code in accumulate\signing\signature_handler.py
164
165
166
167
@staticmethod
def sign_typed_data(private_key: bytes, message_hash: bytes) -> bytes:
    """Sign an Ethereum message using EIP-712 Typed Data"""
    return SignatureHandler.sign_eth(private_key, message_hash)

verify_authority_signature(authority_signature, origin, authority, vote, txid) staticmethod

Verify an authority signature.

Source code in accumulate\signing\signature_handler.py
56
57
58
59
60
@staticmethod
def verify_authority_signature(authority_signature: bytes, origin: URL, authority: URL, vote: Optional[str], txid: Optional[str]) -> bool:
    """Verify an authority signature."""
    expected_hash = SignatureHandler.create_authority_signature(origin, authority, vote, txid)
    return expected_hash == authority_signature

verify_btc(public_key, message, signature) staticmethod

Verify a BTC (ECDSA SECP256k1) signature

Source code in accumulate\signing\signature_handler.py
86
87
88
89
90
91
92
93
94
@staticmethod
def verify_btc(public_key: bytes, message: bytes, signature: bytes) -> bool:
    """Verify a BTC (ECDSA SECP256k1) signature"""
    try:
        vk = ec.EllipticCurvePublicKey.from_encoded_point(ec.SECP256K1(), public_key)
        vk.verify(signature, message, ec.ECDSA(SHA256()))
        return True
    except Exception:
        return False

verify_delegated_signature(delegated_signature, inner_signature, delegator) staticmethod

Verify a delegated signature.

Source code in accumulate\signing\signature_handler.py
175
176
177
178
179
@staticmethod
def verify_delegated_signature(delegated_signature: bytes, inner_signature: bytes, delegator: URL) -> bool:
    """Verify a delegated signature."""
    expected_hash = hashlib.sha256(inner_signature + str(delegator).encode()).digest()
    return expected_hash == delegated_signature

verify_ecdsa_sha256(public_key, message, signature) staticmethod

Verify an ECDSA SHA256 signature

Source code in accumulate\signing\signature_handler.py
153
154
155
156
157
158
159
160
161
@staticmethod
def verify_ecdsa_sha256(public_key: bytes, message: bytes, signature: bytes) -> bool:
    """Verify an ECDSA SHA256 signature"""
    try:
        public_key_obj = ec.EllipticCurvePublicKey.from_encoded_point(ec.SECP256K1(), public_key)
        public_key_obj.verify(signature, message, ec.ECDSA(SHA256()))
        return True
    except Exception:
        return False

verify_ed25519(public_key, message, signature) staticmethod

Verify an ED25519 signature.

Source code in accumulate\signing\signature_handler.py
69
70
71
72
73
74
75
76
77
@staticmethod
def verify_ed25519(public_key: bytes, message: bytes, signature: bytes) -> bool:
    """Verify an ED25519 signature."""
    try:
        vk = ed25519.Ed25519PublicKey.from_public_bytes(public_key)
        vk.verify(signature, message)
        return True
    except Exception:
        return False

verify_eth(public_key, message_hash, signature) staticmethod

Verify an Ethereum (EIP-712) signature

Source code in accumulate\signing\signature_handler.py
106
107
108
109
110
111
112
113
114
@staticmethod
def verify_eth(public_key: bytes, message_hash: bytes, signature: bytes) -> bool:
    """Verify an Ethereum (EIP-712) signature"""
    try:
        eth_key = eth_keys.PublicKey(public_key)
        eth_signature = eth_keys.Signature(signature)
        return eth_key.verify_msg_hash(message_hash, eth_signature)
    except (ValidationError, BadSignature):
        return False

verify_merkle_hash(metadata_hash, txn_hash, signature) staticmethod

Verify if a Merkle hash is valid.

Source code in accumulate\signing\signature_handler.py
37
38
39
40
41
42
43
44
@staticmethod
def verify_merkle_hash(metadata_hash: bytes, txn_hash: bytes, signature: Signature) -> bool:
    """Verify if a Merkle hash is valid."""
    try:
        calculated_merkle_hash = hashlib.sha256(metadata_hash + txn_hash).digest()
        return calculated_merkle_hash == signature.transactionHash
    except Exception:
        return False

verify_rsa_sha256(public_key, message, signature) staticmethod

Verify an RSA SHA-256 signature

Source code in accumulate\signing\signature_handler.py
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
@staticmethod
def verify_rsa_sha256(public_key: bytes, message: bytes, signature: bytes) -> bool:
    """Verify an RSA SHA-256 signature"""
    try:
        public_key_obj = serialization.load_pem_public_key(
            public_key, backend=default_backend()
        )
        public_key_obj.verify(
            signature,
            message,
            PKCS1v15(),
            SHA256(),
        )
        return True
    except Exception:
        return False

signer

Signer

Source code in accumulate\signing\signer.py
 32
 33
 34
 35
 36
 37
 38
 39
 40
 41
 42
 43
 44
 45
 46
 47
 48
 49
 50
 51
 52
 53
 54
 55
 56
 57
 58
 59
 60
 61
 62
 63
 64
 65
 66
 67
 68
 69
 70
 71
 72
 73
 74
 75
 76
 77
 78
 79
 80
 81
 82
 83
 84
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
class Signer:
    """Builds and signs Accumulate transactions on behalf of a key page or lite identity.

    Holds a raw ed25519 key pair (other curves/schemes are derived on demand from
    the same raw private-key bytes), the signer URL, and the signer version used
    when computing the signature metadata (initiator) hash.
    """

    _signer_version = 1
    _signature_type: Optional[SignatureType] = None  # Cached signature type

    def __init__(self, url: Optional[URL] = None, signer_version: int = 1, signature_type: Optional[SignatureType] = None):
        self._private_key = None  # ed25519.Ed25519PrivateKey once set_keys() runs
        self._public_key = None   # 32 raw public-key bytes once set_keys() runs
        self.url = url
        self._signer_version = signer_version
        self._signature_type = signature_type

    @staticmethod
    async def select_signer(account_url: URL, private_key: bytes, client: Optional["AccumulateClient"] = None) -> "Signer":
        """Resolve signer URL/version/type for *account_url* and return a keyed Signer.

        :param account_url: Account whose signer metadata should be queried.
        :param private_key: 32-byte seed or 64-byte seed+public-key ed25519 material.
        :param client: Optional client; a default one is created when omitted.
        :return: A Signer with keys loaded and metadata populated from the network.
        """
        if client is None:
            # Imported lazily to avoid a circular import at module load time.
            from accumulate.api.client import AccumulateClient
            from accumulate.config import get_accumulate_rpc_url
            client = AccumulateClient(get_accumulate_rpc_url())

        from accumulate.utils.validation import process_signer_url
        logger.info(f"Checking signer type and version for {account_url}...")

        signer_info = await process_signer_url(account_url, client)

        processed_url = signer_info["url"]
        signer_version = signer_info["signer_version"]
        signer_type = signer_info["signer_type"]

        # Map the fetched signer_type string onto the SignatureType enum,
        # falling back to ED25519 for unrecognized values.
        signature_type_enum = SignatureType[signer_type] if signer_type in SignatureType.__members__ else SignatureType.ED25519

        logger.info(f"Using {processed_url} as signer ({signer_type}), Version: {signer_version}")

        signer = Signer(URL.parse(processed_url), signer_version, signature_type_enum)
        signer.set_keys(private_key)

        return signer

    async def get_signature_type(self) -> SignatureType:
        """
        Fetch the appropriate signature type dynamically.

        - If already cached, return it.
        - Otherwise, determine it dynamically.
        """
        if self._signature_type:
            return self._signature_type  # Use cached signature type if available

        # TODO: fetch the actual signature type from signer metadata.
        # Currently defaulting to ED25519.
        self._signature_type = SignatureType.ED25519

        logger.info(f" Determined Signature Type: {self._signature_type.name}")
        return self._signature_type

    def set_keys(self, private_key: bytes) -> None:
        """Load an ed25519 key pair from 32-byte seed or 64-byte seed+pubkey bytes.

        :raises ValueError: If the key length is not 32 or 64 bytes, or the
            embedded public key does not match the one derived from the seed.
        """
        if len(private_key) == 64:
            private_scalar = private_key[:32]
            derived_public_key = private_key[32:]
        elif len(private_key) == 32:
            private_scalar = private_key
            derived_public_key = None
        else:
            # BUGFIX: previously fell through with private_scalar unbound,
            # raising a confusing NameError instead of a clear ValueError.
            raise ValueError(f"Invalid private key length: {len(private_key)} (expected 32 or 64 bytes)")

        self._private_key = ed25519.Ed25519PrivateKey.from_private_bytes(private_scalar)
        computed_public_key = self._private_key.public_key().public_bytes(
            encoding=serialization.Encoding.Raw,
            format=serialization.PublicFormat.Raw
        )

        if derived_public_key and computed_public_key != derived_public_key:
            raise ValueError("Derived public key does not match computed public key!")

        self._public_key = computed_public_key
        logger.info(f"Public Key Correctly Set: {self._public_key.hex()}")

    def get_public_key(self) -> bytes:
        """Return the raw public key, regenerating it from the private key if needed.

        :raises ValueError: If neither a public nor a private key has been set.
        """
        if not self._public_key:
            logger.warning("Public key missing. Regenerating from private key.")
            if self._private_key:
                self._public_key = self._private_key.public_key().public_bytes(
                    encoding=serialization.Encoding.Raw,
                    format=serialization.PublicFormat.Raw
                )
            else:
                raise ValueError("Public key has not been set. Call set_keys() first.")
        return self._public_key

    def get_private_key(self) -> bytes:
        """Return the raw 32-byte private key seed.

        :raises ValueError: If no private key has been set.
        """
        if not self._private_key:
            raise ValueError("Private key has not been set. Call set_keys() first.")
        return self._private_key.private_bytes(
            encoding=serialization.Encoding.Raw,
            format=serialization.PrivateFormat.Raw,
            encryption_algorithm=serialization.NoEncryption()
        )

    async def get_signer_version(self) -> int:
        """Return the signer version; requires a signer URL to be meaningful."""
        if self.url is None:
            raise ValueError("Signer URL is missing, cannot determine version.")
        return self._signer_version

    def set_signer_version(self, version: int):
        """Set the signer version used in signature metadata."""
        self._signer_version = version

    async def sign_transaction(
        self,
        signature_type: SignatureType,
        message: bytes,
        txn_header: TransactionHeader,  # Supplies the timestamp for the metadata hash
        signer_version: Optional[int] = None
    ) -> dict:
        """
        Signs the transaction using the timestamp from the TransactionHeader.

        :param signature_type: The signature type (e.g., ED25519, BTC, etc.).
        :param message: The transaction hash to sign.
        :param txn_header: The TransactionHeader instance, containing the timestamp.
        :param signer_version: The signer's version (if None, uses the default).
        :return: A signed transaction dictionary.
        :raises ValueError: If keys are unset, the header has no timestamp, or
            the signature type is unsupported.
        """
        if not self._private_key:
            raise ValueError("Private key not set. Call set_keys() first.")
        if not self._public_key:
            raise ValueError("Public key not set. Call set_keys() properly.")

        if signer_version is None:
            signer_version = self._signer_version

        # The timestamp must come from the transaction header so that the
        # metadata (initiator) hash matches what the transaction committed to.
        timestamp = txn_header.timestamp
        if timestamp is None:
            raise ValueError("Transaction header does not have a timestamp!")

        logger.info(f" Signer Using TransactionHeader Timestamp: {timestamp}")

        # Compute metadata (initiator) hash using uvarint encoding.
        metadata_hash = Signer.calculate_metadata_hash(
            self.get_public_key(), timestamp, str(self.url), signer_version, signature_type.value
        )

        logger.info(f" FROM SIGNER - Metadata (Public key) Hash: {self.get_public_key().hex()}")
        logger.info(f" FROM SIGNER - Metadata (timestamp) Hash: {timestamp}")
        logger.info(f" FROM SIGNER - Metadata (signer) Hash: {str(self.url)}")
        logger.info(f" FROM SIGNER - Metadata (signer_version) Hash: {signer_version}")
        logger.info(f" FROM SIGNER - Metadata (signature_type.value) Hash: {signature_type.value}")

        logger.info(f" FROM SIGNER - Metadata (initiator) Hash: {metadata_hash.hex()}")

        # 'message' is the final transaction hash computed during marshalling.
        logger.info(f"Message passed for signing (transaction hash): {message.hex()}")

        # The value actually signed is sha256(metadata_hash + transaction_hash).
        final_hash = hashlib.sha256(metadata_hash + message).digest()
        logger.info(f"Final Hash for Signing (sha256(metadata_hash + message)): {final_hash.hex()}")

        # Sign using the appropriate algorithm for the requested type.
        if signature_type == SignatureType.ED25519:
            signature = self._private_key.sign(final_hash)
        elif signature_type == SignatureType.BTC:
            priv_key = ec.derive_private_key(int.from_bytes(self.get_private_key(), "big"), ec.SECP256K1())
            signature = priv_key.sign(final_hash, ec.ECDSA(hashes.SHA256()))
        elif signature_type == SignatureType.ETH:
            eth_key = eth_keys.PrivateKey(self.get_private_key())
            signature = eth_key.sign_msg_hash(final_hash).to_bytes()
        elif signature_type == SignatureType.RSA_SHA256:
            private_key_obj = serialization.load_pem_private_key(self.get_private_key(), password=None, backend=default_backend())
            if isinstance(private_key_obj, rsa.RSAPrivateKey):
                signature = private_key_obj.sign(
                    final_hash,
                    PKCS1v15(),
                    hashes.SHA256(),
                )
            else:
                raise ValueError("Invalid RSA private key")
        elif signature_type == SignatureType.ECDSA_SHA256:
            priv_key = ec.derive_private_key(int.from_bytes(self.get_private_key(), "big"), ec.SECP256K1())
            signature = priv_key.sign(final_hash, ec.ECDSA(hashes.SHA256()))
        else:
            raise ValueError(f"Unsupported signature type: {signature_type}")

        logger.info("sign_transaction() called successfully!")
        logger.info(f"Signature Generated: {signature.hex()}")

        signed_transaction = {
            "type": signature_type.name.lower(),
            "publicKey": self.get_public_key().hex(),
            "signature": signature.hex(),
            "signer": str(self.url),
            "signerVersion": signer_version,
            "timestamp": timestamp,
            "transactionHash": message.hex()  # The original transaction hash passed in
        }

        logger.info(f"Signed Transaction Data - {signed_transaction}")

        return signed_transaction

    async def sign(self, message: bytes, opts: dict) -> dict:
        """Convenience wrapper around sign_transaction driven by an options dict.

        Recognized keys: ``signatureType`` (default ED25519), ``signerVersion``
        (default: this signer's version), and ``txnHeader`` (required — the
        TransactionHeader whose timestamp seeds the metadata hash).

        :raises ValueError: If ``txnHeader`` is missing from *opts*.
        """
        signature_type = opts.get("signatureType", SignatureType.ED25519)
        signer_version = opts.get("signerVersion", self._signer_version)
        # BUGFIX: signer_version was previously passed into sign_transaction's
        # txn_header positional slot, which always raised AttributeError.
        txn_header = opts.get("txnHeader")
        if txn_header is None:
            raise ValueError("opts must include 'txnHeader' (TransactionHeader) to supply the signing timestamp.")
        return await self.sign_transaction(signature_type, message, txn_header, signer_version)

    @staticmethod
    def for_lite(signer: "Signer") -> "Signer":
        """Derive the lite-identity signer (acc://<keyHash[:20] hex><checksum hex>).

        Per the Accumulate lite-account derivation, the 4-byte checksum is the
        tail of sha256 over the lower-case *hex string* of the 20-byte key hash.
        """
        if signer._public_key is None or signer._private_key is None:
            raise ValueError("Signer must have keys set before calling for_lite().")

        key_hash = hashlib.sha256(signer.get_public_key()).digest()[:20]
        key_str = key_hash.hex()
        # BUGFIX: checksum must hash the hex-encoded string, not the raw bytes.
        check_sum = hashlib.sha256(key_str.encode("utf-8")).digest()[-4:]
        lite_url = URL.parse(f"acc://{key_str}{check_sum.hex()}")

        logger.info(f"Created Lite Signer: {lite_url}")

        lite_signer = Signer(lite_url, signer_version=1)
        lite_signer._public_key = signer._public_key
        lite_signer._private_key = signer._private_key

        return lite_signer

    def set_public_key(self, signature: Dict, private_key: bytes) -> None:
        """Populate signature['publicKey'] (and address fields) from *private_key*.

        :raises ValueError: If the signature type is unsupported or the key
            material does not match the requested type.
        """
        signature_type = signature.get("type")
        if signature_type in [SignatureType.LEGACY_ED25519, SignatureType.ED25519, SignatureType.RCD1]:
            private_key_obj = ed25519.Ed25519PrivateKey.from_private_bytes(private_key)
            self._public_key = private_key_obj.public_key().public_bytes(
                encoding=serialization.Encoding.Raw,
                format=serialization.PublicFormat.Raw
            )
            signature["publicKey"] = self._public_key.hex()
        elif signature_type in [SignatureType.BTC]:
            priv_key = ec.derive_private_key(int.from_bytes(private_key, "big"), ec.SECP256K1())
            public_key = priv_key.public_key().public_bytes(
                encoding=serialization.Encoding.X962,
                format=serialization.PublicFormat.CompressedPoint
            )
            signature["publicKey"] = public_key.hex()
            signature["btc_address"] = btc_address(public_key)
        elif signature_type == SignatureType.ETH:
            eth_key = eth_keys.PrivateKey(private_key)
            public_key = eth_key.public_key.to_bytes()
            # CONSISTENCY FIX: every other branch records the public key too.
            signature["publicKey"] = public_key.hex()
            signature["eth_address"] = eth_address(public_key)
        elif signature_type == SignatureType.RSA_SHA256:
            private_key_obj = serialization.load_pem_private_key(private_key, password=None, backend=default_backend())
            if isinstance(private_key_obj, rsa.RSAPrivateKey):
                signature["publicKey"] = private_key_obj.public_key().public_bytes(
                    encoding=serialization.Encoding.PEM,
                    format=serialization.PublicFormat.SubjectPublicKeyInfo
                ).hex()
            else:
                # Previously a non-RSA PEM key was silently ignored.
                raise ValueError("Invalid RSA private key")
        elif signature_type == SignatureType.ECDSA_SHA256:
            priv_key = ec.derive_private_key(int.from_bytes(private_key, "big"), ec.SECP256K1())
            signature["publicKey"] = priv_key.public_key().public_bytes(
                encoding=serialization.Encoding.PEM,
                format=serialization.PublicFormat.SubjectPublicKeyInfo
            ).hex()
        else:
            raise ValueError(f"Cannot set the public key for {signature_type}")

    def sign_rcd1(self, private_key: bytes, message: bytes) -> dict:
        """Sign sha256(message) with an ed25519 key (RCD1 style); returns hex signature."""
        private_key_obj = ed25519.Ed25519PrivateKey.from_private_bytes(private_key)
        hashed_message = hashlib.sha256(message).digest()
        signature = private_key_obj.sign(hashed_message)
        return {"signature": signature.hex()}

    def verify_rcd1(self, public_key: bytes, signature: bytes, message: bytes) -> bool:
        """Verify an RCD1-style signature over sha256(message); False on any failure."""
        try:
            vk = ed25519.Ed25519PublicKey.from_public_bytes(public_key)
            hashed_message = hashlib.sha256(message).digest()
            vk.verify(signature, hashed_message)
            return True
        except Exception:
            return False

    @staticmethod
    def sha256_concat(*data: bytes) -> bytes:
        """Return sha256 over the concatenation of all byte arguments."""
        return hashlib.sha256(b"".join(data)).digest()

    @staticmethod
    def encode_varint(value: int) -> bytes:
        """Encode a non-negative integer as an unsigned LEB128 varint.

        :raises ValueError: If *value* is negative.
        """
        if value < 0:
            raise ValueError("Varint encoding requires a non-negative integer.")
        encoded_bytes = bytearray()
        while value >= 0x80:
            encoded_bytes.append((value & 0x7F) | 0x80)
            value >>= 7
        encoded_bytes.append(value)
        return bytes(encoded_bytes)

    @staticmethod
    def calculate_metadata_hash(public_key: bytes, timestamp: int, signer: str, version: int, signature_type: int) -> bytes:
        """
        Compute Accumulate's signature metadata (initiator) hash using uvarint encoding for all fields.
        This exactly mirrors the Dart implementation.

        Fields:
        - Field 1: Signature type (varint-encoded)
        - Field 2: Public key (varint-encoded length + raw bytes)
        - Field 4: Signer URL (UTF-8 encoded, with varint length prefix)
        - Field 5: Signer version (varint-encoded)
        - Field 6: Timestamp (varint-encoded)
        """
        from accumulate.utils.encoding import encode_uvarint

        logger.debug(f" calculate_metadata_hash: {public_key.hex()}")
        logger.debug(f" calculate_metadata_hash: {timestamp}")
        logger.debug(f" calculate_metadata_hash: {signer}")
        logger.debug(f" calculate_metadata_hash: {version}")
        logger.debug(f" calculate_metadata_hash: {signature_type}")

        # Encode the signer URL into bytes.
        signer_bytes = signer.encode("utf-8")

        # Concatenate each tagged field in protocol order (field 3 is unused).
        metadata = b"".join([
            b"\x01" + encode_uvarint(signature_type),                   # Field 1: Signature type
            b"\x02" + encode_uvarint(len(public_key)) + public_key,         # Field 2: Public key
            b"\x04" + encode_uvarint(len(signer_bytes)) + signer_bytes,       # Field 4: Signer URL
            b"\x05" + encode_uvarint(version),                              # Field 5: Signer version
            b"\x06" + encode_uvarint(timestamp)                             # Field 6: Timestamp
        ])

        logger.debug(f" Final Metadata Encoding (hex): {metadata.hex()}")

        # The initiator hash is sha256 of the concatenated metadata.
        metadata_hash = hashlib.sha256(metadata).digest()
        logger.debug(f"Metadata Hash (SHA-256, hex): {metadata_hash.hex()}")
        return metadata_hash

    @staticmethod
    def calculate_signature_hash(signature) -> bytes:
        """Return sha256 of the signature's binary marshalling."""
        data = signature.marshal_binary()
        return Signer.sha256_concat(data)

    @staticmethod
    def is_parent_of(parent: URL, child: URL) -> bool:
        """Return True when *child* equals *parent* or lies beneath it.

        Uses a path-boundary check so acc://foo/barbaz is NOT treated as a
        child of acc://foo/bar (the old bare startswith did).
        """
        parent_str = str(parent).rstrip("/")
        child_str = str(child)
        return child_str == parent_str or child_str.startswith(parent_str + "/")

    async def sign_and_submit_transaction(
        self, client: "AccumulateClient", txn: Transaction, signature_type: SignatureType, debug: bool = False
    ) -> dict:
        """
        Signs the transaction, constructs the envelope, and submits it.

        :param client: AccumulateClient instance
        :param txn: Transaction object
        :param signature_type: Type of signature (e.g., ED25519)
        :param debug: If True, print the exact JSON request without sending it.
        :return: Response from the Accumulate network or printed JSON in debug mode.
        :raises ValueError: If keys are unset or multisig rules forbid signing.
        """
        # Step 1: RemoteTransactions may only gain additional multisig signatures.
        if isinstance(txn.body, RemoteTransaction):
            logger.info(" RemoteTransaction detected, checking multisignature conditions...")

            if not txn.body.allows_multisig():
                raise ValueError("Cannot sign a RemoteTransaction that does not allow multisignatures.")

            # Reject duplicate signatures from the same key.
            existing_signatures = txn.body.get_existing_signatures()
            if self.get_public_key() in existing_signatures:
                raise ValueError("This signer has already signed this RemoteTransaction.")

            logger.info(f" Proceeding with multisignature signing for RemoteTransaction.")

        # Step 2: Keys must be loaded before any signing work.
        if not self._private_key or not self._public_key:
            raise ValueError("Signer keys not set. Call set_keys() first.")

        logger.info(" Computing transaction hash...")
        txn_hash = txn.get_hash()

        logger.info(f" Signing transaction (hash: {txn_hash.hex()})...")
        signature_data = await self.sign_transaction(signature_type, txn_hash, txn.header)

        logger.info(f" Constructing envelope...")
        envelope = {
            "signatures": [signature_data],
            "transaction": [txn.to_dict()]
        }

        if debug:
            formatted_json = json.dumps(envelope, indent=4)
            logger.info(f" RPC Request (Not Sent):\n{formatted_json}")
            return envelope  # Return the request without sending

        logger.info(f" Submitting transaction to the Accumulate network...")
        try:
            # Round-trip through JSON to fail fast on non-serializable content
            # and to normalize the payload to JSON-safe types before submission.
            json_envelope = json.dumps(envelope)
            response = await client.submit(json.loads(json_envelope))
            logger.info(f" Transaction Submitted Successfully! Response: {response}")
            return response
        except Exception as e:
            logger.error(f" Transaction Submission Failed: {e}")
            raise

calculate_metadata_hash(public_key, timestamp, signer, version, signature_type) staticmethod

Compute Accumulate's signature metadata (initiator) hash using uvarint encoding for all fields. This exactly mirrors the Dart implementation.

Fields: - Field 1: Signature type (varint-encoded) - Field 2: Public key (varint-encoded length + raw bytes) - Field 4: Signer URL (UTF-8 encoded, with varint length prefix) - Field 5: Signer version (varint-encoded) - Field 6: Timestamp (varint-encoded)

Source code in accumulate\signing\signer.py
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
@staticmethod
def calculate_metadata_hash(public_key: bytes, timestamp: int, signer: str, version: int, signature_type: int) -> bytes:
    """
    Compute Accumulate's signature metadata (initiator) hash.

    Every field is uvarint-encoded and prefixed with its protocol field tag,
    mirroring the Dart implementation exactly:

    - Field 1: Signature type (varint-encoded)
    - Field 2: Public key (varint-encoded length + raw bytes)
    - Field 4: Signer URL (UTF-8 encoded, with varint length prefix)
    - Field 5: Signer version (varint-encoded)
    - Field 6: Timestamp (varint-encoded)
    """
    from accumulate.utils.encoding import encode_uvarint

    for shown in (public_key.hex(), timestamp, signer, version, signature_type):
        logger.debug(f" calculate_metadata_hash: {shown}")

    # The signer URL participates as UTF-8 bytes.
    signer_bytes = signer.encode("utf-8")

    # Tagged fields in protocol order (field 3 is unused by this scheme).
    fields = [
        (b"\x01", encode_uvarint(signature_type)),
        (b"\x02", encode_uvarint(len(public_key)) + public_key),
        (b"\x04", encode_uvarint(len(signer_bytes)) + signer_bytes),
        (b"\x05", encode_uvarint(version)),
        (b"\x06", encode_uvarint(timestamp)),
    ]
    metadata = b"".join(tag + payload for tag, payload in fields)

    logger.debug(f" Final Metadata Encoding (hex): {metadata.hex()}")

    digest = hashlib.sha256(metadata).digest()
    logger.debug(f"Metadata Hash (SHA-256, hex): {digest.hex()}")
    return digest

get_signature_type() async

Fetch the appropriate signature type dynamically.

  • If already cached, return it.
  • Otherwise, determine it dynamically.
Source code in accumulate\signing\signer.py
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
async def get_signature_type(self) -> SignatureType:
    """
    Return this signer's signature type.

    A cached value wins; otherwise the type is resolved (currently a static
    ED25519 default pending a real metadata lookup) and cached for next time.
    """
    cached = self._signature_type
    if cached:
        return cached

    # TODO: resolve the real type from signer metadata instead of defaulting.
    self._signature_type = SignatureType.ED25519

    logger.info(f" Determined Signature Type: {self._signature_type.name}")
    return self._signature_type

sign_and_submit_transaction(client, txn, signature_type, debug=False) async

Signs the transaction, constructs the envelope, and submits it.

:param client: AccumulateClient instance :param txn: Transaction object :param signature_type: Type of signature (e.g., ED25519) :param debug: If True, print the exact JSON request without sending it. :return: Response from the Accumulate network or printed JSON in debug mode.

Source code in accumulate\signing\signer.py
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
async def sign_and_submit_transaction(
    self, client: "AccumulateClient", txn: Transaction, signature_type: SignatureType, debug: bool = False
) -> dict:
    """
    Signs the transaction, constructs the envelope, and submits it.

    :param client: AccumulateClient instance
    :param txn: Transaction object
    :param signature_type: Type of signature (e.g., ED25519)
    :param debug: If True, print the exact JSON request without sending it.
    :return: Response from the Accumulate network or printed JSON in debug mode.
    :raises ValueError: If keys are unset, or a RemoteTransaction disallows
        multisig / this key already signed it.
    """
    #  Step 1: Check if this is a Remote Transaction
    if isinstance(txn.body, RemoteTransaction):
        logger.info(" RemoteTransaction detected, checking multisignature conditions...")

        #  If it does NOT support multisignature, prevent signing
        if not txn.body.allows_multisig():
            raise ValueError("Cannot sign a RemoteTransaction that does not allow multisignatures.")

        #  Otherwise, allow signing but ensure it's an additional signature
        # (reject a duplicate signature from this same public key).
        existing_signatures = txn.body.get_existing_signatures()
        if self.get_public_key() in existing_signatures:
            raise ValueError("This signer has already signed this RemoteTransaction.")

        logger.info(f" Proceeding with multisignature signing for RemoteTransaction.")

    #  Step 2: Check if the signer has keys set
    if not self._private_key or not self._public_key:
        raise ValueError("Signer keys not set. Call set_keys() first.")

    logger.info(" Computing transaction hash...")
    txn_hash = txn.get_hash()

    logger.info(f" Signing transaction (hash: {txn_hash.hex()})...")
    # The header carries the timestamp used in the metadata (initiator) hash.
    signature_data = await self.sign_transaction(signature_type, txn_hash, txn.header)

    logger.info(f" Constructing envelope...")
    envelope = {
        "signatures": [signature_data],
        "transaction": [txn.to_dict()]
    }

    if debug:
        formatted_json = json.dumps(envelope, indent=4)
        logger.info(f" RPC Request (Not Sent):\n{formatted_json}")
        return envelope  # Return the request without sending

    logger.info(f" Submitting transaction to the Accumulate network...")
    try:
        # Round-trip through JSON: fails fast on non-serializable content and
        # normalizes the payload to JSON-safe types before submission.
        json_envelope = json.dumps(envelope)
        response = await client.submit(json.loads(json_envelope))
        logger.info(f" Transaction Submitted Successfully! Response: {response}")
        return response
    except Exception as e:
        logger.error(f" Transaction Submission Failed: {e}")
        raise

sign_transaction(signature_type, message, txn_header, signer_version=None) async

Signs the transaction using the timestamp from the TransactionHeader.

:param signature_type: The signature type (e.g., ED25519, BTC, etc.). :param message: The transaction hash to sign. :param txn_header: The TransactionHeader instance, containing the timestamp. :param signer_version: The signer's version (if None, uses the default). :return: A signed transaction dictionary.

Source code in accumulate\signing\signer.py
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
async def sign_transaction(
    self,
    signature_type: SignatureType,
    message: bytes,
    txn_header: TransactionHeader,  # Supplies the timestamp for the metadata hash
    signer_version: Optional[int] = None
) -> dict:
    """
    Signs the transaction using the timestamp from the TransactionHeader.

    :param signature_type: The signature type (e.g., ED25519, BTC, etc.).
    :param message: The transaction hash to sign.
    :param txn_header: The TransactionHeader instance, containing the timestamp.
    :param signer_version: The signer's version (if None, uses the default).
    :return: A signed transaction dictionary.
    :raises ValueError: If keys are unset, the header lacks a timestamp, or
        the signature type is unsupported.
    """
    if not self._private_key:
        raise ValueError("Private key not set. Call set_keys() first.")
    if not self._public_key:
        raise ValueError("Public key not set. Call set_keys() properly.")

    if signer_version is None:
        signer_version = self._signer_version

    # The timestamp must come from the transaction header so the metadata
    # (initiator) hash matches what the transaction committed to.
    timestamp = txn_header.timestamp
    if timestamp is None:
        raise ValueError("Transaction header does not have a timestamp!")

    logger.info(f" Signer Using TransactionHeader Timestamp: {timestamp}")

    # Compute metadata (initiator) hash using uvarint encoding.
    metadata_hash = Signer.calculate_metadata_hash(
        self.get_public_key(), timestamp, str(self.url), signer_version, signature_type.value
    )

    logger.info(f" FROM SIGNER - Metadata (Public key) Hash: {self.get_public_key().hex()}")
    logger.info(f" FROM SIGNER - Metadata (timestamp) Hash: {timestamp}")
    logger.info(f" FROM SIGNER - Metadata (signer) Hash: {str(self.url)}")
    logger.info(f" FROM SIGNER - Metadata (signer_version) Hash: {signer_version}")
    # BUGFIX: log label previously misspelled "signature_type.vale".
    logger.info(f" FROM SIGNER - Metadata (signature_type.value) Hash: {signature_type.value}")

    logger.info(f" FROM SIGNER - Metadata (initiator) Hash: {metadata_hash.hex()}")

    # 'message' is the final transaction hash computed during marshalling.
    logger.info(f"Message passed for signing (transaction hash): {message.hex()}")

    # The value actually signed is sha256(metadata_hash + transaction_hash).
    final_hash = hashlib.sha256(metadata_hash + message).digest()
    logger.info(f"Final Hash for Signing (sha256(metadata_hash + message)): {final_hash.hex()}")

    # Sign using the appropriate algorithm for the requested type.
    if signature_type == SignatureType.ED25519:
        signature = self._private_key.sign(final_hash)
    elif signature_type == SignatureType.BTC:
        priv_key = ec.derive_private_key(int.from_bytes(self.get_private_key(), "big"), ec.SECP256K1())
        signature = priv_key.sign(final_hash, ec.ECDSA(hashes.SHA256()))
    elif signature_type == SignatureType.ETH:
        eth_key = eth_keys.PrivateKey(self.get_private_key())
        signature = eth_key.sign_msg_hash(final_hash).to_bytes()
    elif signature_type == SignatureType.RSA_SHA256:
        private_key_obj = serialization.load_pem_private_key(self.get_private_key(), password=None, backend=default_backend())
        if isinstance(private_key_obj, rsa.RSAPrivateKey):
            signature = private_key_obj.sign(
                final_hash,
                PKCS1v15(),
                hashes.SHA256(),
            )
        else:
            raise ValueError("Invalid RSA private key")
    elif signature_type == SignatureType.ECDSA_SHA256:
        priv_key = ec.derive_private_key(int.from_bytes(self.get_private_key(), "big"), ec.SECP256K1())
        signature = priv_key.sign(final_hash, ec.ECDSA(hashes.SHA256()))
    else:
        raise ValueError(f"Unsupported signature type: {signature_type}")

    logger.info("sign_transaction() called successfully!")
    logger.info(f"Signature Generated: {signature.hex()}")

    signed_transaction = {
        "type": signature_type.name.lower(),
        "publicKey": self.get_public_key().hex(),
        "signature": signature.hex(),
        "signer": str(self.url),
        "signerVersion": signer_version,
        "timestamp": timestamp,
        "transactionHash": message.hex()  # The original transaction hash passed in
    }

    logger.info(f"Signed Transaction Data - {signed_transaction}")

    return signed_transaction

timestamp

Timestamp

Bases: ABC

Abstract base class for timestamp implementations.

Source code in accumulate\signing\timestamp.py
 7
 8
 9
10
11
12
13
14
15
16
17
18
class Timestamp(ABC):
    """Interface for objects that supply integer timestamp values."""

    @abstractmethod
    def get(self) -> int:
        """
        Produce the current timestamp value.

        Returns:
            int: The timestamp supplied by the concrete implementation.
        """
        ...

get() abstractmethod

Retrieve the current timestamp value.

Returns:

Name Type Description
int int

The current timestamp value.

Source code in accumulate\signing\timestamp.py
10
11
12
13
14
15
16
17
18
@abstractmethod
def get(self) -> int:
    """
    Retrieve the current timestamp value.

    Subclasses must override this; the abstract body is a no-op placeholder.

    Returns:
        int: The current timestamp value.
    """
    pass

TimestampFromValue

Bases: Timestamp

Static timestamp that always returns a predefined value.

Source code in accumulate\signing\timestamp.py
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
class TimestampFromValue(Timestamp):
    """Timestamp source that is frozen at a single, caller-supplied value."""

    def __init__(self, value: int):
        # Negative timestamps are meaningless for this protocol.
        if value < 0:
            raise ValueError("Timestamp value must be non-negative")
        self._value = value

    def get(self) -> int:
        """
        Return the fixed value supplied at construction time.

        Returns:
            int: The predefined timestamp value.
        """
        return self._value

get()

Retrieve the static timestamp value.

Returns:

Name Type Description
int int

The predefined timestamp value.

Source code in accumulate\signing\timestamp.py
28
29
30
31
32
33
34
35
def get(self) -> int:
    """Return the predefined timestamp value held by this instance.

    Returns:
        int: The predefined timestamp value.
    """
    return self._value

TimestampFromVariable

Bases: Timestamp

Dynamic timestamp that starts with real time (in milliseconds) and increments.

Source code in accumulate/signing/timestamp.py
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
class TimestampFromVariable(Timestamp):
    """Monotonic timestamp source seeded from wall-clock time in milliseconds.

    Each call to get() atomically increments the counter by one, so
    concurrent callers always observe strictly increasing, unique values.
    """

    # NOTE: the annotation is a string so it stays valid on Python < 3.10;
    # the original `initial_value: int = None` annotation was incorrect.
    def __init__(self, initial_value: "int | None" = None):
        """Initialize the counter.

        Args:
            initial_value: Starting value in milliseconds. Defaults to the
                current wall-clock time when omitted. Must be non-negative.

        Raises:
            ValueError: If initial_value is negative.
        """
        if initial_value is None:
            # Seed from real time, expressed as whole milliseconds
            # (integer arithmetic; the previous 1e3 literal was a float).
            initial_value = int(time.time() * 1000)
        if initial_value < 0:
            raise ValueError("Initial timestamp value must be non-negative")
        self._value = initial_value
        self._lock = threading.Lock()

    def get(self) -> int:
        """Atomically increment the counter by one millisecond and return it.

        Returns:
            int: The new (post-increment) timestamp value.
        """
        with self._lock:
            self._value += 1
            return self._value

    def reset(self, value: int = 0):
        """
        Reset the timestamp to a specified value (primarily for testing).

        Args:
            value (int): The value to reset the timestamp to. Must be non-negative.

        Raises:
            ValueError: If the provided value is negative.
        """
        if value < 0:
            raise ValueError("Reset value must be non-negative")
        with self._lock:
            self._value = value

get()

Atomically increment and retrieve the timestamp value.

Source code in accumulate/signing/timestamp.py
48
49
50
51
52
def get(self) -> int:
    """Bump the counter by one millisecond and return the new value.

    The increment and read happen under the instance lock so concurrent
    callers never receive duplicate timestamps.
    """
    with self._lock:
        new_value = self._value + 1
        self._value = new_value
        return new_value

reset(value=0)

Reset the timestamp to a specified value (primarily for testing).

Parameters:

Name Type Description Default
value int

The value to reset the timestamp to. Must be non-negative.

0

Raises:

Type Description
ValueError

If the provided value is negative.

Source code in accumulate/signing/timestamp.py
54
55
56
57
58
59
60
61
62
63
64
65
66
67
def reset(self, value: int = 0):
    """Set the counter back to *value* (intended primarily for tests).

    Args:
        value (int): New counter value; must be non-negative.

    Raises:
        ValueError: If *value* is negative.
    """
    # Validate before touching the lock so invalid input leaves state intact.
    if value >= 0:
        with self._lock:
            self._value = value
    else:
        raise ValueError("Reset value must be non-negative")