
fmfi-compbio/deepnano-blitz 41

Very fast ONT basecaller

cardano-foundation/ledger-app-cardano 7

Cardano Ledger App for Ledger Nano S

cardano-foundation/ledgerjs-hw-app-cardano 3

Cardano bindings for Ledger Nano S JavaScript APIs for Node.js and browsers

ederlf/CapFlow 2

A captive portal for the SDNHub Hackathon

fmfi-compbio/nadavca 0

NAnopore DAta Variant CAller

ppershing/cgm-remote-monitor 0

nightscout web monitor

ppershing/kompilatory 0

Automatically exported from code.google.com/p/kompilatory

ppershing/krypto2 0

Automatically exported from code.google.com/p/krypto2

Pull request review comment vacuumlabs/cardano-hw-cli

Tx signing

+import { isArrayOfType } from '../guards'+import {+  TxInput,+  TxOutput,+  TxStakingKeyRegistrationCert,+  TxStakingKeyDeregistrationCert,+  TxDelegationCert,+  TxCertificateKeys,+  TxStakepoolRegistrationCert,+  TxMultiHostNameRelay,+  TxSingleHostIPRelay,+  TxSingleHostNameRelay,+  TxRelayTypes,+} from './types'++export const isTxInput = (+  test: any,+): test is TxInput => test.length === 2+  && Buffer.isBuffer(test[0])+  && Number.isInteger(test[1])++export const isTxOutput = (+  test: any,+): test is TxOutput => test.length === 2+  && Buffer.isBuffer(test[0])+  && Number.isInteger(test[1])++export const isWithdrawalsMap = (+  test: any,+): test is Map<Buffer, number> => test instanceof Map+  && Array.from(test.keys()).every((value) => Buffer.isBuffer(value))+  && Array.from(test.values()).every((value) => Number.isInteger(value))++export const isTxStakingKeyRegistrationCert = (+  test: any,+): test is TxStakingKeyRegistrationCert => Array.isArray(test)+  && test.length === 2+  && test[0] === TxCertificateKeys.STAKING_KEY_REGISTRATION+  && Array.isArray(test[1])+  && test[1].length === 2+  && Number.isInteger(test[1][0])+  && Buffer.isBuffer(test[1][1])++export const isStakingKeyDeregistrationCert = (+  test: any,+): test is TxStakingKeyDeregistrationCert => Array.isArray(test)+  && test.length === 2+  && test[0] === TxCertificateKeys.STAKING_KEY_DEREGISTRATION+  && Array.isArray(test[1])+  && test[1].length === 2+  && Number.isInteger(test[1][0])+  && Buffer.isBuffer(test[1][1])++export const isDelegationCert = (+  test: any,+): test is TxDelegationCert => Array.isArray(test)+  && test.length === 3+  && test[0] === TxCertificateKeys.DELEGATION+  && Array.isArray(test[1])+  && test[1].length === 2+  && Number.isInteger(test[1][0])+  && Buffer.isBuffer(test[1][1])+  && Buffer.isBuffer(test[2])++export const isTxSingleHostIPRelay = (+  test: any,+): test is TxSingleHostIPRelay => Array.isArray(test)+  && test.length <= 4+  && test[0] === TxRelayTypes.SINGLE_HOST_IP+  && (test[1] === null || Number.isInteger(test[1]))+  && (test[2] === null || Buffer.isBuffer(test[2]))+  && (test[3] === null || Buffer.isBuffer(test[3]))++export const isTxSingleHostNameRelay = (+  test: any,+): test is TxSingleHostNameRelay => Array.isArray(test)+  && test.length === 3+  && test[0] === TxRelayTypes.SINGLE_HOST_NAME+  && Number.isInteger(test[1])+  && typeof test[2] === 'string'++export const isTxMultiHostNameRelay = (+  test: any,+): test is TxMultiHostNameRelay => Array.isArray(test)+  && test.length === 2+  && test[0] === TxRelayTypes.MULTI_HOST_NAME+  && typeof test[1] === 'string'++const isMargin = (test: any) => typeof test === 'object'+  && 'value' in test+  && 0 in test.value+  && Number.isInteger(test.value[0])+  && 1 in test.value+  && Number.isInteger(test.value[1])++const isMetaData = (test: any) => Array.isArray(test)+  && test.length === 2+  && typeof test[0] === 'string'+  && Buffer.isBuffer(test[1])++export const isStakepoolRegistrationCert = (+  test: any,+): test is TxStakepoolRegistrationCert => Array.isArray(test)+  && test.length === 10

I think we should turn this into a full function for readability:

fn(value) {
  if (!Array.isArray(value) || value.length !== 10) return false;
  const [a, b, c, d, x, y, z] = value
  return condition(a) && condition(b) && condition(c) ...
}
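For concreteness, a sketch of that refactor applied to isStakepoolRegistrationCert could look as follows; the destructured field names and the per-field checks are assumptions, since the rest of the guard is cut off in the diff above.

import { TxStakepoolRegistrationCert, TxCertificateKeys } from './types'

// Sketch only: field names/checks past the length and certificate-type test are illustrative.
export const isStakepoolRegistrationCert = (
  test: any,
): test is TxStakepoolRegistrationCert => {
  if (!Array.isArray(test) || test.length !== 10) return false
  const [certType, poolKeyHash, vrfKeyHash, pledge, cost] = test
  return certType === TxCertificateKeys.STAKEPOOL_REGISTRATION
    && Buffer.isBuffer(poolKeyHash)
    && Buffer.isBuffer(vrfKeyHash)
    && Number.isInteger(pledge)
    && Number.isInteger(cost)
    // ... checks for the remaining fields (margin, reward account, owners, relays, metadata)
}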
PeterBenc

comment created time in a day


Pull request review comment vacuumlabs/ledger-app-cardano-shelley

stake pool registration

 void ui_displayNetworkParamsScreen( 	        callback 	); }++void ui_displayHexBufferScreen(+        const char* screenHeader,+        const uint8_t* buffer, size_t bufferSize,+        ui_callback_fn_t callback+)+{+	ASSERT(strlen(screenHeader) > 0);+	ASSERT(strlen(screenHeader) < BUFFER_SIZE_PARANOIA);+	ASSERT(bufferSize > 0);+	ASSERT(bufferSize <= 32); // this is used for hashes, all are <= 32 bytes++	char bufferHex[2 * 32 + 1];+	explicit_bzero(bufferHex, SIZEOF(bufferHex));++	size_t length = encode_hex(+	                        buffer, bufferSize,+	                        bufferHex, SIZEOF(bufferHex)+	                );+	ASSERT(length == strlen(bufferHex));+	ASSERT(length == 2 * bufferSize);++	ui_displayPaginatedText(+	        screenHeader,+	        bufferHex,+	        callback+	);+}++void ui_displayMarginScreen(+        uint64_t marginNumerator, uint64_t marginDenominator,+        ui_callback_fn_t callback+)+{+	TRACE("%d %d", marginNumerator, marginDenominator);+	TRACE_BUFFER((uint8_t *) &marginNumerator, 8);+	TRACE_BUFFER((uint8_t *) &marginDenominator, 8);++	ASSERT(marginDenominator != 0);+	ASSERT(marginNumerator <= marginDenominator);+	ASSERT(marginDenominator <= MARGIN_DENOMINATOR_MAX);++	char marginStr[20];+	explicit_bzero(marginStr, SIZEOF(marginStr));++	{+		// marginPercentage is a multiple of 1/100th of 1%, i.e. the fractional part of the percentage has two digits+		// adding marginDenominator / 2 to have a rounded result+		uint64_t marginPercentage = (10000 * marginNumerator + (marginDenominator / 2)) / marginDenominator;+		ASSERT(marginPercentage <= 10000);++		unsigned int percentage = (unsigned int) marginPercentage;++		snprintf(marginStr, SIZEOF(marginStr), "%u.%u %%", percentage / 100, percentage % 100);+		ASSERT(strlen(marginStr) < SIZEOF(marginStr) - 1);+	}++	TRACE("%s", marginStr);++	ui_displayPaginatedText(+	        "Profit margin",+	        marginStr,+	        callback+	);+}++void ui_displayOwnerScreen(+        const pool_owner_t* owner, uint8_t networkId,+        ui_callback_fn_t callback+)+{+	{+		// assert inputs+		ASSERT(isValidNetworkId(networkId));++		switch (owner->ownerType) {++		case SIGN_TX_POOL_OWNER_TYPE_KEY_HASH:+			ASSERT(SIZEOF(owner->keyHash) == ADDRESS_KEY_HASH_LENGTH);+			break;++		case SIGN_TX_POOL_OWNER_TYPE_PATH:+			ASSERT(bip44_isValidStakingKeyPath(&owner->path));+			break;++		default:+			ASSERT(false);+		}+	}++	// we display the owner as bech32-encoded reward address for his staking key+	uint8_t rewardAddress[1 + ADDRESS_KEY_HASH_LENGTH];+	{+		if (owner->ownerType == SIGN_TX_POOL_OWNER_TYPE_PATH) {+			addressParams_t rewardAddressParams = {+				.type = REWARD,+				.networkId = networkId,+				.spendingKeyPath = owner->path,+				.stakingChoice = NO_STAKING,+			};++			deriveAddress(+			        &rewardAddressParams,+			        rewardAddress,+			        SIZEOF(rewardAddress)+			);+		} else {+			constructRewardAddress(+			        networkId,+			        owner->keyHash, SIZEOF(owner->keyHash),+			        rewardAddress, SIZEOF(rewardAddress)+			);+		}+	}++	char ownerDescription[MAX_PATH_LENGTH + MAX_HUMAN_ADDRESS_SIZE + 1];+	explicit_bzero(ownerDescription, SIZEOF(ownerDescription));++	if (owner->ownerType == SIGN_TX_POOL_OWNER_TYPE_PATH) {+		bip44_printToStr(&owner->path, ownerDescription, SIZEOF(ownerDescription));+	}++	{+		// add owner (represented as bech32-encoded reward account for owner's staking key)+		size_t pathLength = strlen(ownerDescription);+		ASSERT(pathLength <= MAX_PATH_LENGTH);+		ASSERT(pathLength <= SIZEOF(ownerDescription) - 2);++		if 
(pathLength > 0) {+			// add a space+			ownerDescription[pathLength++] = ' ';+			ASSERT(pathLength <= SIZEOF(ownerDescription) - 1);+			ownerDescription[pathLength] = '\0';+			ASSERT(pathLength == strlen(ownerDescription));+		}++		size_t ownerAccountLength = humanReadableAddress(

I think I would prefer the style strLen += humanRe...(), that way you can perform multiple appends with

len += strAppend(str+len, ..., )
len += strAppend(...., )
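For illustration, a self-contained sketch of that pattern; strAppend is a hypothetical helper here, not an existing function in the codebase:

#include <stdio.h>
#include <string.h>

// Hypothetical helper: append src into out (at most outSize - 1 chars plus the
// terminator) and return the number of characters written, so that calls can
// be chained with len += strAppend(...).
static size_t strAppend(char* out, size_t outSize, const char* src)
{
	size_t srcLen = strlen(src);
	if (outSize == 0) return 0;
	if (srcLen >= outSize) srcLen = outSize - 1; // truncate defensively
	memcpy(out, src, srcLen);
	out[srcLen] = '\0';
	return srcLen;
}

int main(void)
{
	char ownerDescription[64] = {0};
	size_t len = 0;
	// example data only
	len += strAppend(ownerDescription + len, sizeof(ownerDescription) - len, "m/1852'/1815'/0'/2/0");
	len += strAppend(ownerDescription + len, sizeof(ownerDescription) - len, " ");
	len += strAppend(ownerDescription + len, sizeof(ownerDescription) - len, "stake1...");
	printf("%s\n", ownerDescription);
	return 0;
}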
janmazak

comment created time in 2 days


Pull request review comment vacuumlabs/ledger-app-cardano-shelley

stake pool registration

 void ui_displayNetworkParamsScreen( 	        callback 	); }++void ui_displayHexBufferScreen(+        const char* screenHeader,+        const uint8_t* buffer, size_t bufferSize,+        ui_callback_fn_t callback+)+{+	ASSERT(strlen(screenHeader) > 0);+	ASSERT(strlen(screenHeader) < BUFFER_SIZE_PARANOIA);+	ASSERT(bufferSize > 0);+	ASSERT(bufferSize <= 32); // this is used for hashes, all are <= 32 bytes++	char bufferHex[2 * 32 + 1];+	explicit_bzero(bufferHex, SIZEOF(bufferHex));++	size_t length = encode_hex(+	                        buffer, bufferSize,+	                        bufferHex, SIZEOF(bufferHex)+	                );+	ASSERT(length == strlen(bufferHex));+	ASSERT(length == 2 * bufferSize);++	ui_displayPaginatedText(+	        screenHeader,+	        bufferHex,+	        callback+	);+}++void ui_displayMarginScreen(+        uint64_t marginNumerator, uint64_t marginDenominator,+        ui_callback_fn_t callback+)+{+	TRACE("%d %d", marginNumerator, marginDenominator);+	TRACE_BUFFER((uint8_t *) &marginNumerator, 8);+	TRACE_BUFFER((uint8_t *) &marginDenominator, 8);++	ASSERT(marginDenominator != 0);+	ASSERT(marginNumerator <= marginDenominator);+	ASSERT(marginDenominator <= MARGIN_DENOMINATOR_MAX);++	char marginStr[20];+	explicit_bzero(marginStr, SIZEOF(marginStr));++	{+		// marginPercentage is a multiple of 1/100th of 1%, i.e. the fractional part of the percentage has two digits+		// adding marginDenominator / 2 to have a rounded result+		uint64_t marginPercentage = (10000 * marginNumerator + (marginDenominator / 2)) / marginDenominator;+		ASSERT(marginPercentage <= 10000);++		unsigned int percentage = (unsigned int) marginPercentage;++		snprintf(marginStr, SIZEOF(marginStr), "%u.%u %%", percentage / 100, percentage % 100);+		ASSERT(strlen(marginStr) < SIZEOF(marginStr) - 1);+	}++	TRACE("%s", marginStr);++	ui_displayPaginatedText(+	        "Profit margin",+	        marginStr,+	        callback+	);+}++void ui_displayOwnerScreen(+        const pool_owner_t* owner, uint8_t networkId,+        ui_callback_fn_t callback+)+{+	{+		// assert inputs+		ASSERT(isValidNetworkId(networkId));++		switch (owner->ownerType) {++		case SIGN_TX_POOL_OWNER_TYPE_KEY_HASH:+			ASSERT(SIZEOF(owner->keyHash) == ADDRESS_KEY_HASH_LENGTH);+			break;++		case SIGN_TX_POOL_OWNER_TYPE_PATH:+			ASSERT(bip44_isValidStakingKeyPath(&owner->path));+			break;++		default:+			ASSERT(false);+		}+	}++	// we display the owner as bech32-encoded reward address for his staking key+	uint8_t rewardAddress[1 + ADDRESS_KEY_HASH_LENGTH];+	{+		if (owner->ownerType == SIGN_TX_POOL_OWNER_TYPE_PATH) {+			addressParams_t rewardAddressParams = {+				.type = REWARD,+				.networkId = networkId,+				.spendingKeyPath = owner->path,+				.stakingChoice = NO_STAKING,+			};++			deriveAddress(+			        &rewardAddressParams,+			        rewardAddress,+			        SIZEOF(rewardAddress)+			);+		} else {+			constructRewardAddress(+			        networkId,+			        owner->keyHash, SIZEOF(owner->keyHash),+			        rewardAddress, SIZEOF(rewardAddress)+			);+		}+	}++	char ownerDescription[MAX_PATH_LENGTH + MAX_HUMAN_ADDRESS_SIZE + 1];+	explicit_bzero(ownerDescription, SIZEOF(ownerDescription));++	if (owner->ownerType == SIGN_TX_POOL_OWNER_TYPE_PATH) {+		bip44_printToStr(&owner->path, ownerDescription, SIZEOF(ownerDescription));+	}++	{+		// add owner (represented as bech32-encoded reward account for owner's staking key)+		size_t pathLength = strlen(ownerDescription);+		ASSERT(pathLength <= MAX_PATH_LENGTH);+		ASSERT(pathLength <= SIZEOF(ownerDescription) - 2);

prefer + instead of -, as the - forces unsigned->signed conversion chaos
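Applied to the assert in the diff above, that would read roughly:

// instead of: ASSERT(pathLength <= SIZEOF(ownerDescription) - 2);
ASSERT(pathLength + 2 <= SIZEOF(ownerDescription));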

janmazak

comment created time in 2 days


Pull request review comment vacuumlabs/ledger-app-cardano-shelley

stake pool registration

 void txHashBuilder_addCertificate_delegation( 	} } -void txHashBuilder_assertCanLeaveCertificates(tx_hash_builder_t* builder)+void txHashBuilder_addPoolRegistrationCertificate(+        tx_hash_builder_t* builder,+        const pool_registration_params_t* params,+        uint16_t numOwners, uint16_t numRelays+)+{+	TRACE("txHashBuilder_addPoolRegistrationCertificate: %d", builder->state);++	ASSERT(builder->state == TX_HASH_BUILDER_IN_CERTIFICATES);+	ASSERT(builder->remainingCertificates > 0);+	builder->remainingCertificates--;++	ASSERT(builder->poolCertificateData.remainingOwners == 0);+	builder->poolCertificateData.remainingOwners = numOwners;+	ASSERT(builder->poolCertificateData.remainingRelays == 0);+	builder->poolCertificateData.remainingRelays = numRelays;++	// Array(10)[+	//   Unsigned[3]+	//   Bytes[pool_keyhash]+	//   Bytes[vrf_keyhash]+	//   Unsigned[pledge]+	//   Unsigned[cost]+	//   Tag(30) Array(2)[+	//     Unsigned[marginDenominator]+	//     Unsigned[marginNumerator]+	//   ]+	//   Bytes[rewardAccount]++	// the array is not closed yet, we need to add owners, relays, pool metadata+	{+		const pool_registration_params_t* p = params;+		BUILDER_APPEND_CBOR(CBOR_TYPE_ARRAY, 10);+		{+			BUILDER_APPEND_CBOR(CBOR_TYPE_UNSIGNED, 3);+		}+		{+			BUILDER_APPEND_CBOR(CBOR_TYPE_BYTES, SIZEOF(p->operatorHash));+			BUILDER_APPEND_DATA(p->operatorHash, SIZEOF(p->operatorHash));+		}+		{+			BUILDER_APPEND_CBOR(CBOR_TYPE_BYTES, SIZEOF(p->vrfKeyHash));+			BUILDER_APPEND_DATA(p->vrfKeyHash, SIZEOF(p->vrfKeyHash));+		}+		{+			BUILDER_APPEND_CBOR(CBOR_TYPE_UNSIGNED, p->pledge);+		}+		{+			BUILDER_APPEND_CBOR(CBOR_TYPE_UNSIGNED, p->cost);+		}+		{+			BUILDER_APPEND_CBOR(CBOR_TYPE_TAG, 30);+			BUILDER_APPEND_CBOR(CBOR_TYPE_ARRAY, 2);+			{+				BUILDER_APPEND_CBOR(CBOR_TYPE_UNSIGNED, p->marginNumerator);+			}+			{+				BUILDER_APPEND_CBOR(CBOR_TYPE_UNSIGNED, p->marginDenominator);+			}+		}+		{+			BUILDER_APPEND_CBOR(CBOR_TYPE_BYTES, SIZEOF(p->rewardAccount));+			BUILDER_APPEND_DATA(p->rewardAccount, SIZEOF(p->rewardAccount));+		}+	}++	builder->state = TX_HASH_BUILDER_IN_CERTIFICATES_POOL;+}++void txHashBuilder_addPoolRegistrationCertificate_enterOwners(tx_hash_builder_t* builder)+{+	TRACE("txHashBuilder_addPoolRegistrationCertificate_enterOwners: %d", builder->state);++	ASSERT(builder->state == TX_HASH_BUILDER_IN_CERTIFICATES_POOL);++	{+		BUILDER_APPEND_CBOR(CBOR_TYPE_ARRAY, builder->poolCertificateData.remainingOwners);+	}++	builder->state = TX_HASH_BUILDER_IN_CERTIFICATES_POOL_OWNERS;+}++void txHashBuilder_addPoolRegistrationCertificate_addOwner(+        tx_hash_builder_t* builder,+        const uint8_t* stakingKeyHash, size_t stakingKeyHashSize+)+{+	TRACE("txHashBuilder_addPoolRegistrationCertificate_addOwner: %d", builder->state);++	ASSERT(builder->state == TX_HASH_BUILDER_IN_CERTIFICATES_POOL_OWNERS);+	ASSERT(builder->poolCertificateData.remainingOwners > 0);+	builder->poolCertificateData.remainingOwners--;++	// Bytes[poolKeyHash]+	{+		BUILDER_APPEND_CBOR(CBOR_TYPE_BYTES, stakingKeyHashSize);+		BUILDER_APPEND_DATA(stakingKeyHash, stakingKeyHashSize);+	}+}++void txHashBuilder_addPoolRegistrationCertificate_enterRelays(tx_hash_builder_t* builder)+{+	TRACE("txHashBuilder_addPoolRegistrationCertificate_enterRelays: %d", builder->state);++	// enter empty owners if none were received+	if (builder->state == TX_HASH_BUILDER_IN_CERTIFICATES_POOL)+		txHashBuilder_addPoolRegistrationCertificate_enterOwners(builder);++	ASSERT(builder->state == TX_HASH_BUILDER_IN_CERTIFICATES_POOL_OWNERS);+	
ASSERT(builder->poolCertificateData.remainingOwners == 0);++	{+		BUILDER_APPEND_CBOR(CBOR_TYPE_ARRAY, builder->poolCertificateData.remainingRelays);+	}++	builder->state = TX_HASH_BUILDER_IN_CERTIFICATES_POOL_RELAYS;+}++static void addRelay_updateState(tx_hash_builder_t* builder)+{+	switch (builder->state) {+	case TX_HASH_BUILDER_IN_CERTIFICATES_POOL:+	case TX_HASH_BUILDER_IN_CERTIFICATES_POOL_OWNERS:+		txHashBuilder_addPoolRegistrationCertificate_enterRelays(builder);+		break;++	case TX_HASH_BUILDER_IN_CERTIFICATES_POOL_RELAYS:+		break; // we want to be here++	default:+		ASSERT(false);+	}+	ASSERT(builder->state == TX_HASH_BUILDER_IN_CERTIFICATES_POOL_RELAYS);+}++void txHashBuilder_addPoolRegistrationCertificate_addRelay0(+        tx_hash_builder_t* builder,+        const uint16_t* port,+        const uint8_t* ipv4, size_t ipv4Size,+        const uint8_t* ipv6, size_t ipv6Size+)+{+	TRACE("txHashBuilder_addPoolRegistrationCertificate_addRelay0: %d", builder->state);++	addRelay_updateState(builder);+	ASSERT(builder->state == TX_HASH_BUILDER_IN_CERTIFICATES_POOL_RELAYS);+	ASSERT(builder->poolCertificateData.remainingRelays > 0);+	builder->poolCertificateData.remainingRelays--;++	ASSERT(ipv4Size < BUFFER_SIZE_PARANOIA);+	ASSERT(ipv6Size < BUFFER_SIZE_PARANOIA);++	// Array(4)[+	//   Unsigned[0]+	//   Unsigned[port] / Null+	//   Bytes[ipv4] / Null+	//   Bytes[ipv6] / Null+	// ]+	{+		BUILDER_APPEND_CBOR(CBOR_TYPE_ARRAY, 4);+		{+			BUILDER_APPEND_CBOR(CBOR_TYPE_UNSIGNED, 0);+		}+		{+			if (port != NULL) {+				BUILDER_APPEND_CBOR(CBOR_TYPE_UNSIGNED, *port);+			} else {+				BUILDER_APPEND_CBOR(CBOR_TYPE_NULL, 0);+			}+		}+		{+			if (ipv4 != NULL) {+				ASSERT(ipv4Size == 4);+				BUILDER_APPEND_CBOR(CBOR_TYPE_BYTES, ipv4Size);+				BUILDER_APPEND_DATA(ipv4, ipv4Size);+			} else {+				BUILDER_APPEND_CBOR(CBOR_TYPE_NULL, 0);+			}+		}+		{+			if (ipv6 != NULL) {+				ASSERT(ipv6Size == 16);+				BUILDER_APPEND_CBOR(CBOR_TYPE_BYTES, ipv6Size);++				// serialized as 4 big-endian uint32+				for (size_t i = 0; i < 4; i++) {+					for (size_t j = 0; j < 4; j++) {+						size_t index = 4 * i + (3 - j);+						ASSERT(index < ipv6Size);++						BUILDER_APPEND_DATA(ipv6 + index, 1);+					}+				}+			} else {+				BUILDER_APPEND_CBOR(CBOR_TYPE_NULL, 0);+			}+		}+	}+}++void txHashBuilder_addPoolRegistrationCertificate_addRelay1(+        tx_hash_builder_t* builder,+        const uint16_t* port,+        const uint8_t* dnsName, size_t dnsNameSize+)+{+	TRACE("txHashBuilder_addPoolRegistrationCertificate_addRelay1: %d", builder->state);++	addRelay_updateState(builder);+	ASSERT(builder->state == TX_HASH_BUILDER_IN_CERTIFICATES_POOL_RELAYS);+	ASSERT(builder->poolCertificateData.remainingRelays > 0);+	builder->poolCertificateData.remainingRelays--;++	ASSERT(dnsName != NULL);+	ASSERT(dnsNameSize > 0);+	ASSERT(dnsNameSize < BUFFER_SIZE_PARANOIA);++	// Array(3)[+	//   Unsigned[1]+	//   Unsigned[port] / Null+	//   Text[dnsName]+	// ]+	{+		BUILDER_APPEND_CBOR(CBOR_TYPE_ARRAY, 3);+		{+			BUILDER_APPEND_CBOR(CBOR_TYPE_UNSIGNED, 1);+		}+		{+			if (port != NULL) {+				BUILDER_APPEND_CBOR(CBOR_TYPE_UNSIGNED, *port);+			} else {+				BUILDER_APPEND_CBOR(CBOR_TYPE_NULL, 0);+			}+		}+		{+			BUILDER_APPEND_CBOR(CBOR_TYPE_TEXT, dnsNameSize);+			BUILDER_APPEND_DATA(dnsName, dnsNameSize);+		}+	}+}++void txHashBuilder_addPoolRegistrationCertificate_addRelay2(+        tx_hash_builder_t* builder,+        const uint8_t* dnsName, size_t dnsNameSize+)+{+	TRACE("txHashBuilder_addPoolRegistrationCertificate_addRelay2: %d", builder->state);++	
addRelay_updateState(builder);+	ASSERT(builder->state == TX_HASH_BUILDER_IN_CERTIFICATES_POOL_RELAYS);+	ASSERT(builder->poolCertificateData.remainingRelays > 0);+	builder->poolCertificateData.remainingRelays--;++	ASSERT(dnsName != NULL);+	ASSERT(dnsNameSize > 0);+	ASSERT(dnsNameSize <= 64);++	// Array(2)[+	//   Unsigned[2]+	//   Text[dnsName]+	// ]+	{+		BUILDER_APPEND_CBOR(CBOR_TYPE_ARRAY, 2);+		{+			BUILDER_APPEND_CBOR(CBOR_TYPE_UNSIGNED, 2);+		}+		{+			BUILDER_APPEND_CBOR(CBOR_TYPE_TEXT, dnsNameSize);+			BUILDER_APPEND_DATA(dnsName, dnsNameSize);+		}+	}+}++// enter empty owners or relays if none were received+static void addPoolMetadata_updateState(tx_hash_builder_t* builder)+{+	switch (builder->state) {+	case TX_HASH_BUILDER_IN_CERTIFICATES_POOL:

shouldn't there be an enter pool owners call here?

janmazak

comment created time in 2 days


Pull request review comment vacuumlabs/ledger-app-cardano-shelley

stake pool registration

 void txHashBuilder_addCertificate_delegation( 	} } -void txHashBuilder_assertCanLeaveCertificates(tx_hash_builder_t* builder)+void txHashBuilder_addPoolRegistrationCertificate(+        tx_hash_builder_t* builder,+        const pool_registration_params_t* params,+        uint16_t numOwners, uint16_t numRelays+)+{+	TRACE("txHashBuilder_addPoolRegistrationCertificate: %d", builder->state);++	ASSERT(builder->state == TX_HASH_BUILDER_IN_CERTIFICATES);+	ASSERT(builder->remainingCertificates > 0);+	builder->remainingCertificates--;++	ASSERT(builder->poolCertificateData.remainingOwners == 0);+	builder->poolCertificateData.remainingOwners = numOwners;+	ASSERT(builder->poolCertificateData.remainingRelays == 0);+	builder->poolCertificateData.remainingRelays = numRelays;++	// Array(10)[+	//   Unsigned[3]+	//   Bytes[pool_keyhash]+	//   Bytes[vrf_keyhash]+	//   Unsigned[pledge]+	//   Unsigned[cost]+	//   Tag(30) Array(2)[+	//     Unsigned[marginDenominator]+	//     Unsigned[marginNumerator]+	//   ]+	//   Bytes[rewardAccount]++	// the array is not closed yet, we need to add owners, relays, pool metadata+	{+		const pool_registration_params_t* p = params;+		BUILDER_APPEND_CBOR(CBOR_TYPE_ARRAY, 10);+		{+			BUILDER_APPEND_CBOR(CBOR_TYPE_UNSIGNED, 3);+		}+		{+			BUILDER_APPEND_CBOR(CBOR_TYPE_BYTES, SIZEOF(p->operatorHash));+			BUILDER_APPEND_DATA(p->operatorHash, SIZEOF(p->operatorHash));+		}+		{+			BUILDER_APPEND_CBOR(CBOR_TYPE_BYTES, SIZEOF(p->vrfKeyHash));+			BUILDER_APPEND_DATA(p->vrfKeyHash, SIZEOF(p->vrfKeyHash));+		}+		{+			BUILDER_APPEND_CBOR(CBOR_TYPE_UNSIGNED, p->pledge);+		}+		{+			BUILDER_APPEND_CBOR(CBOR_TYPE_UNSIGNED, p->cost);+		}+		{+			BUILDER_APPEND_CBOR(CBOR_TYPE_TAG, 30);+			BUILDER_APPEND_CBOR(CBOR_TYPE_ARRAY, 2);+			{+				BUILDER_APPEND_CBOR(CBOR_TYPE_UNSIGNED, p->marginNumerator);+			}+			{+				BUILDER_APPEND_CBOR(CBOR_TYPE_UNSIGNED, p->marginDenominator);+			}+		}+		{+			BUILDER_APPEND_CBOR(CBOR_TYPE_BYTES, SIZEOF(p->rewardAccount));+			BUILDER_APPEND_DATA(p->rewardAccount, SIZEOF(p->rewardAccount));+		}+	}++	builder->state = TX_HASH_BUILDER_IN_CERTIFICATES_POOL;+}++void txHashBuilder_addPoolRegistrationCertificate_enterOwners(tx_hash_builder_t* builder)+{+	TRACE("txHashBuilder_addPoolRegistrationCertificate_enterOwners: %d", builder->state);++	ASSERT(builder->state == TX_HASH_BUILDER_IN_CERTIFICATES_POOL);++	{+		BUILDER_APPEND_CBOR(CBOR_TYPE_ARRAY, builder->poolCertificateData.remainingOwners);+	}++	builder->state = TX_HASH_BUILDER_IN_CERTIFICATES_POOL_OWNERS;+}++void txHashBuilder_addPoolRegistrationCertificate_addOwner(+        tx_hash_builder_t* builder,+        const uint8_t* stakingKeyHash, size_t stakingKeyHashSize+)+{+	TRACE("txHashBuilder_addPoolRegistrationCertificate_addOwner: %d", builder->state);++	ASSERT(builder->state == TX_HASH_BUILDER_IN_CERTIFICATES_POOL_OWNERS);+	ASSERT(builder->poolCertificateData.remainingOwners > 0);+	builder->poolCertificateData.remainingOwners--;++	// Bytes[poolKeyHash]+	{+		BUILDER_APPEND_CBOR(CBOR_TYPE_BYTES, stakingKeyHashSize);+		BUILDER_APPEND_DATA(stakingKeyHash, stakingKeyHashSize);+	}+}++void txHashBuilder_addPoolRegistrationCertificate_enterRelays(tx_hash_builder_t* builder)+{+	TRACE("txHashBuilder_addPoolRegistrationCertificate_enterRelays: %d", builder->state);++	// enter empty owners if none were received+	if (builder->state == TX_HASH_BUILDER_IN_CERTIFICATES_POOL)+		txHashBuilder_addPoolRegistrationCertificate_enterOwners(builder);++	ASSERT(builder->state == TX_HASH_BUILDER_IN_CERTIFICATES_POOL_OWNERS);+	
ASSERT(builder->poolCertificateData.remainingOwners == 0);++	{+		BUILDER_APPEND_CBOR(CBOR_TYPE_ARRAY, builder->poolCertificateData.remainingRelays);+	}++	builder->state = TX_HASH_BUILDER_IN_CERTIFICATES_POOL_RELAYS;+}++static void addRelay_updateState(tx_hash_builder_t* builder)+{+	switch (builder->state) {+	case TX_HASH_BUILDER_IN_CERTIFICATES_POOL:+	case TX_HASH_BUILDER_IN_CERTIFICATES_POOL_OWNERS:+		txHashBuilder_addPoolRegistrationCertificate_enterRelays(builder);+		break;++	case TX_HASH_BUILDER_IN_CERTIFICATES_POOL_RELAYS:+		break; // we want to be here++	default:+		ASSERT(false);+	}+	ASSERT(builder->state == TX_HASH_BUILDER_IN_CERTIFICATES_POOL_RELAYS);+}++void txHashBuilder_addPoolRegistrationCertificate_addRelay0(+        tx_hash_builder_t* builder,+        const uint16_t* port,+        const uint8_t* ipv4, size_t ipv4Size,+        const uint8_t* ipv6, size_t ipv6Size+)+{+	TRACE("txHashBuilder_addPoolRegistrationCertificate_addRelay0: %d", builder->state);++	addRelay_updateState(builder);+	ASSERT(builder->state == TX_HASH_BUILDER_IN_CERTIFICATES_POOL_RELAYS);+	ASSERT(builder->poolCertificateData.remainingRelays > 0);+	builder->poolCertificateData.remainingRelays--;++	ASSERT(ipv4Size < BUFFER_SIZE_PARANOIA);+	ASSERT(ipv6Size < BUFFER_SIZE_PARANOIA);++	// Array(4)[+	//   Unsigned[0]+	//   Unsigned[port] / Null+	//   Bytes[ipv4] / Null+	//   Bytes[ipv6] / Null+	// ]+	{+		BUILDER_APPEND_CBOR(CBOR_TYPE_ARRAY, 4);+		{+			BUILDER_APPEND_CBOR(CBOR_TYPE_UNSIGNED, 0);+		}+		{+			if (port != NULL) {+				BUILDER_APPEND_CBOR(CBOR_TYPE_UNSIGNED, *port);+			} else {+				BUILDER_APPEND_CBOR(CBOR_TYPE_NULL, 0);+			}+		}+		{+			if (ipv4 != NULL) {+				ASSERT(ipv4Size == 4);+				BUILDER_APPEND_CBOR(CBOR_TYPE_BYTES, ipv4Size);+				BUILDER_APPEND_DATA(ipv4, ipv4Size);+			} else {+				BUILDER_APPEND_CBOR(CBOR_TYPE_NULL, 0);+			}+		}+		{+			if (ipv6 != NULL) {+				ASSERT(ipv6Size == 16);+				BUILDER_APPEND_CBOR(CBOR_TYPE_BYTES, ipv6Size);++				// serialized as 4 big-endian uint32+				for (size_t i = 0; i < 4; i++) {+					for (size_t j = 0; j < 4; j++) {

You should preferably use the endian-conversion utilities we have, not this hand-written code
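For illustration, a sketch of what that might look like; u4be_write is a stand-in name here, the project's actual endian helpers may be named differently:

#include <stdint.h>
#include <string.h>

// Stand-in helper: write a 32-bit value as big-endian bytes.
static void u4be_write(uint8_t* out, uint32_t value)
{
	out[0] = (uint8_t) (value >> 24);
	out[1] = (uint8_t) (value >> 16);
	out[2] = (uint8_t) (value >> 8);
	out[3] = (uint8_t) value;
}

// Serialize the 16-byte ipv6 buffer as 4 big-endian uint32 words; like the
// hand-written loop above, this assumes the words sit in memory in host
// (little-endian) order.
static void ipv6ToBigEndianWords(const uint8_t* ipv6, uint8_t* out)
{
	for (size_t i = 0; i < 4; i++) {
		uint32_t word;
		memcpy(&word, ipv6 + 4 * i, sizeof(word));
		u4be_write(out + 4 * i, word);
	}
}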

janmazak

comment created time in 2 days


Pull request review comment vacuumlabs/ledger-app-cardano-shelley

stake pool registration

 void txHashBuilder_addCertificate_delegation( 	} } -void txHashBuilder_assertCanLeaveCertificates(tx_hash_builder_t* builder)+void txHashBuilder_addPoolRegistrationCertificate(+        tx_hash_builder_t* builder,+        const pool_registration_params_t* params,+        uint16_t numOwners, uint16_t numRelays+)+{+	TRACE("txHashBuilder_addPoolRegistrationCertificate: %d", builder->state);++	ASSERT(builder->state == TX_HASH_BUILDER_IN_CERTIFICATES);+	ASSERT(builder->remainingCertificates > 0);+	builder->remainingCertificates--;++	ASSERT(builder->poolCertificateData.remainingOwners == 0);+	builder->poolCertificateData.remainingOwners = numOwners;+	ASSERT(builder->poolCertificateData.remainingRelays == 0);+	builder->poolCertificateData.remainingRelays = numRelays;++	// Array(10)[+	//   Unsigned[3]+	//   Bytes[pool_keyhash]+	//   Bytes[vrf_keyhash]+	//   Unsigned[pledge]+	//   Unsigned[cost]+	//   Tag(30) Array(2)[+	//     Unsigned[marginDenominator]+	//     Unsigned[marginNumerator]+	//   ]+	//   Bytes[rewardAccount]++	// the array is not closed yet, we need to add owners, relays, pool metadata+	{+		const pool_registration_params_t* p = params;+		BUILDER_APPEND_CBOR(CBOR_TYPE_ARRAY, 10);+		{+			BUILDER_APPEND_CBOR(CBOR_TYPE_UNSIGNED, 3);+		}+		{+			BUILDER_APPEND_CBOR(CBOR_TYPE_BYTES, SIZEOF(p->operatorHash));+			BUILDER_APPEND_DATA(p->operatorHash, SIZEOF(p->operatorHash));+		}+		{+			BUILDER_APPEND_CBOR(CBOR_TYPE_BYTES, SIZEOF(p->vrfKeyHash));+			BUILDER_APPEND_DATA(p->vrfKeyHash, SIZEOF(p->vrfKeyHash));+		}+		{+			BUILDER_APPEND_CBOR(CBOR_TYPE_UNSIGNED, p->pledge);+		}+		{+			BUILDER_APPEND_CBOR(CBOR_TYPE_UNSIGNED, p->cost);+		}+		{+			BUILDER_APPEND_CBOR(CBOR_TYPE_TAG, 30);+			BUILDER_APPEND_CBOR(CBOR_TYPE_ARRAY, 2);+			{+				BUILDER_APPEND_CBOR(CBOR_TYPE_UNSIGNED, p->marginNumerator);+			}+			{+				BUILDER_APPEND_CBOR(CBOR_TYPE_UNSIGNED, p->marginDenominator);+			}+		}+		{+			BUILDER_APPEND_CBOR(CBOR_TYPE_BYTES, SIZEOF(p->rewardAccount));+			BUILDER_APPEND_DATA(p->rewardAccount, SIZEOF(p->rewardAccount));+		}+	}++	builder->state = TX_HASH_BUILDER_IN_CERTIFICATES_POOL;+}++void txHashBuilder_addPoolRegistrationCertificate_enterOwners(tx_hash_builder_t* builder)+{+	TRACE("txHashBuilder_addPoolRegistrationCertificate_enterOwners: %d", builder->state);++	ASSERT(builder->state == TX_HASH_BUILDER_IN_CERTIFICATES_POOL);++	{+		BUILDER_APPEND_CBOR(CBOR_TYPE_ARRAY, builder->poolCertificateData.remainingOwners);+	}++	builder->state = TX_HASH_BUILDER_IN_CERTIFICATES_POOL_OWNERS;+}++void txHashBuilder_addPoolRegistrationCertificate_addOwner(+        tx_hash_builder_t* builder,+        const uint8_t* stakingKeyHash, size_t stakingKeyHashSize+)+{+	TRACE("txHashBuilder_addPoolRegistrationCertificate_addOwner: %d", builder->state);++	ASSERT(builder->state == TX_HASH_BUILDER_IN_CERTIFICATES_POOL_OWNERS);+	ASSERT(builder->poolCertificateData.remainingOwners > 0);+	builder->poolCertificateData.remainingOwners--;++	// Bytes[poolKeyHash]+	{+		BUILDER_APPEND_CBOR(CBOR_TYPE_BYTES, stakingKeyHashSize);+		BUILDER_APPEND_DATA(stakingKeyHash, stakingKeyHashSize);+	}+}++void txHashBuilder_addPoolRegistrationCertificate_enterRelays(tx_hash_builder_t* builder)+{+	TRACE("txHashBuilder_addPoolRegistrationCertificate_enterRelays: %d", builder->state);++	// enter empty owners if none were received+	if (builder->state == TX_HASH_BUILDER_IN_CERTIFICATES_POOL)+		txHashBuilder_addPoolRegistrationCertificate_enterOwners(builder);++	ASSERT(builder->state == TX_HASH_BUILDER_IN_CERTIFICATES_POOL_OWNERS);+	
ASSERT(builder->poolCertificateData.remainingOwners == 0);++	{+		BUILDER_APPEND_CBOR(CBOR_TYPE_ARRAY, builder->poolCertificateData.remainingRelays);+	}++	builder->state = TX_HASH_BUILDER_IN_CERTIFICATES_POOL_RELAYS;+}++static void addRelay_updateState(tx_hash_builder_t* builder)+{+	switch (builder->state) {+	case TX_HASH_BUILDER_IN_CERTIFICATES_POOL:+	case TX_HASH_BUILDER_IN_CERTIFICATES_POOL_OWNERS:+		txHashBuilder_addPoolRegistrationCertificate_enterRelays(builder);+		break;++	case TX_HASH_BUILDER_IN_CERTIFICATES_POOL_RELAYS:+		break; // we want to be here++	default:+		ASSERT(false);+	}+	ASSERT(builder->state == TX_HASH_BUILDER_IN_CERTIFICATES_POOL_RELAYS);+}++void txHashBuilder_addPoolRegistrationCertificate_addRelay0(+        tx_hash_builder_t* builder,+        const uint16_t* port,+        const uint8_t* ipv4, size_t ipv4Size,+        const uint8_t* ipv6, size_t ipv6Size+)+{+	TRACE("txHashBuilder_addPoolRegistrationCertificate_addRelay0: %d", builder->state);++	addRelay_updateState(builder);+	ASSERT(builder->state == TX_HASH_BUILDER_IN_CERTIFICATES_POOL_RELAYS);+	ASSERT(builder->poolCertificateData.remainingRelays > 0);+	builder->poolCertificateData.remainingRelays--;++	ASSERT(ipv4Size < BUFFER_SIZE_PARANOIA);

shouldn't we have struct ipv4_t {ip: uint8_t[4]} for this? Similarly for ipv6
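A minimal sketch of that suggestion (names illustrative):

#include <stdint.h>

// Fixed-size wrappers so relay handlers take a typed value instead of a raw
// buffer plus size.
typedef struct {
	uint8_t ip[4];
} ipv4_t;

typedef struct {
	uint8_t ip[16];
} ipv6_t;

// addRelay0 could then accept const ipv4_t* / const ipv6_t* (NULL when absent)
// and drop the ipv4Size/ipv6Size parameters together with their size ASSERTs.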

janmazak

comment created time in 2 days


Pull request review comment vacuumlabs/ledger-app-cardano-shelley

stake pool registration

 void txHashBuilder_addCertificate_delegation( 	} } -void txHashBuilder_assertCanLeaveCertificates(tx_hash_builder_t* builder)+void txHashBuilder_addPoolRegistrationCertificate(+        tx_hash_builder_t* builder,+        const pool_registration_params_t* params,+        uint16_t numOwners, uint16_t numRelays+)+{+	TRACE("txHashBuilder_addPoolRegistrationCertificate: %d", builder->state);++	ASSERT(builder->state == TX_HASH_BUILDER_IN_CERTIFICATES);+	ASSERT(builder->remainingCertificates > 0);+	builder->remainingCertificates--;++	ASSERT(builder->poolCertificateData.remainingOwners == 0);+	builder->poolCertificateData.remainingOwners = numOwners;+	ASSERT(builder->poolCertificateData.remainingRelays == 0);+	builder->poolCertificateData.remainingRelays = numRelays;++	// Array(10)[+	//   Unsigned[3]+	//   Bytes[pool_keyhash]+	//   Bytes[vrf_keyhash]+	//   Unsigned[pledge]+	//   Unsigned[cost]+	//   Tag(30) Array(2)[+	//     Unsigned[marginDenominator]+	//     Unsigned[marginNumerator]+	//   ]+	//   Bytes[rewardAccount]++	// the array is not closed yet, we need to add owners, relays, pool metadata+	{+		const pool_registration_params_t* p = params;+		BUILDER_APPEND_CBOR(CBOR_TYPE_ARRAY, 10);+		{+			BUILDER_APPEND_CBOR(CBOR_TYPE_UNSIGNED, 3);+		}+		{+			BUILDER_APPEND_CBOR(CBOR_TYPE_BYTES, SIZEOF(p->operatorHash));+			BUILDER_APPEND_DATA(p->operatorHash, SIZEOF(p->operatorHash));+		}+		{+			BUILDER_APPEND_CBOR(CBOR_TYPE_BYTES, SIZEOF(p->vrfKeyHash));+			BUILDER_APPEND_DATA(p->vrfKeyHash, SIZEOF(p->vrfKeyHash));+		}+		{+			BUILDER_APPEND_CBOR(CBOR_TYPE_UNSIGNED, p->pledge);+		}+		{+			BUILDER_APPEND_CBOR(CBOR_TYPE_UNSIGNED, p->cost);+		}+		{+			BUILDER_APPEND_CBOR(CBOR_TYPE_TAG, 30);+			BUILDER_APPEND_CBOR(CBOR_TYPE_ARRAY, 2);+			{+				BUILDER_APPEND_CBOR(CBOR_TYPE_UNSIGNED, p->marginNumerator);+			}+			{+				BUILDER_APPEND_CBOR(CBOR_TYPE_UNSIGNED, p->marginDenominator);+			}+		}+		{+			BUILDER_APPEND_CBOR(CBOR_TYPE_BYTES, SIZEOF(p->rewardAccount));+			BUILDER_APPEND_DATA(p->rewardAccount, SIZEOF(p->rewardAccount));+		}+	}++	builder->state = TX_HASH_BUILDER_IN_CERTIFICATES_POOL;+}++void txHashBuilder_addPoolRegistrationCertificate_enterOwners(tx_hash_builder_t* builder)+{+	TRACE("txHashBuilder_addPoolRegistrationCertificate_enterOwners: %d", builder->state);++	ASSERT(builder->state == TX_HASH_BUILDER_IN_CERTIFICATES_POOL);++	{+		BUILDER_APPEND_CBOR(CBOR_TYPE_ARRAY, builder->poolCertificateData.remainingOwners);+	}++	builder->state = TX_HASH_BUILDER_IN_CERTIFICATES_POOL_OWNERS;+}++void txHashBuilder_addPoolRegistrationCertificate_addOwner(+        tx_hash_builder_t* builder,+        const uint8_t* stakingKeyHash, size_t stakingKeyHashSize+)+{+	TRACE("txHashBuilder_addPoolRegistrationCertificate_addOwner: %d", builder->state);++	ASSERT(builder->state == TX_HASH_BUILDER_IN_CERTIFICATES_POOL_OWNERS);+	ASSERT(builder->poolCertificateData.remainingOwners > 0);+	builder->poolCertificateData.remainingOwners--;++	// Bytes[poolKeyHash]+	{+		BUILDER_APPEND_CBOR(CBOR_TYPE_BYTES, stakingKeyHashSize);+		BUILDER_APPEND_DATA(stakingKeyHash, stakingKeyHashSize);+	}+}++void txHashBuilder_addPoolRegistrationCertificate_enterRelays(tx_hash_builder_t* builder)+{+	TRACE("txHashBuilder_addPoolRegistrationCertificate_enterRelays: %d", builder->state);++	// enter empty owners if none were received+	if (builder->state == TX_HASH_BUILDER_IN_CERTIFICATES_POOL)+		txHashBuilder_addPoolRegistrationCertificate_enterOwners(builder);++	ASSERT(builder->state == TX_HASH_BUILDER_IN_CERTIFICATES_POOL_OWNERS);+	
ASSERT(builder->poolCertificateData.remainingOwners == 0);++	{+		BUILDER_APPEND_CBOR(CBOR_TYPE_ARRAY, builder->poolCertificateData.remainingRelays);+	}++	builder->state = TX_HASH_BUILDER_IN_CERTIFICATES_POOL_RELAYS;+}++static void addRelay_updateState(tx_hash_builder_t* builder)+{+	switch (builder->state) {+	case TX_HASH_BUILDER_IN_CERTIFICATES_POOL:+	case TX_HASH_BUILDER_IN_CERTIFICATES_POOL_OWNERS:+		txHashBuilder_addPoolRegistrationCertificate_enterRelays(builder);+		break;++	case TX_HASH_BUILDER_IN_CERTIFICATES_POOL_RELAYS:+		break; // we want to be here++	default:+		ASSERT(false);+	}+	ASSERT(builder->state == TX_HASH_BUILDER_IN_CERTIFICATES_POOL_RELAYS);+}++void txHashBuilder_addPoolRegistrationCertificate_addRelay0(+        tx_hash_builder_t* builder,+        const uint16_t* port,+        const uint8_t* ipv4, size_t ipv4Size,+        const uint8_t* ipv6, size_t ipv6Size+)+{+	TRACE("txHashBuilder_addPoolRegistrationCertificate_addRelay0: %d", builder->state);++	addRelay_updateState(builder);+	ASSERT(builder->state == TX_HASH_BUILDER_IN_CERTIFICATES_POOL_RELAYS);

Can relay0, relay1, ... be entered in any order?

janmazak

comment created time in 2 days


Pull request review comment vacuumlabs/ledger-app-cardano-shelley

stake pool registration

 void txHashBuilder_addCertificate_delegation( 	} } -void txHashBuilder_assertCanLeaveCertificates(tx_hash_builder_t* builder)+void txHashBuilder_addPoolRegistrationCertificate(+        tx_hash_builder_t* builder,+        const pool_registration_params_t* params,+        uint16_t numOwners, uint16_t numRelays+)+{+	TRACE("txHashBuilder_addPoolRegistrationCertificate: %d", builder->state);++	ASSERT(builder->state == TX_HASH_BUILDER_IN_CERTIFICATES);+	ASSERT(builder->remainingCertificates > 0);+	builder->remainingCertificates--;++	ASSERT(builder->poolCertificateData.remainingOwners == 0);+	builder->poolCertificateData.remainingOwners = numOwners;+	ASSERT(builder->poolCertificateData.remainingRelays == 0);+	builder->poolCertificateData.remainingRelays = numRelays;++	// Array(10)[+	//   Unsigned[3]+	//   Bytes[pool_keyhash]+	//   Bytes[vrf_keyhash]+	//   Unsigned[pledge]+	//   Unsigned[cost]+	//   Tag(30) Array(2)[+	//     Unsigned[marginDenominator]+	//     Unsigned[marginNumerator]+	//   ]+	//   Bytes[rewardAccount]++	// the array is not closed yet, we need to add owners, relays, pool metadata+	{+		const pool_registration_params_t* p = params;+		BUILDER_APPEND_CBOR(CBOR_TYPE_ARRAY, 10);+		{+			BUILDER_APPEND_CBOR(CBOR_TYPE_UNSIGNED, 3);+		}+		{+			BUILDER_APPEND_CBOR(CBOR_TYPE_BYTES, SIZEOF(p->operatorHash));+			BUILDER_APPEND_DATA(p->operatorHash, SIZEOF(p->operatorHash));+		}+		{+			BUILDER_APPEND_CBOR(CBOR_TYPE_BYTES, SIZEOF(p->vrfKeyHash));+			BUILDER_APPEND_DATA(p->vrfKeyHash, SIZEOF(p->vrfKeyHash));+		}+		{+			BUILDER_APPEND_CBOR(CBOR_TYPE_UNSIGNED, p->pledge);+		}+		{+			BUILDER_APPEND_CBOR(CBOR_TYPE_UNSIGNED, p->cost);+		}+		{+			BUILDER_APPEND_CBOR(CBOR_TYPE_TAG, 30);+			BUILDER_APPEND_CBOR(CBOR_TYPE_ARRAY, 2);+			{+				BUILDER_APPEND_CBOR(CBOR_TYPE_UNSIGNED, p->marginNumerator);+			}+			{+				BUILDER_APPEND_CBOR(CBOR_TYPE_UNSIGNED, p->marginDenominator);+			}+		}+		{+			BUILDER_APPEND_CBOR(CBOR_TYPE_BYTES, SIZEOF(p->rewardAccount));+			BUILDER_APPEND_DATA(p->rewardAccount, SIZEOF(p->rewardAccount));+		}+	}++	builder->state = TX_HASH_BUILDER_IN_CERTIFICATES_POOL;+}++void txHashBuilder_addPoolRegistrationCertificate_enterOwners(tx_hash_builder_t* builder)+{+	TRACE("txHashBuilder_addPoolRegistrationCertificate_enterOwners: %d", builder->state);++	ASSERT(builder->state == TX_HASH_BUILDER_IN_CERTIFICATES_POOL);++	{+		BUILDER_APPEND_CBOR(CBOR_TYPE_ARRAY, builder->poolCertificateData.remainingOwners);+	}++	builder->state = TX_HASH_BUILDER_IN_CERTIFICATES_POOL_OWNERS;+}++void txHashBuilder_addPoolRegistrationCertificate_addOwner(+        tx_hash_builder_t* builder,+        const uint8_t* stakingKeyHash, size_t stakingKeyHashSize+)+{+	TRACE("txHashBuilder_addPoolRegistrationCertificate_addOwner: %d", builder->state);++	ASSERT(builder->state == TX_HASH_BUILDER_IN_CERTIFICATES_POOL_OWNERS);+	ASSERT(builder->poolCertificateData.remainingOwners > 0);+	builder->poolCertificateData.remainingOwners--;++	// Bytes[poolKeyHash]+	{+		BUILDER_APPEND_CBOR(CBOR_TYPE_BYTES, stakingKeyHashSize);+		BUILDER_APPEND_DATA(stakingKeyHash, stakingKeyHashSize);+	}+}++void txHashBuilder_addPoolRegistrationCertificate_enterRelays(tx_hash_builder_t* builder)+{+	TRACE("txHashBuilder_addPoolRegistrationCertificate_enterRelays: %d", builder->state);++	// enter empty owners if none were received+	if (builder->state == TX_HASH_BUILDER_IN_CERTIFICATES_POOL)+		txHashBuilder_addPoolRegistrationCertificate_enterOwners(builder);

this should assert that there were no owners to start with ...
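Presumably something along these lines in enterRelays, so owners are only auto-entered when none were announced (field names taken from the diff):

// enter empty owners if none were received
if (builder->state == TX_HASH_BUILDER_IN_CERTIFICATES_POOL) {
	ASSERT(builder->poolCertificateData.remainingOwners == 0);
	txHashBuilder_addPoolRegistrationCertificate_enterOwners(builder);
}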

janmazak

comment created time in 2 days


Pull request review comment vacuumlabs/ledger-app-cardano-shelley

stake pool registration

 typedef enum { 	SIGN_STAGE_FEE = 26, 	SIGN_STAGE_TTL = 27, 	SIGN_STAGE_CERTIFICATES = 28,-	SIGN_STAGE_WITHDRAWALS = 29,-	SIGN_STAGE_METADATA = 30,-	SIGN_STAGE_CONFIRM = 31,-	SIGN_STAGE_WITNESSES = 32,+	SIGN_STAGE_CERTIFICATES_PRC = 29, // pool registration certificate sub-machine

I would just rename it to SIGN_STAGE_CERTIFICATES_POOL

janmazak

comment created time in 2 days


Pull request review comment vacuumlabs/ledger-app-cardano-shelley

stake pool registration

 size_t str_formatTtl(uint64_t ttl, char* out, size_t outSize) 	return strlen(out); } +// returns length of the resulting string size_t str_formatMetadata(const uint8_t* metadataHash, size_t metadataHashSize, char* out, size_t outSize) { 	return encode_hex(metadataHash, metadataHashSize, out, outSize); }++// we only check if it is non-zero ASCII+void str_validateText(const uint8_t* url, size_t urlSize)+{+	ASSERT(urlSize < BUFFER_SIZE_PARANOIA);++	for (size_t i = 0; i < urlSize; i++) {+		VALIDATE(url[i] <= 127, ERR_INVALID_DATA);+		VALIDATE(url[i] > 0, ERR_INVALID_DATA);+	}+}++#ifdef DEVEL+// only used in internal device tests+size_t urlToBuffer(const char* url, uint8_t* buffer, size_t bufferSize)+{+	size_t urlLength = strlen(url);+	ASSERT(urlLength < BUFFER_SIZE_PARANOIA);+	ASSERT(bufferSize < BUFFER_SIZE_PARANOIA);+	ASSERT(bufferSize >= urlLength);++	const char* validChars = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-._~:/?#[]@!$&'()*+,;%=";

That's a good question. How do we do validation in real code? Also, looking below, maybe we should have one validateStringContainsOnlyAllowedChars(string, char_string) function
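A sketch of such a helper, reusing the VALIDATE / ERR_INVALID_DATA macros from the code under review (the name follows the comment; the signature is an assumption):

#include <stdint.h>
#include <string.h>

// Throws ERR_INVALID_DATA unless every byte of str is a non-zero character
// contained in allowedChars.
static void validateStringContainsOnlyAllowedChars(
        const uint8_t* str, size_t strSize, const char* allowedChars
)
{
	for (size_t i = 0; i < strSize; i++) {
		VALIDATE(str[i] != 0 && strchr(allowedChars, (char) str[i]) != NULL, ERR_INVALID_DATA);
	}
}

// usage in urlToBuffer:
//   validateStringContainsOnlyAllowedChars((const uint8_t*) url, urlLength, validChars);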

janmazak

comment created time in 2 days


Pull request review comment vacuumlabs/ledger-app-cardano-shelley

stake pool registration

 size_t str_formatTtl(uint64_t ttl, char* out, size_t outSize) 	return strlen(out); } +// returns length of the resulting string size_t str_formatMetadata(const uint8_t* metadataHash, size_t metadataHashSize, char* out, size_t outSize) { 	return encode_hex(metadataHash, metadataHashSize, out, outSize); }++// we only check if it is non-zero ASCII+void str_validateText(const uint8_t* url, size_t urlSize)+{+	ASSERT(urlSize < BUFFER_SIZE_PARANOIA);++	for (size_t i = 0; i < urlSize; i++) {+		VALIDATE(url[i] <= 127, ERR_INVALID_DATA);+		VALIDATE(url[i] > 0, ERR_INVALID_DATA);+	}+}++#ifdef DEVEL+// only used in internal device tests+size_t urlToBuffer(const char* url, uint8_t* buffer, size_t bufferSize)+{+	size_t urlLength = strlen(url);+	ASSERT(urlLength < BUFFER_SIZE_PARANOIA);+	ASSERT(bufferSize < BUFFER_SIZE_PARANOIA);+	ASSERT(bufferSize >= urlLength);++	const char* validChars = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-._~:/?#[]@!$&'()*+,;%=";+	size_t validCharsLength = strlen(validChars);+	for (size_t i = 0; i < urlLength; i++) {+		bool valid = false;+		for (size_t j = 0; j < validCharsLength; j++) {+			if (url[i] == validChars[j]) {+				valid = true;+				break;+			}+		}+		if (!valid)+			THROW(ERR_INVALID_DATA);++		buffer[i] = url[i];+	}++	return urlLength;+}++size_t dnsNameToBuffer(const char* dnsName, uint8_t* buffer, size_t bufferSize)+{+	size_t dnsNameLength = strlen(dnsName);+	ASSERT(dnsNameLength < BUFFER_SIZE_PARANOIA);+	ASSERT(bufferSize < BUFFER_SIZE_PARANOIA);+	ASSERT(bufferSize >= dnsNameLength);++	for (size_t i = 0; i < dnsNameLength; i++) {+		if (dnsName[i] > 127 || dnsName[i] == 0)

I would restrict this to a more reasonable range. Check the ASCII table, but I don't think anything below 32 should be allowed
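i.e. tighten the check to printable ASCII only, something like (THROW / ERR_INVALID_DATA as in the diff):

// allow only printable ASCII (32..126), rejecting control characters as well
uint8_t c = (uint8_t) dnsName[i];
if (c < 32 || c > 126)
	THROW(ERR_INVALID_DATA);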

janmazak

comment created time in 2 days


Pull request review comment vacuumlabs/cardano-hw-cli

Tx signing

+import { HARDENED_THRESHOLD, NETWORKS } from '../constants'+import NamedError from '../namedError'+import { XPubKey } from '../transaction/transaction'+import { TxCertificateKeys, _Certificate, _TxAux } from '../transaction/types'+import {+  Address,+  BIP32Path,+  HwSigningData,+  Network,+  NetworkIds,+} from '../types'+import { _AddressParameters } from './types'++const {+  getPubKeyBlake2b224Hash,+  AddressTypes,+  base58,+  bech32,+  getAddressType,+  packBootstrapAddress,+  packBaseAddress,+  getShelleyAddressNetworkId,+  packEnterpriseAddress,+  isValidBootstrapAddress,+  isValidShelleyAddress,+  addressToBuffer,+  getBootstrapAddressProtocolMagic,+} = require('cardano-crypto.js')++const isShelleyPath = (path: number[]) => path[0] - HARDENED_THRESHOLD === 1852++const isStakingPath = (path: number[]) => path[3] === 2++const encodeAddress = (address: Buffer): string => {+  const addressType = getAddressType(address)+  if (addressType === AddressTypes.BOOTSTRAP) {+    return base58.encode(address)+  }+  const addressPrefixes: {[key: number]: string} = {+    [AddressTypes.BASE]: 'addr',+    [AddressTypes.POINTER]: 'addr',+    [AddressTypes.ENTERPRISE]: 'addr',+    [AddressTypes.REWARD]: 'stake',+  }+  const isTestnet = getShelleyAddressNetworkId(address) === NetworkIds.TESTNET+  const addressPrefix = `${addressPrefixes[addressType]}${isTestnet ? '_test' : ''}`+  return bech32.encode(addressPrefix, address)+}++const getSigningPath = (+  signingFiles: HwSigningData[], i: number,+): BIP32Path | undefined => {+  if (!signingFiles.length) return undefined+  return signingFiles[i] ? signingFiles[i].path : signingFiles[0].path+}++const filterSigningFiles = (+  signingFiles: HwSigningData[],+): {paymentSigningFiles: HwSigningData[], stakeSigningFiles: HwSigningData[]} => {+  const paymentSigningFiles = signingFiles.filter(+    (signingFile) => signingFile.type === 0,

don't we have enum values for the type?
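A hypothetical sketch of that (HwSigningFileType is an illustrative name, not necessarily an existing type in the codebase):

// illustrative enum so the filters avoid the magic numbers 0/1
enum HwSigningFileType {
  Payment = 0,
  Stake = 1,
}

const paymentSigningFiles = signingFiles.filter(
  (signingFile) => signingFile.type === HwSigningFileType.Payment,
)
const stakeSigningFiles = signingFiles.filter(
  (signingFile) => signingFile.type === HwSigningFileType.Stake,
)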

PeterBenc

comment created time in 4 days


Pull request review comment vacuumlabs/cardano-hw-cli

Tx signing

+import { HARDENED_THRESHOLD, NETWORKS } from '../constants'+import NamedError from '../namedError'+import { XPubKey } from '../transaction/transaction'+import { TxCertificateKeys, _Certificate, _TxAux } from '../transaction/types'+import {+  Address,+  BIP32Path,+  HwSigningData,+  Network,+  NetworkIds,+} from '../types'+import { _AddressParameters } from './types'++const {+  getPubKeyBlake2b224Hash,+  AddressTypes,+  base58,+  bech32,+  getAddressType,+  packBootstrapAddress,+  packBaseAddress,+  getShelleyAddressNetworkId,+  packEnterpriseAddress,+  isValidBootstrapAddress,+  isValidShelleyAddress,+  addressToBuffer,+  getBootstrapAddressProtocolMagic,+} = require('cardano-crypto.js')++const isShelleyPath = (path: number[]) => path[0] - HARDENED_THRESHOLD === 1852++const isStakingPath = (path: number[]) => path[3] === 2++const encodeAddress = (address: Buffer): string => {+  const addressType = getAddressType(address)+  if (addressType === AddressTypes.BOOTSTRAP) {+    return base58.encode(address)+  }+  const addressPrefixes: {[key: number]: string} = {+    [AddressTypes.BASE]: 'addr',+    [AddressTypes.POINTER]: 'addr',+    [AddressTypes.ENTERPRISE]: 'addr',+    [AddressTypes.REWARD]: 'stake',+  }+  const isTestnet = getShelleyAddressNetworkId(address) === NetworkIds.TESTNET+  const addressPrefix = `${addressPrefixes[addressType]}${isTestnet ? '_test' : ''}`+  return bech32.encode(addressPrefix, address)+}++const getSigningPath = (+  signingFiles: HwSigningData[], i: number,+): BIP32Path | undefined => {+  if (!signingFiles.length) return undefined+  return signingFiles[i] ? signingFiles[i].path : signingFiles[0].path+}++const filterSigningFiles = (+  signingFiles: HwSigningData[],+): {paymentSigningFiles: HwSigningData[], stakeSigningFiles: HwSigningData[]} => {+  const paymentSigningFiles = signingFiles.filter(+    (signingFile) => signingFile.type === 0,+  )+  const stakeSigningFiles = signingFiles.filter(+    (signingFile) => signingFile.type === 1,+  )+  return {+    paymentSigningFiles,+    stakeSigningFiles,+  }+}++const findSigningPath = (+  certPubKeyHash: Buffer, stakingSigningFiles: HwSigningData[],+): BIP32Path | undefined => {+  const signingFile = stakingSigningFiles.find((file) => {+    const { pubKey } = XPubKey(file.cborXPubKeyHex)+    const pubKeyHash = getPubKeyBlake2b224Hash(pubKey)+    return !Buffer.compare(pubKeyHash, certPubKeyHash)+  })+  return signingFile?.path+}++const txHasPoolPoolRegistrationCert = (+  certs: _Certificate[],+): boolean => certs.some(+  ({ type }) => type === TxCertificateKeys.STAKEPOOL_REGISTRATION,+)++const validateTx = (+  txAux: _TxAux, paymentSigningFiles: HwSigningData[], stakeSigningFiles: HwSigningData[],+): void => {+  if (!txAux.inputs.length) throw NamedError('MissingInputError')+  if (!txAux.outputs.length) throw NamedError('MissingOutputError')+  if (paymentSigningFiles.length > txAux.inputs.length) {+    throw NamedError('TooManySigningFilesError')+  }+  const requireStakingSigningFile = !!(txAux.certificates.length + txAux.withdrawals.length)+  if (+    requireStakingSigningFile && !stakeSigningFiles.length+  ) throw NamedError('MissingStakingSigningFileError')+}++const validateWitnessing = (+  txAux: _TxAux, signingFiles: HwSigningData[],+): void => {+  const {+    paymentSigningFiles,+    stakeSigningFiles,+  } = filterSigningFiles(signingFiles)+  validateTx(txAux, paymentSigningFiles, stakeSigningFiles)+  if (!txHasPoolPoolRegistrationCert(txAux.certificates)) return++  if (txAux.certificates.length !== 1) throw 
NamedError('MultipleCertificatesWithPoolRegError')+  if (txAux.withdrawals.length) throw NamedError('WithdrawalIncludedWithPoolRegError')+  if (paymentSigningFiles.length) throw NamedError('PaymentFileInlucedWithPoolRegError')+  if (stakeSigningFiles.length !== 1) throw NamedError('MultipleStakingSigningFilesWithPoolRegError')+}++const validateSigning = (+  txAux: _TxAux, signingFiles: HwSigningData[],+): void => {+  const {+    paymentSigningFiles,+    stakeSigningFiles,+  } = filterSigningFiles(signingFiles)+  if (txHasPoolPoolRegistrationCert(txAux.certificates)) throw NamedError('CantSignTxWithPoolReg')+  validateTx(txAux, paymentSigningFiles, stakeSigningFiles)+  if (!paymentSigningFiles.length) throw NamedError('MissingPaymentSigningFileError')+}++const _packBootStrapAddress = (+  file: HwSigningData, network: Network,+) => {

should we add return types to these helpers?

PeterBenc

comment created time in 4 days


Pull request review comment vacuumlabs/cardano-hw-cli

Tx signing

+import { HARDENED_THRESHOLD, NETWORKS } from '../constants'+import NamedError from '../namedError'+import { XPubKey } from '../transaction/transaction'+import { TxCertificateKeys, _Certificate, _TxAux } from '../transaction/types'+import {+  Address,+  BIP32Path,+  HwSigningData,+  Network,+  NetworkIds,+} from '../types'+import { _AddressParameters } from './types'++const {+  getPubKeyBlake2b224Hash,+  AddressTypes,+  base58,+  bech32,+  getAddressType,+  packBootstrapAddress,+  packBaseAddress,+  getShelleyAddressNetworkId,+  packEnterpriseAddress,+  isValidBootstrapAddress,+  isValidShelleyAddress,+  addressToBuffer,+  getBootstrapAddressProtocolMagic,+} = require('cardano-crypto.js')++const isShelleyPath = (path: number[]) => path[0] - HARDENED_THRESHOLD === 1852++const isStakingPath = (path: number[]) => path[3] === 2++const encodeAddress = (address: Buffer): string => {+  const addressType = getAddressType(address)+  if (addressType === AddressTypes.BOOTSTRAP) {+    return base58.encode(address)+  }+  const addressPrefixes: {[key: number]: string} = {+    [AddressTypes.BASE]: 'addr',+    [AddressTypes.POINTER]: 'addr',+    [AddressTypes.ENTERPRISE]: 'addr',+    [AddressTypes.REWARD]: 'stake',+  }+  const isTestnet = getShelleyAddressNetworkId(address) === NetworkIds.TESTNET+  const addressPrefix = `${addressPrefixes[addressType]}${isTestnet ? '_test' : ''}`+  return bech32.encode(addressPrefix, address)+}++const getSigningPath = (+  signingFiles: HwSigningData[], i: number,+): BIP32Path | undefined => {+  if (!signingFiles.length) return undefined+  return signingFiles[i] ? signingFiles[i].path : signingFiles[0].path+}++const filterSigningFiles = (+  signingFiles: HwSigningData[],+): {paymentSigningFiles: HwSigningData[], stakeSigningFiles: HwSigningData[]} => {+  const paymentSigningFiles = signingFiles.filter(+    (signingFile) => signingFile.type === 0,+  )+  const stakeSigningFiles = signingFiles.filter(+    (signingFile) => signingFile.type === 1,+  )+  return {+    paymentSigningFiles,+    stakeSigningFiles,+  }+}++const findSigningPath = (+  certPubKeyHash: Buffer, stakingSigningFiles: HwSigningData[],+): BIP32Path | undefined => {+  const signingFile = stakingSigningFiles.find((file) => {+    const { pubKey } = XPubKey(file.cborXPubKeyHex)+    const pubKeyHash = getPubKeyBlake2b224Hash(pubKey)+    return !Buffer.compare(pubKeyHash, certPubKeyHash)+  })+  return signingFile?.path+}++const txHasPoolPoolRegistrationCert = (+  certs: _Certificate[],+): boolean => certs.some(+  ({ type }) => type === TxCertificateKeys.STAKEPOOL_REGISTRATION,+)++const validateTx = (+  txAux: _TxAux, paymentSigningFiles: HwSigningData[], stakeSigningFiles: HwSigningData[],+): void => {+  if (!txAux.inputs.length) throw NamedError('MissingInputError')+  if (!txAux.outputs.length) throw NamedError('MissingOutputError')+  if (paymentSigningFiles.length > txAux.inputs.length) {+    throw NamedError('TooManySigningFilesError')+  }+  const requireStakingSigningFile = !!(txAux.certificates.length + txAux.withdrawals.length)+  if (+    requireStakingSigningFile && !stakeSigningFiles.length+  ) throw NamedError('MissingStakingSigningFileError')+}++const validateWitnessing = (+  txAux: _TxAux, signingFiles: HwSigningData[],+): void => {+  const {+    paymentSigningFiles,+    stakeSigningFiles,+  } = filterSigningFiles(signingFiles)+  validateTx(txAux, paymentSigningFiles, stakeSigningFiles)+  if (!txHasPoolPoolRegistrationCert(txAux.certificates)) return++  if (txAux.certificates.length !== 1) throw 
NamedError('MultipleCertificatesWithPoolRegError')+  if (txAux.withdrawals.length) throw NamedError('WithdrawalIncludedWithPoolRegError')+  if (paymentSigningFiles.length) throw NamedError('PaymentFileInlucedWithPoolRegError')+  if (stakeSigningFiles.length !== 1) throw NamedError('MultipleStakingSigningFilesWithPoolRegError')+}++const validateSigning = (+  txAux: _TxAux, signingFiles: HwSigningData[],+): void => {+  const {+    paymentSigningFiles,+    stakeSigningFiles,+  } = filterSigningFiles(signingFiles)+  if (txHasPoolPoolRegistrationCert(txAux.certificates)) throw NamedError('CantSignTxWithPoolReg')+  validateTx(txAux, paymentSigningFiles, stakeSigningFiles)+  if (!paymentSigningFiles.length) throw NamedError('MissingPaymentSigningFileError')+}++const _packBootStrapAddress = (+  file: HwSigningData, network: Network,+) => {+  const { pubKey, chainCode } = XPubKey(file.cborXPubKeyHex)+  const xPubKey = Buffer.concat([pubKey, chainCode])+  const address: Buffer = packBootstrapAddress(+    file.path,+    xPubKey,+    undefined, // passphrase is undefined for derivation scheme v2+    2, // derivation scheme is always 2 for hw wallets+    network.protocolMagic,+  )+  return {+    address,+    addressType: getAddressType(address),+    paymentPath: file.path,+  }+}++const _packBaseAddress = (+  changeOutputFiles: HwSigningData[], network: Network,+) => {+  const stakePathFile = changeOutputFiles.find(({ path }) => isStakingPath(path))+  const paymentPathFile = changeOutputFiles.find(({ path }) => !isStakingPath(path))+  if (!stakePathFile || !paymentPathFile) return undefined

is undefined a reasonable return value?

PeterBenc

comment created time in 4 days


Pull request review comment vacuumlabs/cardano-hw-cli

Tx signing

+import { HARDENED_THRESHOLD, NETWORKS } from '../constants'+import NamedError from '../namedError'+import { XPubKey } from '../transaction/transaction'+import { TxCertificateKeys, _Certificate, _TxAux } from '../transaction/types'+import {+  Address,+  BIP32Path,+  HwSigningData,+  Network,+  NetworkIds,+} from '../types'+import { _AddressParameters } from './types'++const {+  getPubKeyBlake2b224Hash,+  AddressTypes,+  base58,+  bech32,+  getAddressType,+  packBootstrapAddress,+  packBaseAddress,+  getShelleyAddressNetworkId,+  packEnterpriseAddress,+  isValidBootstrapAddress,+  isValidShelleyAddress,+  addressToBuffer,+  getBootstrapAddressProtocolMagic,+} = require('cardano-crypto.js')++const isShelleyPath = (path: number[]) => path[0] - HARDENED_THRESHOLD === 1852++const isStakingPath = (path: number[]) => path[3] === 2++const encodeAddress = (address: Buffer): string => {+  const addressType = getAddressType(address)+  if (addressType === AddressTypes.BOOTSTRAP) {+    return base58.encode(address)+  }+  const addressPrefixes: {[key: number]: string} = {+    [AddressTypes.BASE]: 'addr',+    [AddressTypes.POINTER]: 'addr',+    [AddressTypes.ENTERPRISE]: 'addr',+    [AddressTypes.REWARD]: 'stake',+  }+  const isTestnet = getShelleyAddressNetworkId(address) === NetworkIds.TESTNET+  const addressPrefix = `${addressPrefixes[addressType]}${isTestnet ? '_test' : ''}`+  return bech32.encode(addressPrefix, address)+}++const getSigningPath = (+  signingFiles: HwSigningData[], i: number,+): BIP32Path | undefined => {+  if (!signingFiles.length) return undefined+  return signingFiles[i] ? signingFiles[i].path : signingFiles[0].path+}++const filterSigningFiles = (+  signingFiles: HwSigningData[],+): {paymentSigningFiles: HwSigningData[], stakeSigningFiles: HwSigningData[]} => {+  const paymentSigningFiles = signingFiles.filter(+    (signingFile) => signingFile.type === 0,+  )+  const stakeSigningFiles = signingFiles.filter(+    (signingFile) => signingFile.type === 1,+  )+  return {+    paymentSigningFiles,+    stakeSigningFiles,+  }+}++const findSigningPath = (+  certPubKeyHash: Buffer, stakingSigningFiles: HwSigningData[],+): BIP32Path | undefined => {+  const signingFile = stakingSigningFiles.find((file) => {+    const { pubKey } = XPubKey(file.cborXPubKeyHex)+    const pubKeyHash = getPubKeyBlake2b224Hash(pubKey)+    return !Buffer.compare(pubKeyHash, certPubKeyHash)+  })+  return signingFile?.path+}++const txHasPoolPoolRegistrationCert = (+  certs: _Certificate[],+): boolean => certs.some(+  ({ type }) => type === TxCertificateKeys.STAKEPOOL_REGISTRATION,+)++const validateTx = (+  txAux: _TxAux, paymentSigningFiles: HwSigningData[], stakeSigningFiles: HwSigningData[],+): void => {+  if (!txAux.inputs.length) throw NamedError('MissingInputError')+  if (!txAux.outputs.length) throw NamedError('MissingOutputError')+  if (paymentSigningFiles.length > txAux.inputs.length) {+    throw NamedError('TooManySigningFilesError')+  }+  const requireStakingSigningFile = !!(txAux.certificates.length + txAux.withdrawals.length)+  if (+    requireStakingSigningFile && !stakeSigningFiles.length+  ) throw NamedError('MissingStakingSigningFileError')+}++const validateWitnessing = (+  txAux: _TxAux, signingFiles: HwSigningData[],+): void => {+  const {+    paymentSigningFiles,+    stakeSigningFiles,+  } = filterSigningFiles(signingFiles)+  validateTx(txAux, paymentSigningFiles, stakeSigningFiles)+  if (!txHasPoolPoolRegistrationCert(txAux.certificates)) return++  if (txAux.certificates.length !== 1) throw 
NamedError('MultipleCertificatesWithPoolRegError')+  if (txAux.withdrawals.length) throw NamedError('WithdrawalIncludedWithPoolRegError')+  if (paymentSigningFiles.length) throw NamedError('PaymentFileInlucedWithPoolRegError')+  if (stakeSigningFiles.length !== 1) throw NamedError('MultipleStakingSigningFilesWithPoolRegError')+}++const validateSigning = (+  txAux: _TxAux, signingFiles: HwSigningData[],+): void => {+  const {+    paymentSigningFiles,+    stakeSigningFiles,+  } = filterSigningFiles(signingFiles)+  if (txHasPoolPoolRegistrationCert(txAux.certificates)) throw NamedError('CantSignTxWithPoolReg')+  validateTx(txAux, paymentSigningFiles, stakeSigningFiles)+  if (!paymentSigningFiles.length) throw NamedError('MissingPaymentSigningFileError')+}++const _packBootStrapAddress = (+  file: HwSigningData, network: Network,+) => {+  const { pubKey, chainCode } = XPubKey(file.cborXPubKeyHex)+  const xPubKey = Buffer.concat([pubKey, chainCode])+  const address: Buffer = packBootstrapAddress(+    file.path,+    xPubKey,+    undefined, // passphrase is undefined for derivation scheme v2+    2, // derivation scheme is always 2 for hw wallets+    network.protocolMagic,+  )+  return {+    address,+    addressType: getAddressType(address),+    paymentPath: file.path,+  }+}++const _packBaseAddress = (+  changeOutputFiles: HwSigningData[], network: Network,+) => {+  const stakePathFile = changeOutputFiles.find(({ path }) => isStakingPath(path))+  const paymentPathFile = changeOutputFiles.find(({ path }) => !isStakingPath(path))+  if (!stakePathFile || !paymentPathFile) return undefined+  const { pubKey: stakePubKey } = XPubKey(stakePathFile.cborXPubKeyHex)+  const { pubKey: paymentPubKey } = XPubKey(paymentPathFile.cborXPubKeyHex)+  const address: Buffer = packBaseAddress(+    getPubKeyBlake2b224Hash(paymentPubKey),+    getPubKeyBlake2b224Hash(stakePubKey),+    network.networkId,+  )+  return {+    address,+    addressType: getAddressType(address),+    paymentPath: paymentPathFile.path,+    stakePath: stakePathFile.path,+  }+}++const _packEnterpriseAddress = (+  changeOutputFile: HwSigningData, network: Network,+) => {+  const { pubKey: paymentPubKey } = XPubKey(changeOutputFile.cborXPubKeyHex)+  const address: Buffer = packEnterpriseAddress(+    getPubKeyBlake2b224Hash(paymentPubKey),+    network.networkId,+  )+  return {+    address,+    addressType: getAddressType(address),+    paymentPath: changeOutputFile.path,+  }+}++const getChangeAddress = (+  changeOutputFiles: HwSigningData[],+  outputAddress: Buffer,+  network: Network,+): _AddressParameters | undefined => {+  const addressType = getAddressType(outputAddress)+  try {+    switch (addressType) {+      case AddressTypes.BOOTSTRAP: return _packBootStrapAddress(+        changeOutputFiles[0], network,+      )+      case AddressTypes.BASE: return _packBaseAddress(+        changeOutputFiles, network,

I don't like the asymmetry here

PeterBenc

comment created time in 4 days


Pull request review commentvacuumlabs/cardano-hw-cli

Tx signing

+/* eslint-disable no-console */ import { parse } from './command-parser/commandParser' import { CommandExecutor } from './commandExecutor'+import { getErrorTranslation } from './errors'+import NamedError from './namedError' import { CommandType } from './types' -const parsedArgs = parse(process.argv)-CommandExecutor().then(async (commandExecutor: any) => {+const executeCommand = async (): Promise<void> => {+  const { parser, parsedArgs } = parse(process.argv)+  if (!Object.values(CommandType).includes(parsedArgs.command)) {+    parser.print_help()+    return+  }+  const commandExecutor = await CommandExecutor()

why is CommandExecutor async? :thinking:

PeterBenc

comment created time in 4 days


Pull request review commentvacuumlabs/cardano-hw-cli

Tx signing

+import { isArrayOfType } from '../guards'+import {+  TxInput,+  TxOutput,+  TxStakingKeyRegistrationCert,+  TxStakingKeyDeregistrationCert,+  TxDelegationCert,+  TxCertificateKeys,+  TxStakepoolRegistrationCert,+  TxMultiHostNameRelay,+  TxSingleHostIPRelay,+  TxSingleHostNameRelay,+  TxRelayTypes,+} from './types'++export const isTxInput = (+  test: any,+): test is TxInput => test.length === 2+  && Buffer.isBuffer(test[0])+  && Number.isInteger(test[1])++export const isTxOutput = (+  test: any,+): test is TxOutput => test.length === 2+  && Buffer.isBuffer(test[0])+  && Number.isInteger(test[1])++export const isWithdrawalsMap = (+  test: any,+): test is Map<Buffer, number> => test instanceof Map+  && Array.from(test.keys()).every((value) => Buffer.isBuffer(value))+  && Array.from(test.values()).every((value) => Number.isInteger(value))++export const isTxStakingKeyRegistrationCert = (+  test: any,+): test is TxStakingKeyRegistrationCert => Array.isArray(test)+  && test.length === 2+  && test[0] === TxCertificateKeys.STAKING_KEY_REGISTRATION+  && Array.isArray(test[1])+  && test[1].length === 2+  && Number.isInteger(test[1][0])+  && Buffer.isBuffer(test[1][1])++export const isStakingKeyDeregistrationCert = (+  test: any,+): test is TxStakingKeyDeregistrationCert => Array.isArray(test)+  && test.length === 2+  && test[0] === TxCertificateKeys.STAKING_KEY_DEREGISTRATION+  && Array.isArray(test[1])+  && test[1].length === 2+  && Number.isInteger(test[1][0])+  && Buffer.isBuffer(test[1][1])++export const isDelegationCert = (+  test: any,+): test is TxDelegationCert => Array.isArray(test)+  && test.length === 3+  && test[0] === TxCertificateKeys.DELEGATION+  && Array.isArray(test[1])+  && test[1].length === 2+  && Number.isInteger(test[1][0])+  && Buffer.isBuffer(test[1][1])+  && Buffer.isBuffer(test[2])++export const isTxSingleHostIPRelay = (+  test: any,+): test is TxSingleHostIPRelay => Array.isArray(test)+  && test.length <= 4+  && test[0] === TxRelayTypes.SINGLE_HOST_IP+  && (test[1] === null || Number.isInteger(test[1]))+  && (test[2] === null || Buffer.isBuffer(test[2]))+  && (test[3] === null || Buffer.isBuffer(test[3]))++export const isTxSingleHostNameRelay = (+  test: any,+): test is TxSingleHostNameRelay => Array.isArray(test)+  && test.length === 3+  && test[0] === TxRelayTypes.SINGLE_HOST_NAME+  && Number.isInteger(test[1])+  && typeof test[2] === 'string'++export const isTxMultiHostNameRelay = (+  test: any,+): test is TxMultiHostNameRelay => Array.isArray(test)+  && test.length === 2+  && test[0] === TxRelayTypes.MULTI_HOST_NAME+  && typeof test[1] === 'string'++const isMargin = (test: any) => typeof test === 'object'+  && 'value' in test+  && 0 in test.value+  && Number.isInteger(test.value[0])+  && 1 in test.value+  && Number.isInteger(test.value[1])++const isMetaData = (test: any) => Array.isArray(test)+  && test.length === 2+  && typeof test[0] === 'string'+  && Buffer.isBuffer(test[1])++export const isStakepoolRegistrationCert = (

I wonder whether it actually makes sense to have these as guards, or whether it would be better to incorporate them into parseXyz functions. I think I would prefer the second option.

PeterBenc

comment created time in 4 days
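A minimal sketch of the second option mentioned above, assuming a hypothetical parseTxInput helper and reusing the NamedError factory and the TxInputParseError name that appear elsewhere in this PR; the shape check moves inside the parser, and invalid data fails loudly instead of returning false:

const parseTxInput = (data: any): TxInput => {
  // shape check equivalent to the isTxInput guard, but throwing on failure
  const isValid = Array.isArray(data)
    && data.length === 2
    && Buffer.isBuffer(data[0])
    && Number.isInteger(data[1])
  if (!isValid) throw NamedError('TxInputParseError')
  return data as TxInput
}

Callers then get a typed TxInput or a named error in one step, instead of combining a separate guard with a separate throw.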


Pull request review commentvacuumlabs/cardano-hw-cli

Tx signing

+/* eslint-disable max-len */+const errors: {[key: string]: ({ message } : { message?: string }) => string} = {+  TrezorSignTxError: () => 'TrezorSignTxError: Error occured while signing the transaction with Trezor',+  HwTransportNotFoundError: () => 'HwTransportNotFoundError: Error occured while trying to find hw transport, make sure Ledger or Trezor is connected to you computer',+  InvalidPathError: ({ message }) => `InvalidPathError: can not parse path: ${message}`,+  InvalidFileTypeError: ({ message }) => `InvalidFileTypeError: Invalid file type of hw-signing-file at path: ${message}`,+  InvalidHwSigningFileError: ({ message }) => `InvalidHwSigningFileError: Invalid file contents of hw-signing-file at ${message}`,+  InvalidTxBodyFileError: ({ message }) => `InvalidTxBodyFileError: Invalid file contents of tx-body-file at ${message}`,+  TxSerializationMismatchError: () => 'TxSerializationMismatchError: Tx serialization mismatch',+  MissingHwSigningDataAtPathError: ({ message }) => `MissingHwSigningDataAtPathError: Can not find hw signing data with path ${message}`,+  MultipleWitnessesError: () => 'MultipleWitnessesError: Multiple witnesses found',+  UndefinedCommandError: () => 'UndefinedCommandError: command undefined',+  MissingSigningFileError: () => 'MissingSigningFileError: missing signing file',+  UnknownCertificateTypeError: () => 'UnknownCertificateTypeError: unknown certificate type',+  MultipleCertificatesWithPoolRegError: () => 'MultipleCertificatesWithPoolRegError: Multiple pool registration certificates found, expected one',+  WithdrawalIncludedWithPoolRegError: () => 'WithdrawalIncludedWithPoolRegError: Withdrawal certificate and pool registration certificate found, expected one',+  PaymentFileInlucedWithPoolRegError: () => 'PaymentFileInlucedWithPoolRegError: Unexpected payment hardware signing file with pool registration certificate found',+  MultipleStakingSigningFilesWithPoolRegError: () => 'MultipleStakingSigningFilesWithPoolRegError: Multiple staking signing files with pool registration certificate found, expected only one staking signing file',+  MissingPaymentSigningFileError: () => 'MissingPaymentSigningFileError: Missing payment hardware signing file',+  TooManySigningFilesError: () => 'TooManySigningFilesError: Too many signing files',+  MissingStakingSigningFileError: () => 'MissingStakingSigningFileError',+  MissingInputError: () => 'MissingInputError: Missing input',+  MissingOutputError: () => 'MissingOutputError: Missing output',+  TrezorError: () => 'TrezorError: Trezor operation failed, please make sure you are using the latest version of Trezor firmware',+  TxInputParseError: () => 'TxInputParseError: Failed to parse input',+  TxOutputParseError: () => 'TxOutputParseError: Failed to parse output',+  WithrawalsParseError: () => 'WithrawalsParseError: Failed to parse withdrawals',+  TxStakingKeyRegistrationCertParseError: () => 'TxStakingKeyRegistrationCertParseError: Failed to parse staking key registration certificate',+  TxStakingKeyDeregistrationCertParseError: () => 'TxStakingKeyDeregistrationCertParseError: Failed to parse staking key deregistration certificate',+  TxDelegationCertParseError: () => 'TxDelegationCertParseError: Failed to parse delegation certificate',+  TxStakepoolRegistrationCertParseError: () => 'TxStakepoolRegistrationCertParseError: Failed to parse stakepool registration certificate',+  TxSingleHostIPRelayParseError: () => 'TxSingleHostIPRelayParseError: Failed to parse single host IP relay',+  TxSingleHostNameRelayParseError: () 
=> 'TxSingleHostNameRelayParseError: Failed to parse single host name relay',+  TxMultiHostNameRelayParseError: () => 'TxMultiHostNameRelayParseError: Failed to parse multi host name relay',+  MissingSigningFileForCertficateError: () => 'MissingSigningFileForCertficateError: Missing signing file for certficate',+  OwnerMultipleTimesInTxError: () => 'OwnerMultipleTimesInTxError: Owner multiple times in tx',+  UnsupportedRelayTypeError: () => 'UnsupportedRelayTypeError: Unsupported relay type',+  UnknownCertificateError: () => 'UnknownCertificateError: Unknown certificate',+  UnsupportedCertificateTypeError: () => 'UnsupportedCertificateTypeError: Unsupported certificate type',+  MissingSigningFileForWithdrawalError: () => 'MissingSigningFileForWithdrawalError: Missing signing file for withdrawal',+}++const getErrorTranslation = (+  error: Error,+): string => {+  const translation = errors[error.name]+  if (translation !== undefined) {+    return translation(error)+  }++  return 'UknownError: An unkwown error has occured'

The default message isn't very helpful.

PeterBenc

comment created time in 4 days
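One hedged sketch of a more informative fallback, reusing the getErrorTranslation shape from the diff above (the exact wording is illustrative, not the PR's actual code): surface the original error name and message instead of a fixed string.

const getErrorTranslation = (error: Error): string => {
  const translation = errors[error.name]
  if (translation !== undefined) {
    return translation(error)
  }
  // unknown error: at least show what we actually caught
  return `UnknownError: ${error.name}: ${error.message}`
}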


Pull request review commentvacuumlabs/cardano-hw-cli

Tx signing

+/* eslint-disable max-len */+const errors: {[key: string]: ({ message } : { message?: string }) => string} = {

Can't NamedError take an enum? It would be way easier to validate that we have the correct entries.

PeterBenc

comment created time in 4 days
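A rough sketch of what that could look like, assuming a hypothetical ErrorName enum shared by NamedError and the translation map; with a mapped type over the enum, the compiler rejects missing or misspelled entries:

enum ErrorName {
  TrezorSignTxError = 'TrezorSignTxError',
  MissingInputError = 'MissingInputError',
  MissingOutputError = 'MissingOutputError',
  // ...the remaining error names
}

const errors: {[key in ErrorName]: ({ message }: { message?: string }) => string} = {
  // the mapped type forces exactly one entry per enum member
  [ErrorName.TrezorSignTxError]: () => 'TrezorSignTxError: Error occured while signing the transaction with Trezor',
  [ErrorName.MissingInputError]: () => 'MissingInputError: Missing input',
  [ErrorName.MissingOutputError]: () => 'MissingOutputError: Missing output',
}

NamedError(name: ErrorName) would then only accept names that are guaranteed to have a translation.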


Pull request review commentvacuumlabs/ledger-app-cardano-shelley

[WIP] Support pool registration certificates

 static void signTx_handleCertificate_ui_runStep()  		case CERTIFICATE_TYPE_STAKE_DELEGATION: 			snprintf(title, SIZEOF(title), "Delegate stake to pool");-			encode_hex(-			        ctx->stageData.certificate.poolKeyHash, SIZEOF(ctx->stageData.certificate.poolKeyHash),-			        details, SIZEOF(details)-			);+			size_t length = encode_hex(+			                        ctx->stageData.certificate.poolKeyHash, SIZEOF(ctx->stageData.certificate.poolKeyHash),+			                        details, SIZEOF(details)+			                );+			ASSERT(length == strlen(details));+			ASSERT(length == 2 * SIZEOF(ctx->stageData.certificate.poolKeyHash)); 			break; +		case CERTIFICATE_TYPE_STAKE_POOL_REGISTRATION:+			snprintf(title, SIZEOF(title), "Register");+			snprintf(details, SIZEOF(details), "stake pool");+			UI_STEP_JUMP(HANDLE_CERTIFICATE_STEP_RESPOND);

Does this really work this way? I don't remember exactly how the macro was supposed to be used, and I don't like that we are formatting something without uiDisplay ...

janmazak

comment created time in 16 days

Pull request review commentvacuumlabs/ledger-app-cardano-shelley

[WIP] Support pool registration certificates

 #include "addressUtilsShelley.h" #include "addressUtilsByron.h"-#include "securityPolicy.h" #include "bip44.h"+#include "signTx.h"

why do we need signTx.h here?

janmazak

comment created time in 16 days

Pull request review commentvacuumlabs/ledger-app-cardano-shelley

[WIP] Support pool registration certificates

 security_policy_t policyForSignTxInput()  // For each transaction (third-party) address output security_policy_t policyForSignTxOutputAddress(+        bool includePoolRegistrationCertificate,

this is bad naming. I think we should use something more like "isSigningPoolRegistrationAsOwner"

janmazak

comment created time in 16 days

Pull request review commentvacuumlabs/ledger-app-cardano-shelley

[WIP] Support pool registration certificates

 void txHashBuilder_addCertificate_delegation(         const uint8_t* stakingKeyHash, size_t stakingKeyHashSize,         const uint8_t* poolKeyHash, size_t poolKeyHashSize );+void txHashBuilder_addPoolRegistrationCertificate(+        tx_hash_builder_t* builder,+        const uint8_t* poolKeyHash, size_t poolKeyHashSize,+        const uint8_t* vrfKeyHash, size_t vrfKeyHashSize,+        uint64_t pledge, uint64_t cost,+        uint64_t marginNumerator, uint64_t marginDenominator,+        const uint8_t* rewardAccount, size_t rewardAccountSize,+        uint16_t numOwners, uint16_t numRelays+);+void txHashBuilder_addPoolRegistrationCertificate_enterOwners(tx_hash_builder_t* builder);+void txHashBuilder_addPoolRegistrationCertificate_addOwner(+        tx_hash_builder_t* builder,+        const uint8_t* stakingKeyHash, size_t stakingKeyHashSize+);+void txHashBuilder_addPoolRegistrationCertificate_enterRelays(tx_hash_builder_t* builder);+void txHashBuilder_addPoolRegistrationCertificate_addRelay0(

Can you explain what relay0, 1, and 2 are, and why they are different?

janmazak

comment created time in 16 days

Pull request review commentvacuumlabs/ledger-app-cardano-shelley

[WIP] Support pool registration certificates

 void ui_displayNetworkParamsScreen( 	        callback 	); }++void ui_displayHexBufferScreen(+        const char* screenHeader,+        const uint8_t* buffer, size_t bufferSize,+        ui_callback_fn_t callback+)+{+	ASSERT(strlen(screenHeader) > 0);+	ASSERT(strlen(screenHeader) < BUFFER_SIZE_PARANOIA);+	ASSERT(bufferSize > 0);+	ASSERT(bufferSize <= 32); // TODO this is used for hashes (nothing else?), all are <= 32 bytes++	char bufferHex[2 * 32 + 1];+	explicit_bzero(bufferHex, SIZEOF(bufferHex));++	size_t length = encode_hex(+	                        buffer, bufferSize,+	                        bufferHex, SIZEOF(bufferHex)+	                );+	ASSERT(length == strlen(bufferHex));+	ASSERT(length == 2 * bufferSize);++	ui_displayPaginatedText(+	        screenHeader,+	        bufferHex,+	        callback+	);+}++void ui_displayMarginScreen(+        uint64_t marginNumerator, uint64_t marginDenominator,+        ui_callback_fn_t callback+)+{+	TRACE("%d %d", marginNumerator, marginDenominator);+	TRACE_BUFFER((uint8_t *) &marginNumerator, 8);+	TRACE_BUFFER((uint8_t *) &marginDenominator, 8);++	ASSERT(marginDenominator != 0);

Also assert that they are reasonable; uint64_t is way too much, especially if we are going to do *100 below.

janmazak

comment created time in 16 days


Pull request review commentvacuumlabs/ledger-app-cardano-shelley

[WIP] Support pool registration certificates

 void txHashBuilder_addCertificate_delegation(         const uint8_t* stakingKeyHash, size_t stakingKeyHashSize,         const uint8_t* poolKeyHash, size_t poolKeyHashSize );+void txHashBuilder_addPoolRegistrationCertificate(+        tx_hash_builder_t* builder,+        const uint8_t* poolKeyHash, size_t poolKeyHashSize,

this signature looks like a nightmare :-(

janmazak

comment created time in 16 days

Pull request review commentvacuumlabs/ledger-app-cardano-shelley

[WIP] Support pool registration certificates

 void ui_displayNetworkParamsScreen( 	        callback 	); }++void ui_displayHexBufferScreen(+        const char* screenHeader,+        const uint8_t* buffer, size_t bufferSize,+        ui_callback_fn_t callback+)+{+	ASSERT(strlen(screenHeader) > 0);+	ASSERT(strlen(screenHeader) < BUFFER_SIZE_PARANOIA);+	ASSERT(bufferSize > 0);+	ASSERT(bufferSize <= 32); // TODO this is used for hashes (nothing else?), all are <= 32 bytes++	char bufferHex[2 * 32 + 1];+	explicit_bzero(bufferHex, SIZEOF(bufferHex));++	size_t length = encode_hex(+	                        buffer, bufferSize,+	                        bufferHex, SIZEOF(bufferHex)+	                );+	ASSERT(length == strlen(bufferHex));+	ASSERT(length == 2 * bufferSize);++	ui_displayPaginatedText(+	        screenHeader,+	        bufferHex,+	        callback+	);+}++void ui_displayMarginScreen(+        uint64_t marginNumerator, uint64_t marginDenominator,+        ui_callback_fn_t callback+)+{+	TRACE("%d %d", marginNumerator, marginDenominator);+	TRACE_BUFFER((uint8_t *) &marginNumerator, 8);+	TRACE_BUFFER((uint8_t *) &marginDenominator, 8);++	ASSERT(marginDenominator != 0);+	ASSERT(marginNumerator <= marginDenominator);++	// TODO what are we supposed to show? does ledger even support %f? it freezes at basically all attempts to print something reasonable which uses double even in calculations+	int marginPercentage = (100 * marginNumerator) / marginDenominator;++	char marginStr[50];

We should probably also do bzero here ...

janmazak

comment created time in 16 days


Pull request review commentvacuumlabs/ledger-app-cardano-shelley

add support for stake pool registration certificates

 size_t str_formatMetadata(const uint8_t* metadataHash, size_t metadataHashSize, { 	return encode_hex(metadataHash, metadataHashSize, out, outSize); }++// TODO improve this+size_t urlToBuffer(const char* url, uint8_t* buffer, size_t bufferSize)+{+	size_t urlLength = strlen(url);+	ASSERT(urlLength < BUFFER_SIZE_PARANOIA);+	ASSERT(bufferSize < BUFFER_SIZE_PARANOIA);+	ASSERT(bufferSize >= urlLength);++	const char* validChars = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-._~:/?#[]@!$&'()*+,;%=";+	size_t validCharsLength = strlen(validChars);+	for (size_t i = 0; i < urlLength; i++) {+		bool valid = false;+		for (size_t j = 0; j < validCharsLength; j++) {+			if (url[i] == validChars[j]) {+				valid = true;+				break;+			}+		}+		if (!valid)+			THROW(ERR_INVALID_DATA);++		buffer[i] = url[i];+	}++	return urlLength;+}++// TODO improve this?+size_t dnsNameToBuffer(const char* dnsName, uint8_t* buffer, size_t bufferSize)+{+	size_t dnsNameLength = strlen(dnsName);+	ASSERT(dnsNameLength < BUFFER_SIZE_PARANOIA);+	ASSERT(bufferSize < BUFFER_SIZE_PARANOIA);+	ASSERT(bufferSize >= dnsNameLength);++	for (size_t i = 0; i < dnsNameLength; i++) {+		if (dnsName[i] > 127)

Allow only the reasonable part of ASCII - something between 32 and 126 (http://www.asciitable.com/). Basically, just validate that we can at least display it.

janmazak

comment created time in a month


issue openedvacuumlabs/pay-overview

Feature request: make distinction between vested and non-vested shares

It would be great if

  1. already vested shares were somehow highlighted in the UI
  2. the share balance calculator separated vested and non-vested shares

created time in a month

issue commentgoogle-coral/edgetpu

USB "Not enough bandwidth" after many runs of python inference

@Namburger finally reliably reproduced with this single model https://www.dropbox.com/s/4gzaxn8k3nrm4oh/poolx3r5_cos_adam_750_001.tflite?dl=0 (my previous reproductions were spread over dozens of models, so it was hard to say which one, or whether all of them, are responsible)

ppershing

comment created time in 2 months

issue commentgoogle-coral/edgetpu

USB "Not enough bandwidth" after many runs of python inference

@Namburger Unfortunately, this would be very tricky. I am basically running an architecture search, so it is very hard to tell whether the problem is in a single one of my architectures, a subset of them, or whether all of them are leaking resources in some way. In order to speed up the process -- should I be testing process restarts / model reloads, or is running one model over prolonged periods of time enough?

ppershing

comment created time in 2 months

issue commentgoogle-coral/edgetpu

What are restrictions on ResizeNearestNeighbor?

Just curious - any idea when we can expect the next compiler release?

Anyway, I manually edited the tflite & downgraded the OP version from 3 to 2 + removed half_pixel_centers, which seems to be unsupported in v2 of the op. Now the compiler complains about "Image-interpolation layer won't run precisely enough on Edge TPU". Can you give some insight into what the compiler considers precise-enough image interpolations?

As for tf2.2, the converter fails on converting the resize nearest neighbor op. I also tried with bilinear; that one works but still produces a tflite op version too high for the edgetpu compiler.

ppershing

comment created time in 2 months

issue commentgoogle-coral/edgetpu

USB "Not enough bandwidth" after many runs of python inference

I tried to reproduce this on usb2 but have failed so far. Note though that I don't have a clear reproduction on usb3 either (that is, it happens from time to time, but I am not exactly sure how to trigger it reliably; I tried cycling my quick benchmark tool many times in a loop, but the reproduction is flaky).

Do you have an idea of what to focus on? So far I cannot say whether the error is related to a) starting the process / loading the delegate many times, or b) running inference / set_tensor many times in one session.

ppershing

comment created time in 2 months

issue commentgoogle-coral/edgetpu

What are restrictions on ResizeNearestNeighbor?

  • Interesting - so the compiler is non-deterministic?
  • I am using tf nightly 2.4.0-dev20200719 for post-training quantization
  • RESIZE_NEAREST_NEIGHBOR 5 Operation version not supported basically says that the operation wasn't mapped. What operation "version" is then supported?
ppershing

comment created time in 2 months

issue commentgoogle-coral/edgetpu

USB "Not enough bandwidth" after many runs of python inference

May i ask if you are plugging the Accelerator in a usb2 or usb3 port?

Should be USB3. This is the relevant part of lsusb -t:

/:  Bus 04.Port 1: Dev 1, Class=root_hub, Driver=xhci_hcd/4p, 5000M
    |__ Port 2: Dev 3, If 0, Class=Application Specific Interface, Driver=, 5000M
ppershing

comment created time in 2 months

issue openedgoogle-coral/edgetpu

USB "Not enough bandwidth" after many runs of python inference

I have come across this several times by now.

I am trying to train many different architectures and see their accuracy on the edge TPU. For this, I run a simple inference python program multiple times, and once in a while I get

Traceback (most recent call last):
  File "/home/ppershing/miniconda3/envs/tf/lib/python3.7/site-packages/tflite_runtime/interpreter.py", line 161, in load_delegate
    delegate = Delegate(library, options)
  File "/home/ppershing/miniconda3/envs/tf/lib/python3.7/site-packages/tflite_runtime/interpreter.py", line 120, in __init__
    raise ValueError(capture.message)
ValueError

During handling of the above exception, another exception occurred:

Traceback (most recent call last):
  File "./benchmark.py", line 59, in <module>
    main()
  File "./benchmark.py", line 36, in main
    interpreter = make_interpreter(args.model)
  File "./benchmark.py", line 23, in make_interpreter
    EDGETPU_SHARED_LIB, {"device": device[0]} if device else {}
  File "/home/ppershing/miniconda3/envs/tf/lib/python3.7/site-packages/tflite_runtime/interpreter.py", line 164, in load_delegate
    library, str(e)))
ValueError: Failed to load delegate from libedgetpu.so.1

with dmesg showing

[1242023.964228] usb 4-2: reset SuperSpeed Gen 1 USB device number 14 using xhci_hcd
[1242023.984440] usb 4-2: LPM exit latency is zeroed, disabling LPM.
[1242689.790359] usb 4-2: reset SuperSpeed Gen 1 USB device number 14 using xhci_hcd
[1242689.810459] usb 4-2: LPM exit latency is zeroed, disabling LPM.
[1287628.532042] usb 4-2: reset SuperSpeed Gen 1 USB device number 14 using xhci_hcd
[1287628.556215] usb 4-2: LPM exit latency is zeroed, disabling LPM.
[1287654.272292] usb 4-2: reset SuperSpeed Gen 1 USB device number 14 using xhci_hcd
[1287654.292544] usb 4-2: LPM exit latency is zeroed, disabling LPM.
[1287940.607902] usb 4-2: reset SuperSpeed Gen 1 USB device number 14 using xhci_hcd
[1287940.632007] usb 4-2: LPM exit latency is zeroed, disabling LPM.
[1287964.823831] usb 4-2: reset SuperSpeed Gen 1 USB device number 14 using xhci_hcd
[1287964.844020] usb 4-2: LPM exit latency is zeroed, disabling LPM.
[1288246.583297] usb 4-2: reset SuperSpeed Gen 1 USB device number 14 using xhci_hcd
[1288246.603426] usb 4-2: LPM exit latency is zeroed, disabling LPM.
[1288287.543248] usb 4-2: reset SuperSpeed Gen 1 USB device number 14 using xhci_hcd
[1288287.563351] usb 4-2: LPM exit latency is zeroed, disabling LPM.
[1288564.278784] usb 4-2: reset SuperSpeed Gen 1 USB device number 14 using xhci_hcd
[1288564.302717] usb 4-2: LPM exit latency is zeroed, disabling LPM.
[1288564.302955] xhci_hcd 0000:00:14.0: Not enough bandwidth
[1288564.303042] usb 4-2: Busted HC?  Not enough HCD resources for old configuration.
[1288564.310315] usb 4-2: USB disconnect, device number 14
[1288564.506335] usb 4-2: new SuperSpeed Gen 1 USB device number 15 using xhci_hcd
[1288564.526753] usb 4-2: LPM exit latency is zeroed, disabling LPM.
[1288564.527080] usb 4-2: New USB device found, idVendor=18d1, idProduct=9302, bcdDevice= 1.00
[1288564.527084] usb 4-2: New USB device strings: Mfr=0, Product=0, SerialNumber=0
[1288564.527436] xhci_hcd 0000:00:14.0: Not enough bandwidth
[1288564.527547] usb 4-2: can't set config #1, error -12

Note that this error cannot be cleared by simply re-trying or even unplugging the device; I need to reset the whole USB hub.

created time in 2 months

issue commentgoogle-coral/edgetpu

What are restrictions on ResizeNearestNeighbor?

@Namburger sure. Here it is: https://www.dropbox.com/s/rpgwp9vv1s6zmak/pool.tflite?dl=0 Note that a similar model, without maxpool & resize (and thus with bigger intermediate tensors in the "non-residual" path), compiles fine. Also, by default the tensorflow converter produces half_pixel_centers=true, and the model did not work with it. So I changed it to false after seeing that https://github.com/guichristmann/edge-tpu-tiny-yolo used that setting successfully.

ppershing

comment created time in 2 months

issue openedgoogle-coral/edgetpu

What are restrictions on ResizeNearestNeighbor?

Documentation states that

ResizeNearestNeighbor | All | Input/output is a 3-dimensional tensor. Depending on input/output size, this operation might not be mapped to the Edge TPU to avoid loss in precision.

I have problems with the compiler crashing, and my suspicion is that it is this operation (though it could also be MaxPool2D). The interesting questions are

  • a) what combination of align_corners and half_pixel_centers does the edgeTPU compiler support?
  • b) are there any limitations on the dimensions? In particular, I think I might be hitting some maximum as I am upsampling a rather uneven shape of 1x4x834x128 with scale factor (1,2) to 1x4x1668x128

created time in 2 months

delete branch vacuumlabs/ledger-app-cardano-shelley

delete branch : fix-withdrawal

delete time in 2 months

push eventvacuumlabs/ledger-app-cardano-shelley

Rafael Korbas

commit sha da93bfa7cf3960780d14e933d49198d95cc49dc4

pass reward address instead of staking key hash alone in tx withdrawal

view details

Rafael Korbas

commit sha fc16af8569914a7c0cb6cd0f89e1fc2355895c06

Add assertion on reward address size Motivation: prevent malformed reward addresses from being passed by mistake in the future

view details

Peter Peresini

commit sha aea0452ba0ffd43f4399064e2333d89154f6112e

Merge pull request #36 from vacuumlabs/fix-withdrawal Bugfix - Pass reward address instead of staking key hash in withdrawals

view details

push time in 2 months

PR merged vacuumlabs/ledger-app-cardano-shelley

Bugfix - Pass reward address instead of staking key hash in withdrawals

@PeterBenc noticed that transactions with withdrawals were failing with Ledger.

Turns out we were serializing the withdrawals in Ledger wrong. We were passing the staking key hash instead of the reward address. Compare with trezor implementation: https://github.com/trezor/trezor-firmware/blob/8d39ba8314fe9cda420c3cdce071562584eddd2c/core/src/apps/cardano/sign_tx.py#L278

This PR fixes that. The corresponding integration tests are fixed in https://github.com/vacuumlabs/ledgerjs-cardano-shelley/pull/5
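For context, a simplified sketch of the difference on the wire, assuming a mainnet reward address (header byte 0xe1, i.e. address type 14 with network id 1) and the getPubKeyBlake2b224Hash helper from cardano-crypto.js; stakingPubKey here is a placeholder for the derived staking public key, and this mirrors the linked Trezor logic rather than the Ledger C code itself:

// before the fix: only the 28-byte staking key hash was serialized
const stakingKeyHash: Buffer = getPubKeyBlake2b224Hash(stakingPubKey)

// what the withdrawals map expects: a full reward address,
// i.e. a one-byte header followed by the staking key hash
const MAINNET_REWARD_ADDRESS_HEADER = 0xe1 // (14 << 4) | networkId
const rewardAddress = Buffer.concat([
  Buffer.from([MAINNET_REWARD_ADDRESS_HEADER]),
  stakingKeyHash,
])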

+21 -14

3 comments

4 changed files

refi93

pr closed time in 2 months

Pull request review commentvacuumlabs/ledger-app-cardano-shelley

Bugfix - Pass reward address instead of staking key hash in withdrawals

 static void signTx_handleWithdrawalAPDU(uint8_t p2, uint8_t* wireDataBuffer, siz 	}  	{-		// add to tx-		uint8_t rewardAccount[ADDRESS_KEY_HASH_LENGTH];+		uint8_t rewardAddress[1 + ADDRESS_KEY_HASH_LENGTH]; 		{-			VALIDATE(bip44_isValidStakingKeyPath(&ctx->stageData.withdrawal.path), ERR_INVALID_DATA);-			{-				write_view_t out = make_write_view(rewardAccount, rewardAccount + SIZEOF(rewardAccount));-				view_appendPublicKeyHash(&out, &ctx->stageData.withdrawal.path);-			}+			addressParams_t rewardAddressParams = {

@ppershing AFAIK there are no device tests for tx signing, only for the txHashBuilder (txHashBuilder_test.c) which tests creation of withdrawals directly from the key and value that is supposed to be passed to the tx body - so those are passing fine and @PeterBenc confirmed that. Therefore the only tests affected by these changes are the integration ones updated in the other PR

Oh, now I see it. We have a separate addressUtils_test for proper derivation and a txHashBuilder test for serialization, but this code is present only in tx signing, which we cannot unit-test on device.

refi93

comment created time in 2 months


pull request commentvacuumlabs/ledger-app-cardano-shelley

Bugfix - Pass reward address instead of staking key hash in withdrawals

Apart from the tests comment, the change looks good to me

refi93

comment created time in 2 months

Pull request review commentvacuumlabs/ledger-app-cardano-shelley

Bugfix - Pass reward address instead of staking key hash in withdrawals

 static void signTx_handleWithdrawalAPDU(uint8_t p2, uint8_t* wireDataBuffer, siz 	}  	{-		// add to tx-		uint8_t rewardAccount[ADDRESS_KEY_HASH_LENGTH];+		uint8_t rewardAddress[1 + ADDRESS_KEY_HASH_LENGTH]; 		{-			VALIDATE(bip44_isValidStakingKeyPath(&ctx->stageData.withdrawal.path), ERR_INVALID_DATA);-			{-				write_view_t out = make_write_view(rewardAccount, rewardAccount + SIZEOF(rewardAccount));-				view_appendPublicKeyHash(&out, &ctx->stageData.withdrawal.path);-			}+			addressParams_t rewardAddressParams = {

General note: I find it dangerous to use structs which might not be properly initialized (i.e., full initialization is not enforced by the compiler). We should figure out a better solution in the future...

refi93

comment created time in 2 months


pull request commentvacuumlabs/ledger-app-cardano-shelley

Bugfix - Pass reward address instead of staking key hash in withdrawals

I see no change in on-device tests. Don't we have tests for this? If yes, they shouldn't be passing right now ...

refi93

comment created time in 2 months

pull request commentvacuumlabs/ledgerjs-cardano-shelley

fix test for tx with a withdrawal

Assuming the new test vectors are correct, it looks good and does not even need a new npm release. lgtm

PeterBenc

comment created time in 2 months

push eventcardano-foundation/ledgerjs-hw-app-cardano

Peter Peresini

commit sha 15f2703a26e96eafaf3ddd55458c35a90ba83dce

v2.0.1

view details

push time in 2 months

push eventcardano-foundation/ledgerjs-hw-app-cardano

Sebastien Guillemot

commit sha 53731c08f384af7c226a37ac2a50f0d6b4f45b0d

add missing certificate enum

view details

Peter Peresini

commit sha df08b3fdb7383b1065e7ad971626430a126f98aa

Merge pull request #10 from SebastienGllmt/missing-cert-types add missing certificate enum

view details

push time in 2 months

PR merged cardano-foundation/ledgerjs-hw-app-cardano

add missing certificate enum

The certificate enum was missing.

I got these values from AdaLite, but it seems to be missing the value for the other certificate types. These look like they aren't added yet though
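For reference, a hedged sketch of what such an enum usually looks like for Shelley certificates; the numeric values follow the Shelley CDDL ordering, but the actual identifier names in the library may differ:

export const enum CertificateTypes {
  STAKE_REGISTRATION = 0,
  STAKE_DEREGISTRATION = 1,
  STAKE_DELEGATION = 2,
  STAKE_POOL_REGISTRATION = 3,
  // later certificate kinds (pool retirement, genesis delegation, MIR)
  // exist in the ledger spec but are not handled by the device yet
}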

+23 -17

1 comment

4 changed files

SebastienGllmt

pr closed time in 2 months

pull request commentcardano-foundation/ledgerjs-hw-app-cardano

add missing certificate enum

You are correct, the Ledger device does not support other types of certificates for now.

SebastienGllmt

comment created time in 2 months

create branch cardano-foundation/ledgerjs-hw-app-cardano

branch : shelley

created branch time in 2 months

push eventvacuumlabs/ledgerjs-cardano-shelley

Peter Peresini

commit sha 27634a0c3531c4d665a2f5c3f1965392e576a96b

Deployed to npm as 2.0.0

view details

push time in 2 months

issue commentgoogle-coral/edgetpu

Leaky_relu -> tf.maximum(x, tf.multiply(0.1, x))

I have no idea how the compiler works, but based on my experience it does not like quantize ops. You should try to convert so that there is no such op in the resulting tflite. However, seeing that your tflite model is 8MB big, I would suspect that the size of the model is another issue the compiler will not like. Most of my models end up with a max of about 0.6MB, and then the compiler will refuse to compile them because it is unable to lay out intermediate tensors during computation. The limit, however, greatly depends on the model. So I would recommend starting with very small models and slowly scaling them up until you hit the limits ....

albertfaromatics

comment created time in 2 months

issue commentvacuumlabs/adalite

Used Trezor Model T to convert ADA have Ledger Nano S with same seed that shows 0 balance

Hi. Trezor and ledger have incompatible mnemonics for Cardano. This is due to the crypto derivation they use. Basically, one of them (I don't remember which one) deviates from the Cardano spec and uses the bitcoin spec. Note that the difference is only when deriving the root secret key from the mnemonic; afterwards they use the same crypto. This makes them incompatible...

Best regards, Peter

On Fri, Aug 14, 2020, 11:57 p.m., JBillzGH notifications@github.com wrote:

I connected Trezor Model T to adalite.io and was able to convert the balance to Shelley address for staking and was able to delegate. I have a Ledger Nano S that uses the same 24 seed words and when I connect the Ledger Nano S to adalite.io it shows a 0 balance.

I am able to connect both the Trezor and Ledger to myetherwallet.com and see the same eth balance so I know for certain they are using the same 24 words. However when I connect to myetherwallet.com with the Trezor it defaults to a derivation path of m/44'/60'/0'/0 while the Ledger uses m/44'/60'/0' and this is where the eth balance is on both devices. Why am I not able to use the Ledger Nano S with adalite.io to see my ADA balance?

Also, since converting the ADA on adalite.io I am no longer able to connect the Trezor Model T with the Yoroi web wallet. When I try to connect now it gives an error that it can't connect after exporting the public key.

[image: image] https://user-images.githubusercontent.com/69698520/90295659-6b6f0d00-de3e-11ea-9b7a-e5699f2b1144.png


JBillzGH

comment created time in 2 months

issue openedgoogle-coral/edgetpu

Any possibility of supporting `transpose` operation?

According to https://coral.ai/docs/edgetpu/models-intro/#supported-operations coral does not seem to support the Transpose op. I would presume that under some limiting circumstances (e.g., not transposing the batch dim, etc.) the device should be able to support this. And the operation would be very helpful -- given size limitations, using grouped convolutions might be an easy way to get better accuracy. However, such architectures usually involve a channel "shuffle" so that channels from different groups can get mixed, and such a shuffle is usually implemented by reshape + transpose + reshape.

created time in 3 months

issue commenttensorflow/model-optimization

Weight name conflicts when keras model consists of subclassed layer

@nutsiepully Is there a suggested workaround for this?

kalaluthien

comment created time in 3 months

issue openedgoogle-coral/edgetpu

Can we get better error reporting from the compiler please?

I know this was reported here several times, but I want to reiterate that "Internal compiler error. Aborting!" on anything the compiler does not like is really frustrating. If the compiler knows what is wrong, let it report a better message (at least "assertion failed xyz").

I am trying to train a custom signal detection model (based on a reduced nvidia quartznet), and so far I have come across a plethora of seemingly random limitations, such as

  • input size (batch size) given other input dimensions
  • random quantization problems (e.g. bias outside of int8 range), weird tflite files, unsupported operations, ...
  • the compiler not having enough memory for some models (even though they should take less than 1MB of memory, which should still leave plenty of memory free)

When I hit the error, it is hard to tell what exactly I messed up, and the problem is greatly exacerbated by the fact that edgetpu_compiler is closed source and thus we have no clear idea what its limitations are.

Given the fact that there are multiple similar issues open, I would hope that you make error reporting a bit higher priority so that people can self-diagnose most of the problems.

On the other side -- should I open a new issue and upload the tflite model each time I come across this error?

created time in 3 months

issue commentvacuumlabs/adalite

Used adalite to convert funds to stakable, enabled delegation, funds now ZERO

Also reported via https://safebrowsing.google.com/safebrowsing/report_phish/?hl=es to google-managed blacklist. At this point, I don't think we can do anything more. I am truly sorry @q20 :-(

q20

comment created time in 3 months

issue commentvacuumlabs/adalite

Used adalite to convert funds to stakable, enabled delegation, funds now ZERO

FYI, I already sent an email to abuse@namecheap.com, but I am not sure how/if they will be responsive. @q20 I suggest you try to file a complaint here https://complaint.ic3.gov/ (to be precise, namecheap suggests that; it might perhaps help with the takedown). @MichalPetro - we need to get this out on twitter/reddit/telegram ASAP! @xdzurman or @PeterBenc can we get a banner up on adalite.io that says "BEWARE OF adalitewallet.com SCAM, they are stealing adalite.io mnemonic credentials!"

q20

comment created time in 3 months

issue commentvacuumlabs/adalite

Used adalite to convert funds to stakable, enabled delegation, funds now ZERO

@MichalPetro adalitewallet.com is NOT OURS, right? I believe this is the scam site. It uses https://cors-escape.herokuapp.com/https://explorer2.adalite.io/api/bulk/addresses/summary to overcome our cross-site-access policy and get the data directly from us. We should warn our users on twitter/reddit + figure out how to take down the site.

q20

comment created time in 3 months

issue commentvacuumlabs/adalite

Used adalite to convert funds to stakable, enabled delegation, funds now ZERO

  1. Did you access adalite.io via some insecure infrastructure (e.g. public wifi)?
q20

comment created time in 3 months

issue commentvacuumlabs/adalite

Used adalite to convert funds to stakable, enabled delegation, funds now ZERO

We should try to get to the bottom of this. I fear you may have been scammed somehow.

  1. did you visit adalite.io by entering it directly in your browser, or did you use some link to get there?
  2. can you check your browser history to see if there is anything unusual just before/after the visit to adalite.io?
  3. Do you have some browser extensions with full rights to access webpage content? Can you give us the full list?
  4. Can you try a virus scan of your whole computer?
q20

comment created time in 3 months

issue commentvacuumlabs/adalite

Used adalite to convert funds to stakable, enabled delegation, funds now ZERO

Michal, can we track whether the transaction was through adalite? While we can't track the transaction itself because of privacy cleanups, can we perhaps check whether our backend did something at that time?

On Fri, Aug 7, 2020, 4:42 p.m., q20 notifications@github.com wrote:

Understood. Not the end of the world; still hurts, though. : /


q20

comment created time in 3 months

delete branch vacuumlabs/ledger-app-cardano-shelley

delete branch : fix_ttl_epoch

delete time in 3 months

push eventvacuumlabs/ledger-app-cardano-shelley

Peter Peresini

commit sha 103736a608f0ef136b65f0bcc2ef103c766850ca

Fix epoch/slot calculation after shelley parameter change

view details

Peter Peresini

commit sha b3d887ae77248a18c90ad2bd645937f93ef9c93b

Merge pull request #34 from vacuumlabs/fix_ttl_epoch Fix epoch/slot calculation after shelley parameter change

view details

push time in 3 months

Pull request review commentvacuumlabs/ledger-app-cardano-shelley

Fix epoch/slot calculation after shelley parameter change

 size_t str_formatAdaAmount(uint64_t amount, char* out, size_t outSize) 	return rawSize; } ++// TODO: This is valid only for mainnet+static struct {+	uint64_t startBlockNumber;+	uint64_t startEpoch;+	uint64_t slotsInEpoch;+} EPOCH_SLOTS_CONFIG[] = {+	{4492800, 208, 432000},+	{0, 0, 21600}+};+ size_t str_formatTtl(uint64_t ttl, char* out, size_t outSize) { 	ASSERT(outSize < BUFFER_SIZE_PARANOIA); -	const uint64_t SLOTS_IN_EPOCH = 21600;-	uint64_t epoch = ttl / SLOTS_IN_EPOCH;-	uint64_t slotInEpoch = ttl % SLOTS_IN_EPOCH;+	unsigned i = 0;+	while (ttl < EPOCH_SLOTS_CONFIG[i].startBlockNumber) {+		i++;+		ASSERT(i < ARRAY_LEN(EPOCH_SLOTS_CONFIG));

Yes. Plus, this is actually a buffer overflow check.

ppershing

comment created time in 3 months

Pull request review commentvacuumlabs/ledger-app-cardano-shelley

Fix epoch/slot calculation after shelley parameter change

 size_t str_formatAdaAmount(uint64_t amount, char* out, size_t outSize) 	return rawSize; } ++// TODO: This is valid only for mainnet+static struct {+	uint64_t startBlockNumber;

Yep, that is the absolute slot number. I did not want to confuse slots (which are usually meant per-epoch) with this absolute number.

ppershing

comment created time in 3 months

Pull request review commentvacuumlabs/ledger-app-cardano-shelley

Fix epoch/slot calculation after shelley parameter change

 size_t str_formatAdaAmount(uint64_t amount, char* out, size_t outSize) 	return rawSize; } ++// TODO: This is valid only for mainnet+static struct {+	uint64_t startBlockNumber;+	uint64_t startEpoch;+	uint64_t slotsInEpoch;+} EPOCH_SLOTS_CONFIG[] = {+	{4492800, 208, 432000},+	{0, 0, 21600}+};+ size_t str_formatTtl(uint64_t ttl, char* out, size_t outSize) { 	ASSERT(outSize < BUFFER_SIZE_PARANOIA); -	const uint64_t SLOTS_IN_EPOCH = 21600;-	uint64_t epoch = ttl / SLOTS_IN_EPOCH;-	uint64_t slotInEpoch = ttl % SLOTS_IN_EPOCH;+	unsigned i = 0;

Not needed; we are indexing a rather small array.

ppershing

comment created time in 3 months

create branch vacuumlabs/ledger-app-cardano-shelley

branch : fix_ttl_epoch

created branch time in 3 months

push eventvacuumlabs/ledger-app-cardano-shelley

Peter Peresini

commit sha 8b1a18f46a5d450366724b7274839b5e0c91feac

Fix getTxWitness not cleaning up privateKey correctly

view details

Peter Peresini

commit sha cd2839d4d897d804b6f21bd0648937ed2104a05a

Change os_memset to explicit_bzero (suggested by yhql-ledger)

view details

Peter Peresini

commit sha 83a53aca283216ada972461dca7f010a92626d90

Merge pull request #33 from vacuumlabs/security_fixes Security fixes

view details

push time in 3 months
