10#ifndef UFSECP_BUILDING
11#define UFSECP_BUILDING
15#include "../../gpu/include/gpu_backend.hpp"
24using namespace secp256k1::gpu;
// Shared catch clauses appended to the try-blocks of the GPU ABI wrappers:
// translates C++ exceptions escaping the backend into flat ABI error codes
// (allocation failure -> UFSECP_ERR_GPU_MEMORY, anything else -> internal).
// Fix: stripped line-number residue ("31 ", "32 ", "33 ") fused into the
// macro text by a bad paste, which made the directive a syntax error.
#define UFSECP_GPU_CATCH \
    catch (const std::bad_alloc&) { return UFSECP_ERR_GPU_MEMORY; } \
    catch (...) { return UFSECP_ERR_INTERNAL; }
// Returns true iff every 33-byte public key in the batch starts with an
// SEC1 compressed-point prefix (0x02 even-Y / 0x03 odd-Y).
// Vacuously true for count == 0. Does not dereference pubkeys33 when
// count == 0. Reconstructed from a truncated fragment: the rejecting
// return path and final `return true;` were lost in extraction but are
// fully determined by the bool contract (signature per file symbol list).
static bool has_valid_compressed_pubkeys(const uint8_t* pubkeys33, size_t count) {
    for (size_t index = 0; index < count; ++index) {
        const uint8_t prefix = pubkeys33[index * 33];
        if (prefix != 0x02 && prefix != 0x03) {
            return false;
        }
    }
    return true;
}
// Returns true iff every 65-byte public key in the batch starts with the
// SEC1 uncompressed-point prefix 0x04. Vacuously true for count == 0.
// Reconstructed from a truncated fragment: reject path and trailing
// `return true;` restored per the declared bool signature.
static bool has_valid_uncompressed_pubkeys(const uint8_t* pubkeys65, size_t count) {
    for (size_t index = 0; index < count; ++index) {
        if (pubkeys65[index * 65] != 0x04) {
            return false;
        }
    }
    return true;
}
// Returns true iff every ECDSA recovery id is in the legal range [0, 3].
// Vacuously true for count == 0. Reconstructed from a truncated fragment:
// reject path and trailing `return true;` restored per the declared
// bool signature.
static bool has_valid_recovery_ids(const int* recids, size_t count) {
    for (size_t index = 0; index < count; ++index) {
        if (recids[index] < 0 || recids[index] > 3) {
            return false;
        }
    }
    return true;
}
// Returns true iff no per-message payload size exceeds max_payload
// (the stride callers use for the flattened BIP-324 buffers).
// Vacuously true for count == 0. Reconstructed from a truncated fragment:
// reject path and trailing `return true;` restored per the declared
// bool signature.
static bool has_valid_bip324_sizes(const uint32_t* sizes, size_t count, uint32_t max_payload) {
    for (size_t index = 0; index < count; ++index) {
        if (sizes[index] > max_payload) {
            return false;
        }
    }
    return true;
}
// Returns true iff each 324-byte proof carries the SEC1 uncompressed-point
// prefix 0x04 at the start of each of its four embedded 65-byte points
// (offsets 0, 65, 130, 195). Vacuously true for count == 0.
// Reconstructed from a truncated fragment: reject path and trailing
// `return true;` restored per the declared bool signature.
static bool has_valid_bulletproof_prefixes(const uint8_t* proofs324, size_t count) {
    for (size_t index = 0; index < count; ++index) {
        const uint8_t* proof = proofs324 + (index * 324);
        if (proof[0] != 0x04 || proof[65] != 0x04 || proof[130] != 0x04 || proof[195] != 0x04) {
            return false;
        }
    }
    return true;
}
106 const uint32_t count = backend_count();
107 if (backend_ids_out && max_ids > 0) {
108 backend_ids(backend_ids_out, max_ids);
111 }
catch (...) {
return 0; }
// Maps a backend id to its human-readable name; any unknown id (including 0)
// yields "none". Pure function, no GPU state touched.
// Reconstructed from a truncated switch fragment (original lines 116-119 are
// consecutive, so the visible cases are the complete set); signature taken
// from the file's symbol list.
const char* ufsecp_gpu_backend_name(uint32_t bid) {
    switch (bid) {
        case 1:  return "CUDA";
        case 2:  return "OpenCL";
        case 3:  return "Metal";
        default: return "none";
    }
}
124 try {
return is_available(bid) ? 1 : 0; }
125 catch (...) {
return 0; }
130 auto b = create_backend(bid);
132 return b->device_count();
133 }
catch (...) {
return 0; }
// NOTE(review): truncated remnant of ufsecp_gpu_device_info(bid, device_index,
// info_out) — the parameter list head, validation, the `di` declaration, error
// handling around `err`, and the rest of the info_out population were lost in
// extraction (original lines 138-141, 143-145, 147-148, 150+ missing).
// Recover the full body from version control; do not attempt to rebuild it
// from this fragment alone.
137 uint32_t bid, uint32_t device_index,
142 auto b = create_backend(bid);
146 auto err = b->device_info(device_index, di);
149 std::memcpy(info_out->
name, di.name,
sizeof(info_out->
name));
// NOTE(review): truncated remnant of ufsecp_gpu_ctx_create(ctx_out, bid,
// device_index) — allocation of the ctx object, the error check on
// backend->init(), and the success return were lost in extraction (original
// lines 168-171, 173-174, 176-180, 184+ missing). The visible tail shows the
// context being populated: backend ownership moved in, then backend_id and
// device_index recorded. Recover the full body from version control.
167 uint32_t device_index)
172 auto backend = create_backend(bid);
175 auto err = backend->init(device_index);
181 ctx->
backend = std::move(backend);
182 ctx->backend_id = bid;
183 ctx->device_index = device_index;
202 if (!ctx)
return "NULL GPU context";
203 return ctx->
backend->last_error_msg();
// NOTE(review): truncated remnant of ufsecp_gpu_generator_mul_batch(ctx,
// scalars32, count, out_pubkeys33) — the ctx/null/count validation (original
// lines 215-221) and the surrounding try/to_abi_error/UFSECP_GPU_CATCH
// scaffolding were lost in extraction. Recover from version control.
212 const uint8_t* scalars32,
214 uint8_t* out_pubkeys33)
222 ctx->
backend->generator_mul_batch(scalars32, count, out_pubkeys33));
// NOTE(review): truncated remnant of ufsecp_gpu_ecdsa_verify_batch — the
// ctx check, the body of the null-argument branch (original lines 237-241),
// and the try/to_abi_error/UFSECP_GPU_CATCH scaffolding were lost in
// extraction. Recover from version control.
228 const uint8_t* msg_hashes32,
229 const uint8_t* pubkeys33,
230 const uint8_t* sigs64,
232 uint8_t* out_results)
236 if (!msg_hashes32 || !pubkeys33 || !sigs64 || !out_results) {
242 ctx->
backend->ecdsa_verify_batch(
243 msg_hashes32, pubkeys33, sigs64, count, out_results));
// NOTE(review): truncated remnant of ufsecp_gpu_schnorr_verify_batch —
// structurally parallel to the ECDSA wrapper above; the null-argument branch
// body (original lines 258-262) and the try/to_abi_error/UFSECP_GPU_CATCH
// scaffolding were lost in extraction. Recover from version control.
249 const uint8_t* msg_hashes32,
250 const uint8_t* pubkeys_x32,
251 const uint8_t* sigs64,
253 uint8_t* out_results)
257 if (!msg_hashes32 || !pubkeys_x32 || !sigs64 || !out_results) {
263 ctx->
backend->schnorr_verify_batch(
264 msg_hashes32, pubkeys_x32, sigs64, count, out_results));
// NOTE(review): truncated remnant of ufsecp_gpu_ecdh_batch — the
// null-argument branch body (original lines 278-286, likely including the
// pubkey-prefix validation) and the backend-call prefix were lost in
// extraction. Recover from version control.
270 const uint8_t* privkeys32,
271 const uint8_t* peer_pubkeys33,
273 uint8_t* out_secrets32)
277 if (!privkeys32 || !peer_pubkeys33 || !out_secrets32) {
287 privkeys32, peer_pubkeys33, count, out_secrets32));
// NOTE(review): truncated remnant of ufsecp_gpu_hash160_pubkey_batch — the
// validation block (original lines 296-305) and the try/to_abi_error
// scaffolding were lost in extraction. Recover from version control.
293 const uint8_t* pubkeys33,
295 uint8_t* out_hash160)
306 ctx->
backend->hash160_pubkey_batch(pubkeys33, count, out_hash160));
// NOTE(review): truncated remnant of ufsecp_gpu_msm (multi-scalar
// multiplication) — the validation block (original lines 316-322) and the
// try/to_abi_error scaffolding were lost in extraction. Recover from
// version control.
312 const uint8_t* scalars32,
313 const uint8_t* points33,
315 uint8_t* out_result33)
323 ctx->
backend->msm(scalars32, points33, n, out_result33));
// NOTE(review): truncated remnant of ufsecp_gpu_frost_verify_partial_batch —
// the null-argument branch body (original lines 344-353) and the
// try/to_abi_error/UFSECP_GPU_CATCH scaffolding were lost in extraction.
// Recover from version control.
329 const uint8_t* z_i32,
330 const uint8_t* D_i33,
331 const uint8_t* E_i33,
332 const uint8_t* Y_i33,
333 const uint8_t* rho_i32,
334 const uint8_t* lambda_ie32,
335 const uint8_t* negate_R,
336 const uint8_t* negate_key,
338 uint8_t* out_results)
342 if (!z_i32 || !D_i33 || !E_i33 || !Y_i33 || !rho_i32 ||
343 !lambda_ie32 || !negate_R || !negate_key || !out_results) {
354 ctx->
backend->frost_verify_partial_batch(
355 z_i32, D_i33, E_i33, Y_i33, rho_i32, lambda_ie32,
356 negate_R, negate_key, count, out_results));
// NOTE(review): truncated remnant of ufsecp_gpu_ecrecover_batch — the
// null-argument branch body (original lines 372-380, likely including the
// has_valid_recovery_ids() check) and the backend-call prefix were lost in
// extraction. Recover from version control.
362 const uint8_t* msg_hashes32,
363 const uint8_t* sigs64,
366 uint8_t* out_pubkeys33,
371 if (!msg_hashes32 || !sigs64 || !recids || !out_pubkeys33 || !out_valid) {
381 msg_hashes32, sigs64, recids, count, out_pubkeys33, out_valid));
// NOTE(review): truncated remnant of ufsecp_gpu_zk_knowledge_verify_batch —
// the null-argument branch body (original lines 400-406) and the
// try/to_abi_error scaffolding were lost in extraction. Recover from
// version control.
391 const uint8_t* proofs64,
392 const uint8_t* pubkeys65,
393 const uint8_t* messages32,
395 uint8_t* out_results)
399 if (!proofs64 || !pubkeys65 || !messages32 || !out_results)
407 ctx->
backend->zk_knowledge_verify_batch(
408 proofs64, pubkeys65, messages32, count, out_results));
// NOTE(review): truncated remnant of ufsecp_gpu_zk_dleq_verify_batch — the
// null-argument branch body (original lines 425-434, likely including the
// has_valid_uncompressed_pubkeys() checks on the four point arrays) and the
// try/to_abi_error scaffolding were lost in extraction. Recover from
// version control.
414 const uint8_t* proofs64,
415 const uint8_t* G_pts65,
416 const uint8_t* H_pts65,
417 const uint8_t* P_pts65,
418 const uint8_t* Q_pts65,
420 uint8_t* out_results)
424 if (!proofs64 || !G_pts65 || !H_pts65 || !P_pts65 || !Q_pts65 || !out_results)
435 ctx->
backend->zk_dleq_verify_batch(
436 proofs64, G_pts65, H_pts65, P_pts65, Q_pts65, count, out_results));
// NOTE(review): truncated remnant of ufsecp_gpu_bulletproof_verify_batch —
// the null-argument branch body (original lines 451-459, likely including the
// has_valid_bulletproof_prefixes() check) and the try/to_abi_error
// scaffolding were lost in extraction. Recover from version control.
442 const uint8_t* proofs324,
443 const uint8_t* commitments65,
444 const uint8_t* H_generator65,
446 uint8_t* out_results)
450 if (!proofs324 || !commitments65 || !H_generator65 || !out_results)
460 ctx->
backend->bulletproof_verify_batch(
461 proofs324, commitments65, H_generator65, count, out_results));
// NOTE(review): truncated remnant of ufsecp_gpu_bip324_aead_encrypt_batch —
// the null-argument branch body (original lines 482-488, likely including the
// has_valid_bip324_sizes() check) and the try/to_abi_error scaffolding were
// lost in extraction. Recover from version control.
471 const uint8_t* keys32,
472 const uint8_t* nonces12,
473 const uint8_t* plaintexts,
474 const uint32_t* sizes,
475 uint32_t max_payload,
481 if (!keys32 || !nonces12 || !plaintexts || !sizes || !wire_out)
489 ctx->
backend->bip324_aead_encrypt_batch(
490 keys32, nonces12, plaintexts, sizes, max_payload, count, wire_out));
// NOTE(review): truncated remnant of ufsecp_gpu_bip324_aead_decrypt_batch —
// the null-argument branch body (original lines 508-514) and the
// try/to_abi_error scaffolding were lost in extraction. Recover from
// version control.
496 const uint8_t* keys32,
497 const uint8_t* nonces12,
498 const uint8_t* wire_in,
499 const uint32_t* sizes,
500 uint32_t max_payload,
502 uint8_t* plaintext_out,
507 if (!keys32 || !nonces12 || !wire_in || !sizes || !plaintext_out || !out_valid)
515 ctx->
backend->bip324_aead_decrypt_batch(
516 keys32, nonces12, wire_in, sizes, max_payload, count,
517 plaintext_out, out_valid));
// NOTE(review): truncated remnant of ufsecp_gpu_error_str — only the
// default arm of its error-code switch survived extraction; the per-code
// case labels (original lines before 545) are missing. Recover from
// version control; do not rebuild from this fragment.
545 default:
return "unknown error";
std::unique_ptr< GpuBackend > backend
uint32_t max_threads_per_block
uint64_t global_mem_bytes
#define UFSECP_ERR_INTERNAL
#define UFSECP_ERR_BAD_PUBKEY
#define UFSECP_ERR_NULL_ARG
#define UFSECP_ERR_VERIFY_FAIL
#define UFSECP_ERR_SELFTEST
#define UFSECP_ERR_BAD_SIG
#define UFSECP_ERR_BUF_TOO_SMALL
#define UFSECP_ERR_BAD_INPUT
#define UFSECP_ERR_BAD_KEY
#define UFSECP_ERR_GPU_UNSUPPORTED
#define UFSECP_ERR_GPU_UNAVAILABLE
#define UFSECP_ERR_GPU_BACKEND
#define UFSECP_ERR_GPU_MEMORY
#define UFSECP_ERR_GPU_LAUNCH
#define UFSECP_ERR_GPU_DEVICE
#define UFSECP_ERR_GPU_QUEUE
ufsecp_error_t ufsecp_gpu_schnorr_verify_batch(ufsecp_gpu_ctx *ctx, const uint8_t *msg_hashes32, const uint8_t *pubkeys_x32, const uint8_t *sigs64, size_t count, uint8_t *out_results)
static bool has_valid_recovery_ids(const int *recids, size_t count)
ufsecp_error_t ufsecp_gpu_ecdsa_verify_batch(ufsecp_gpu_ctx *ctx, const uint8_t *msg_hashes32, const uint8_t *pubkeys33, const uint8_t *sigs64, size_t count, uint8_t *out_results)
ufsecp_error_t ufsecp_gpu_zk_dleq_verify_batch(ufsecp_gpu_ctx *ctx, const uint8_t *proofs64, const uint8_t *G_pts65, const uint8_t *H_pts65, const uint8_t *P_pts65, const uint8_t *Q_pts65, size_t count, uint8_t *out_results)
ufsecp_error_t ufsecp_gpu_device_info(uint32_t bid, uint32_t device_index, ufsecp_gpu_device_info_t *info_out)
ufsecp_error_t ufsecp_gpu_ctx_create(ufsecp_gpu_ctx **ctx_out, uint32_t bid, uint32_t device_index)
static constexpr std::size_t kMaxGpuBatchN
static bool has_valid_bip324_sizes(const uint32_t *sizes, size_t count, uint32_t max_payload)
const char * ufsecp_gpu_error_str(ufsecp_error_t err)
ufsecp_error_t ufsecp_gpu_last_error(const ufsecp_gpu_ctx *ctx)
ufsecp_error_t ufsecp_gpu_ecdh_batch(ufsecp_gpu_ctx *ctx, const uint8_t *privkeys32, const uint8_t *peer_pubkeys33, size_t count, uint8_t *out_secrets32)
ufsecp_error_t ufsecp_gpu_bip324_aead_decrypt_batch(ufsecp_gpu_ctx *ctx, const uint8_t *keys32, const uint8_t *nonces12, const uint8_t *wire_in, const uint32_t *sizes, uint32_t max_payload, size_t count, uint8_t *plaintext_out, uint8_t *out_valid)
ufsecp_error_t ufsecp_gpu_frost_verify_partial_batch(ufsecp_gpu_ctx *ctx, const uint8_t *z_i32, const uint8_t *D_i33, const uint8_t *E_i33, const uint8_t *Y_i33, const uint8_t *rho_i32, const uint8_t *lambda_ie32, const uint8_t *negate_R, const uint8_t *negate_key, size_t count, uint8_t *out_results)
static bool has_valid_bulletproof_prefixes(const uint8_t *proofs324, size_t count)
ufsecp_error_t ufsecp_gpu_bip324_aead_encrypt_batch(ufsecp_gpu_ctx *ctx, const uint8_t *keys32, const uint8_t *nonces12, const uint8_t *plaintexts, const uint32_t *sizes, uint32_t max_payload, size_t count, uint8_t *wire_out)
ufsecp_error_t ufsecp_gpu_ecrecover_batch(ufsecp_gpu_ctx *ctx, const uint8_t *msg_hashes32, const uint8_t *sigs64, const int *recids, size_t count, uint8_t *out_pubkeys33, uint8_t *out_valid)
static ufsecp_error_t to_abi_error(GpuError e)
const char * ufsecp_gpu_backend_name(uint32_t bid)
ufsecp_error_t ufsecp_gpu_bulletproof_verify_batch(ufsecp_gpu_ctx *ctx, const uint8_t *proofs324, const uint8_t *commitments65, const uint8_t *H_generator65, size_t count, uint8_t *out_results)
ufsecp_error_t ufsecp_gpu_zk_knowledge_verify_batch(ufsecp_gpu_ctx *ctx, const uint8_t *proofs64, const uint8_t *pubkeys65, const uint8_t *messages32, size_t count, uint8_t *out_results)
uint32_t ufsecp_gpu_device_count(uint32_t bid)
ufsecp_error_t ufsecp_gpu_msm(ufsecp_gpu_ctx *ctx, const uint8_t *scalars32, const uint8_t *points33, size_t n, uint8_t *out_result33)
ufsecp_error_t ufsecp_gpu_hash160_pubkey_batch(ufsecp_gpu_ctx *ctx, const uint8_t *pubkeys33, size_t count, uint8_t *out_hash160)
uint32_t ufsecp_gpu_backend_count(uint32_t *backend_ids_out, uint32_t max_ids)
const char * ufsecp_gpu_last_error_msg(const ufsecp_gpu_ctx *ctx)
static bool has_valid_compressed_pubkeys(const uint8_t *pubkeys33, size_t count)
int ufsecp_gpu_is_available(uint32_t bid)
static bool has_valid_uncompressed_pubkeys(const uint8_t *pubkeys65, size_t count)
ufsecp_error_t ufsecp_gpu_generator_mul_batch(ufsecp_gpu_ctx *ctx, const uint8_t *scalars32, size_t count, uint8_t *out_pubkeys33)
void ufsecp_gpu_ctx_destroy(ufsecp_gpu_ctx *ctx)