2 * Copyright (c) 2003-2017 Lev Walkin <vlm@lionet.info>.
4 * Redistribution and modifications are permitted subject to BSD license.
6 #include <asn_internal.h>
7 #include <constr_SET_OF.h>
8 #include <asn_SET_OF.h>
11 * Number of bytes left for this structure.
12 * (ctx->left) indicates the number of bytes remaining for the structure.
13 * (size) contains the number of bytes in the buffer passed.
15 #define LEFT ((size<(size_t)ctx->left)?size:(size_t)ctx->left)
18 * If the subprocessor function returns with an indication that it wants
19 * more data, it may well be a fatal decoding problem, because the
20 * size is constrained by the <TLV>'s L, even if the buffer size allows
22 * For example, consider the buffer containing the following TLVs:
23 * <T:5><L:1><V> <T:6>...
24 * The TLV length clearly indicates that exactly one byte is expected in V,
25 * so if the V processor still asks for more data, that is a fatal error,
26 * even though the buffer contains far more data than the V processor has seen.
28 #define SIZE_VIOLATION (ctx->left >= 0 && (size_t)ctx->left <= size)
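/*
 * Illustrative sketch (not part of the decoder): how LEFT and SIZE_VIOLATION
 * relate, using hypothetical numbers.
 */
#if 0
	/* The enclosing TLV says 5 bytes remain; the caller handed us 100 bytes. */
	ctx->left = 5; size = 100;
	n = LEFT;           /* == 5: a subparser may only see 5 bytes */
	v = SIZE_VIOLATION; /* true (0 <= 5 <= 100): a further RC_WMORE from the
	                     * subparser cannot be satisfied by feeding more
	                     * buffer, so it is converted into a hard RC_FAIL. */
#endif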
31 * This macro "eats" the part of the buffer which is definitely "consumed",
32 * i.e. was correctly converted into local representation or rightfully skipped.
35 #define ADVANCE(num_bytes) do { \
36 size_t num = num_bytes; \
37 ptr = ((const char *)ptr) + num;\
41 consumed_myself += num; \
45 * Switch to the next phase of parsing.
49 #define NEXT_PHASE(ctx) do { \
53 #define PHASE_OUT(ctx) do { ctx->phase = 10; } while(0)
56 * Return a standardized complex structure.
59 #define RETURN(_code) do { \
61 rval.consumed = consumed_myself;\
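/*
 * Usage sketch (not part of this file): how an application typically reaches
 * this decoder through the generic ber_decode() entry point.  MySet_t,
 * asn_DEF_MySet and use() stand for any compiler-generated SET OF type and
 * its consumer; they are hypothetical here.
 */
#if 0
	MySet_t *set = 0;	/* ber_decode() allocates it on the first call */
	asn_dec_rval_t rv = ber_decode(0, &asn_DEF_MySet,
		(void **)&set, buffer, buffer_size);
	if(rv.code == RC_OK) {
		int i;
		/* Decoded members accumulate in the anonymous (array, count) list */
		for(i = 0; i < set->list.count; i++)
			use(set->list.array[i]);
	}
	/* Release whatever was decoded, even after a partial failure */
	ASN_STRUCT_FREE(asn_DEF_MySet, set);
#endif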
66 * The decoder of the SET OF type.
69 SET_OF_decode_ber(const asn_codec_ctx_t *opt_codec_ctx,
70 const asn_TYPE_descriptor_t *td, void **struct_ptr,
71 const void *ptr, size_t size, int tag_mode) {
73 * Bring closer parts of structure description.
75 const asn_SET_OF_specifics_t *specs = (const asn_SET_OF_specifics_t *)td->specifics;
76 const asn_TYPE_member_t *elm = td->elements; /* Single one */
79 * Parts of the structure being constructed.
81 void *st = *struct_ptr; /* Target structure. */
82 asn_struct_ctx_t *ctx; /* Decoder context */
84 ber_tlv_tag_t tlv_tag; /* T from TLV */
85 asn_dec_rval_t rval; /* Return code from subparsers */
87 ssize_t consumed_myself = 0; /* Consumed bytes from ptr */
89 ASN_DEBUG("Decoding %s as SET OF", td->name);
92 * Create the target structure if it is not present already.
95 st = *struct_ptr = CALLOC(1, specs->struct_size);
102 * Restore parsing context.
104 ctx = (asn_struct_ctx_t *)((char *)st + specs->ctx_offset);
107 * Start to parse where we left off previously.
113 * Check that the set of tags associated with the given structure
114 * perfectly fits our expectations.
117 rval = ber_check_tags(opt_codec_ctx, td, ctx, ptr, size,
118 tag_mode, 1, &ctx->left, 0);
119 if(rval.code != RC_OK) {
120 ASN_DEBUG("%s tagging check failed: %d",
121 td->name, rval.code);
126 ctx->left += rval.consumed; /* Subtracted back below by ADVANCE() */
127 ADVANCE(rval.consumed);
129 ASN_DEBUG("Structure consumes %ld bytes, "
130 "buffer %ld", (long)ctx->left, (long)size);
137 * From the place where we've left it previously,
138 * try to decode the next item.
140 for(;; ctx->step = 0) {
141 ssize_t tag_len; /* Length of TLV's T */
147 * MICROPHASE 1: Synchronize decoding.
151 ASN_DEBUG("End of SET OF %s", td->name);
153 * No more things to decode.
161 * Fetch the T from TLV.
163 tag_len = ber_fetch_tag(ptr, LEFT, &tlv_tag);
165 case 0: if(!SIZE_VIOLATION) RETURN(RC_WMORE);
167 case -1: RETURN(RC_FAIL);
170 if(ctx->left < 0 && ((const uint8_t *)ptr)[0] == 0) {
176 } else if(((const uint8_t *)ptr)[1] == 0) {
178 * Found the terminator of the
179 * indefinite length structure.
185 /* Outmost tag may be unknown and cannot be fetched/compared */
186 if(elm->tag != (ber_tlv_tag_t)-1) {
187 if(BER_TAGS_EQUAL(tlv_tag, elm->tag)) {
189 * The new list member of expected type has arrived.
192 ASN_DEBUG("Unexpected tag %s fixed SET OF %s",
193 ber_tlv_tag_string(tlv_tag), td->name);
194 ASN_DEBUG("%s SET OF has tag %s",
195 td->name, ber_tlv_tag_string(elm->tag));
201 * MICROPHASE 2: Invoke the member-specific decoder.
203 ctx->step |= 1; /* Confirm entering next microphase */
207 * Invoke the member fetch routine according to the member's type
209 rval = elm->type->op->ber_decoder(opt_codec_ctx,
210 elm->type, &ctx->ptr, ptr, LEFT, 0);
211 ASN_DEBUG("In %s SET OF %s code %d consumed %d",
212 td->name, elm->type->name,
213 rval.code, (int)rval.consumed);
217 asn_anonymous_set_ *list = _A_SET_FROM_VOID(st);
218 if(ASN_SET_ADD(list, ctx->ptr) != 0)
224 case RC_WMORE: /* More data expected */
225 if(!SIZE_VIOLATION) {
226 ADVANCE(rval.consumed);
230 case RC_FAIL: /* Fatal error */
231 ASN_STRUCT_FREE(*elm->type, ctx->ptr);
236 ADVANCE(rval.consumed);
237 } /* for(all list members) */
242 * Read in all "end of content" TLVs.
244 while(ctx->left < 0) {
246 if(LEFT > 0 && ((const char *)ptr)[0] != 0) {
253 if(((const char *)ptr)[0] == 0
254 && ((const char *)ptr)[1] == 0) {
269 * Internally visible buffer holding a single encoded element.
274 size_t allocated_size;
275 unsigned bits_unused;
277 /* Append bytes to the above structure */
278 static int _el_addbytes(const void *buffer, size_t size, void *el_buf_ptr) {
279 struct _el_buffer *el_buf = (struct _el_buffer *)el_buf_ptr;
281 if(el_buf->length + size > el_buf->allocated_size) {
282 size_t new_size = el_buf->allocated_size ? el_buf->allocated_size : 8;
287 } while(el_buf->length + size > new_size);
289 p = REALLOC(el_buf->buf, new_size);
292 el_buf->allocated_size = new_size;
298 memcpy(el_buf->buf + el_buf->length, buffer, size);
300 el_buf->length += size;
304 static void assert_unused_bits(const struct _el_buffer* p) {
306 assert((p->buf[p->length-1] & ~(0xff << p->bits_unused)) == 0);
308 assert(p->bits_unused == 0);
312 static int _el_buf_cmp(const void *ap, const void *bp) {
313 const struct _el_buffer *a = (const struct _el_buffer *)ap;
314 const struct _el_buffer *b = (const struct _el_buffer *)bp;
318 if(a->length < b->length)
319 common_len = a->length;
321 common_len = b->length;
323 if (a->buf && b->buf) {
324 ret = memcmp(a->buf, b->buf, common_len);
327 if(a->length < b->length)
329 else if(a->length > b->length)
331 /* Ignore unused bits. */
332 assert_unused_bits(a);
333 assert_unused_bits(b);
340 SET_OF__encode_sorted_free(struct _el_buffer *el_buf, size_t count) {
343 for(i = 0; i < count; i++) {
344 FREEMEM(el_buf[i].buf);
350 enum SET_OF__encode_method {
351 SOES_DER, /* Distinguished Encoding Rules */
352 SOES_CUPER /* Canonical Unaligned Packed Encoding Rules */
355 static struct _el_buffer *
356 SET_OF__encode_sorted(const asn_TYPE_member_t *elm,
357 const asn_anonymous_set_ *list,
358 enum SET_OF__encode_method method) {
359 struct _el_buffer *encoded_els;
363 (struct _el_buffer *)CALLOC(list->count, sizeof(encoded_els[0]));
364 if(encoded_els == NULL) {
369 * Encode all members.
371 for(edx = 0; edx < list->count; edx++) {
372 const void *memb_ptr = list->array[edx];
373 struct _el_buffer *encoding_el = &encoded_els[edx];
374 asn_enc_rval_t erval;
379 * Encode the member into the prepared space.
383 erval = elm->type->op->der_encoder(elm->type, memb_ptr, 0, elm->tag,
384 _el_addbytes, encoding_el);
387 erval = uper_encode(elm->type,
388 elm->encoding_constraints.per_constraints,
389 memb_ptr, _el_addbytes, encoding_el);
390 if(erval.encoded != -1) {
391 size_t extra_bits = erval.encoded % 8;
392 assert(encoding_el->length == (size_t)(erval.encoded + 7) / 8);
393 encoding_el->bits_unused = (8 - extra_bits) & 0x7;
397 assert(!"Unreachable");
400 if(erval.encoded < 0) break;
403 if(edx == list->count) {
405 * Sort the encoded elements according to their encoding.
407 qsort(encoded_els, list->count, sizeof(encoded_els[0]), _el_buf_cmp);
411 SET_OF__encode_sorted_free(encoded_els, edx);
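/*
 * Worked example (hypothetical numbers) of the bits_unused bookkeeping in
 * SET_OF__encode_sorted() above, for the SOES_CUPER case.
 */
#if 0
	erval.encoded = 13;                          /* UPER sizes are in bits      */
	length = (erval.encoded + 7) / 8;            /* == 2 bytes in the buffer    */
	bits_unused = (8 - erval.encoded % 8) & 0x7; /* == 3 trailing padding bits  */
#endif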
418 * The DER encoder of the SET OF type.
421 SET_OF_encode_der(const asn_TYPE_descriptor_t *td, const void *sptr,
422 int tag_mode, ber_tlv_tag_t tag, asn_app_consume_bytes_f *cb,
424 const asn_TYPE_member_t *elm = td->elements;
425 const asn_anonymous_set_ *list = _A_CSET_FROM_VOID(sptr);
426 size_t computed_size = 0;
427 ssize_t encoding_size = 0;
428 struct _el_buffer *encoded_els;
431 ASN_DEBUG("Estimating size for SET OF %s", td->name);
434 * Gather the length of the underlying members sequence.
436 for(edx = 0; edx < list->count; edx++) {
437 void *memb_ptr = list->array[edx];
438 asn_enc_rval_t erval;
440 if(!memb_ptr) ASN__ENCODE_FAILED;
443 elm->type->op->der_encoder(elm->type, memb_ptr, 0, elm->tag, 0, 0);
444 if(erval.encoded == -1) return erval;
445 computed_size += erval.encoded;
450 * Encode the TLV for the sequence itself.
453 der_write_tags(td, computed_size, tag_mode, 1, tag, cb, app_key);
454 if(encoding_size < 0) {
457 computed_size += encoding_size;
459 if(!cb || list->count == 0) {
460 asn_enc_rval_t erval;
461 erval.encoded = computed_size;
462 ASN__ENCODED_OK(erval);
465 ASN_DEBUG("Encoding members of %s SET OF", td->name);
468 * DER mandates dynamic sorting of the SET OF elements
469 * according to their encodings. Build an array of the
472 encoded_els = SET_OF__encode_sorted(elm, list, SOES_DER);
475 * Report encoded elements to the application.
476 * Dispose of temporary sorted members table.
478 for(edx = 0; edx < list->count; edx++) {
479 struct _el_buffer *encoded_el = &encoded_els[edx];
480 /* Report encoded chunks to the application */
481 if(cb(encoded_el->buf, encoded_el->length, app_key) < 0) {
484 encoding_size += encoded_el->length;
488 SET_OF__encode_sorted_free(encoded_els, list->count);
490 if(edx == list->count) {
491 asn_enc_rval_t erval;
492 assert(computed_size == (size_t)encoding_size);
493 erval.encoded = computed_size;
494 ASN__ENCODED_OK(erval);
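/*
 * Usage sketch (not part of this file): invoking the DER encoder through the
 * generic der_encode_to_buffer() helper.  asn_DEF_MySet and "set" are
 * hypothetical, as above.
 */
#if 0
	unsigned char outbuf[1024];
	asn_enc_rval_t er = der_encode_to_buffer(&asn_DEF_MySet, set,
		outbuf, sizeof(outbuf));
	if(er.encoded == -1) {
		/* er.failed_type points at the member that could not be encoded */
	} else {
		/* er.encoded bytes of DER, members sorted as DER mandates */
	}
#endif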
501 #define XER_ADVANCE(num_bytes) do { \
502 size_t num = num_bytes; \
503 buf_ptr = ((const char *)buf_ptr) + num;\
505 consumed_myself += num; \
509 * Decode the XER (XML) data.
512 SET_OF_decode_xer(const asn_codec_ctx_t *opt_codec_ctx,
513 const asn_TYPE_descriptor_t *td, void **struct_ptr,
514 const char *opt_mname, const void *buf_ptr, size_t size) {
516 * Bring closer parts of structure description.
518 const asn_SET_OF_specifics_t *specs = (const asn_SET_OF_specifics_t *)td->specifics;
519 const asn_TYPE_member_t *element = td->elements;
521 const char *xml_tag = opt_mname ? opt_mname : td->xml_tag;
524 * ... and parts of the structure being constructed.
526 void *st = *struct_ptr; /* Target structure. */
527 asn_struct_ctx_t *ctx; /* Decoder context */
529 asn_dec_rval_t rval; /* Return value from a decoder */
530 ssize_t consumed_myself = 0; /* Consumed bytes from ptr */
533 * Create the target structure if it is not present already.
536 st = *struct_ptr = CALLOC(1, specs->struct_size);
537 if(st == 0) RETURN(RC_FAIL);
540 /* Which tag is expected for the downstream */
541 if(specs->as_XMLValueList) {
542 elm_tag = (specs->as_XMLValueList == 1) ? 0 : "";
544 elm_tag = (*element->name)
545 ? element->name : element->type->xml_tag;
549 * Restore parsing context.
551 ctx = (asn_struct_ctx_t *)((char *)st + specs->ctx_offset);
554 * Phases of XER/XML processing:
555 * Phase 0: Check that the opening tag matches our expectations.
556 * Phase 1: Processing body and reacting on closing tag.
557 * Phase 2: Processing inner type.
559 for(; ctx->phase <= 2;) {
560 pxer_chunk_type_e ch_type; /* XER chunk type */
561 ssize_t ch_size; /* Chunk size */
562 xer_check_tag_e tcv; /* Tag check value */
565 * Go inside the inner member of a set.
567 if(ctx->phase == 2) {
568 asn_dec_rval_t tmprval;
570 /* Invoke the inner type decoder, possibly multiple times */
571 ASN_DEBUG("XER/SET OF element [%s]", elm_tag);
572 tmprval = element->type->op->xer_decoder(opt_codec_ctx,
573 element->type, &ctx->ptr, elm_tag,
575 if(tmprval.code == RC_OK) {
576 asn_anonymous_set_ *list = _A_SET_FROM_VOID(st);
577 if(ASN_SET_ADD(list, ctx->ptr) != 0)
580 XER_ADVANCE(tmprval.consumed);
582 XER_ADVANCE(tmprval.consumed);
583 RETURN(tmprval.code);
585 ctx->phase = 1; /* Back to body processing */
586 ASN_DEBUG("XER/SET OF phase => %d", ctx->phase);
591 * Get the next part of the XML stream.
593 ch_size = xer_next_token(&ctx->context,
594 buf_ptr, size, &ch_type);
601 case PXER_COMMENT: /* Got XML comment */
602 case PXER_TEXT: /* Ignore free-standing text */
603 XER_ADVANCE(ch_size); /* Skip silently */
606 break; /* Check the rest down there */
610 tcv = xer_check_tag(buf_ptr, ch_size, xml_tag);
611 ASN_DEBUG("XER/SET OF: tcv = %d, ph=%d t=%s",
612 tcv, ctx->phase, xml_tag);
615 if(ctx->phase == 0) break;
619 if(ctx->phase == 0) {
620 /* No more things to decode */
621 XER_ADVANCE(ch_size);
622 ctx->phase = 3; /* Phase out */
627 if(ctx->phase == 0) {
628 XER_ADVANCE(ch_size);
629 ctx->phase = 1; /* Processing body phase */
636 ASN_DEBUG("XER/SET OF: tcv=%d, ph=%d", tcv, ctx->phase);
637 if(ctx->phase == 1) {
639 * Process a single possible member.
649 ASN_DEBUG("Unexpected XML tag in SET OF");
653 ctx->phase = 3; /* "Phase out" on hard failure */
659 typedef struct xer_tmp_enc_s {
665 SET_OF_encode_xer_callback(const void *buffer, size_t size, void *key) {
666 xer_tmp_enc_t *t = (xer_tmp_enc_t *)key;
667 if(t->offset + size >= t->size) {
668 size_t newsize = (t->size << 2) + size;
669 void *p = REALLOC(t->buffer, newsize);
674 memcpy((char *)t->buffer + t->offset, buffer, size);
679 SET_OF_xer_order(const void *aptr, const void *bptr) {
680 const xer_tmp_enc_t *a = (const xer_tmp_enc_t *)aptr;
681 const xer_tmp_enc_t *b = (const xer_tmp_enc_t *)bptr;
682 size_t minlen = a->offset;
684 if(b->offset < minlen) minlen = b->offset;
685 /* Well-formed UTF-8 has this nice lexicographical property... */
686 ret = memcmp(a->buffer, b->buffer, minlen);
687 if(ret != 0) return ret;
688 if(a->offset == b->offset)
690 if(a->offset == minlen)
697 SET_OF_encode_xer(const asn_TYPE_descriptor_t *td, const void *sptr, int ilevel,
698 enum xer_encoder_flags_e flags, asn_app_consume_bytes_f *cb,
701 const asn_SET_OF_specifics_t *specs = (const asn_SET_OF_specifics_t *)td->specifics;
702 const asn_TYPE_member_t *elm = td->elements;
703 const asn_anonymous_set_ *list = _A_CSET_FROM_VOID(sptr);
704 const char *mname = specs->as_XMLValueList
705 ? 0 : ((*elm->name) ? elm->name : elm->type->xml_tag);
706 size_t mlen = mname ? strlen(mname) : 0;
707 int xcan = (flags & XER_F_CANONICAL);
708 xer_tmp_enc_t *encs = 0;
709 size_t encs_count = 0;
710 void *original_app_key = app_key;
711 asn_app_consume_bytes_f *original_cb = cb;
714 if(!sptr) ASN__ENCODE_FAILED;
717 encs = (xer_tmp_enc_t *)MALLOC(list->count * sizeof(encs[0]));
718 if(!encs) ASN__ENCODE_FAILED;
719 cb = SET_OF_encode_xer_callback;
724 for(i = 0; i < list->count; i++) {
725 asn_enc_rval_t tmper;
727 void *memb_ptr = list->array[i];
728 if(!memb_ptr) continue;
731 memset(&encs[encs_count], 0, sizeof(encs[0]));
732 app_key = &encs[encs_count];
737 if(!xcan) ASN__TEXT_INDENT(1, ilevel);
738 ASN__CALLBACK3("<", 1, mname, mlen, ">", 1);
741 if(!xcan && specs->as_XMLValueList == 1)
742 ASN__TEXT_INDENT(1, ilevel + 1);
743 tmper = elm->type->op->xer_encoder(elm->type, memb_ptr,
744 ilevel + (specs->as_XMLValueList != 2),
746 if(tmper.encoded == -1) return tmper;
747 er.encoded += tmper.encoded;
748 if(tmper.encoded == 0 && specs->as_XMLValueList) {
749 const char *name = elm->type->xml_tag;
750 size_t len = strlen(name);
751 ASN__CALLBACK3("<", 1, name, len, "/>", 2);
755 ASN__CALLBACK3("</", 2, mname, mlen, ">", 1);
760 if(!xcan) ASN__TEXT_INDENT(1, ilevel - 1);
763 xer_tmp_enc_t *enc = encs;
764 xer_tmp_enc_t *end = encs + encs_count;
765 ssize_t control_size = 0;
769 app_key = original_app_key;
770 qsort(encs, encs_count, sizeof(encs[0]), SET_OF_xer_order);
772 for(; enc < end; enc++) {
773 ASN__CALLBACK(enc->buffer, enc->offset);
774 FREEMEM(enc->buffer);
776 control_size += enc->offset;
778 assert(control_size == er.encoded);
787 for(n = 0; n < encs_count; n++) {
788 FREEMEM(encs[n].buffer);
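/*
 * Usage sketch (not part of this file): requesting canonical XER, which is
 * what triggers the sorting path above.  asn_DEF_MySet, "set", my_write and
 * my_key are hypothetical.
 */
#if 0
	asn_enc_rval_t er = xer_encode(&asn_DEF_MySet, set,
		XER_F_CANONICAL, my_write, my_key);
	if(er.encoded == -1) {
		/* Encoding failed; er.failed_type names the offending type */
	}
#endif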
796 SET_OF_print(const asn_TYPE_descriptor_t *td, const void *sptr, int ilevel,
797 asn_app_consume_bytes_f *cb, void *app_key) {
798 asn_TYPE_member_t *elm = td->elements;
799 const asn_anonymous_set_ *list = _A_CSET_FROM_VOID(sptr);
803 if(!sptr) return (cb("<absent>", 8, app_key) < 0) ? -1 : 0;
806 if(cb(td->name, strlen(td->name), app_key) < 0
807 || cb(" ::= {", 6, app_key) < 0)
810 for(i = 0; i < list->count; i++) {
811 const void *memb_ptr = list->array[i];
812 if(!memb_ptr) continue;
816 ret = elm->type->op->print_struct(elm->type, memb_ptr,
817 ilevel + 1, cb, app_key);
824 return (cb("}", 1, app_key) < 0) ? -1 : 0;
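/*
 * Usage sketch (not part of this file): the print routine above is normally
 * reached through asn_fprint().  asn_DEF_MySet and "set" are hypothetical.
 */
#if 0
	asn_fprint(stdout, &asn_DEF_MySet, set);	/* prints "MySet ::= { ... }" */
#endif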
828 SET_OF_free(const asn_TYPE_descriptor_t *td, void *ptr,
829 enum asn_struct_free_method method) {
831 const asn_SET_OF_specifics_t *specs;
832 asn_TYPE_member_t *elm = td->elements;
833 asn_anonymous_set_ *list = _A_SET_FROM_VOID(ptr);
834 asn_struct_ctx_t *ctx; /* Decoder context */
838 * Could not use set_of_empty() because of (*free)
841 for(i = 0; i < list->count; i++) {
842 void *memb_ptr = list->array[i];
844 ASN_STRUCT_FREE(*elm->type, memb_ptr);
846 list->count = 0; /* No meaningful elements left */
848 asn_set_empty(list); /* Remove (list->array) */
850 specs = (const asn_SET_OF_specifics_t *)td->specifics;
851 ctx = (asn_struct_ctx_t *)((char *)ptr + specs->ctx_offset);
853 ASN_STRUCT_FREE(*elm->type, ctx->ptr);
858 case ASFM_FREE_EVERYTHING:
861 case ASFM_FREE_UNDERLYING:
863 case ASFM_FREE_UNDERLYING_AND_RESET:
864 memset(ptr, 0, specs->struct_size);
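/*
 * Usage sketch (not part of this file): the free methods above are normally
 * selected through the public macros.  asn_DEF_MySet, "set" and
 * "value_on_stack" are hypothetical.
 */
#if 0
	ASN_STRUCT_FREE(asn_DEF_MySet, set);            /* ASFM_FREE_EVERYTHING */
	ASN_STRUCT_FREE_CONTENTS_ONLY(asn_DEF_MySet, &value_on_stack);
	                                                /* ASFM_FREE_UNDERLYING */
	/* ASFM_FREE_UNDERLYING_AND_RESET additionally zeroes the structure */
#endif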
871 SET_OF_constraint(const asn_TYPE_descriptor_t *td, const void *sptr,
872 asn_app_constraint_failed_f *ctfailcb, void *app_key) {
873 const asn_TYPE_member_t *elm = td->elements;
874 asn_constr_check_f *constr;
875 const asn_anonymous_set_ *list = _A_CSET_FROM_VOID(sptr);
879 ASN__CTFAIL(app_key, td, sptr,
880 "%s: value not given (%s:%d)",
881 td->name, __FILE__, __LINE__);
885 constr = elm->encoding_constraints.general_constraints;
886 if(!constr) constr = elm->type->encoding_constraints.general_constraints;
889 * Iterate over the members of an array.
890 * Validate each in turn, until one fails.
892 for(i = 0; i < list->count; i++) {
893 const void *memb_ptr = list->array[i];
896 if(!memb_ptr) continue;
898 ret = constr(elm->type, memb_ptr, ctfailcb, app_key);
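/*
 * Usage sketch (not part of this file): the constraint checker above is
 * normally invoked through asn_check_constraints().  asn_DEF_MySet and "set"
 * are hypothetical.
 */
#if 0
	char errbuf[128];
	size_t errlen = sizeof(errbuf);
	if(asn_check_constraints(&asn_DEF_MySet, set, errbuf, &errlen)) {
		/* errbuf holds a human-readable reason, errlen its length */
	}
#endif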
905 #ifndef ASN_DISABLE_PER_SUPPORT
908 SET_OF_decode_uper(const asn_codec_ctx_t *opt_codec_ctx,
909 const asn_TYPE_descriptor_t *td,
910 const asn_per_constraints_t *constraints, void **sptr,
911 asn_per_data_t *pd) {
913 const asn_SET_OF_specifics_t *specs = (const asn_SET_OF_specifics_t *)td->specifics;
914 const asn_TYPE_member_t *elm = td->elements; /* Single one */
916 asn_anonymous_set_ *list;
917 const asn_per_constraint_t *ct;
921 if(ASN__STACK_OVERFLOW_CHECK(opt_codec_ctx))
925 * Create the target structure if it is not present already.
928 st = *sptr = CALLOC(1, specs->struct_size);
929 if(!st) ASN__DECODE_FAILED;
931 list = _A_SET_FROM_VOID(st);
933 /* Figure out which constraints to use */
934 if(constraints) ct = &constraints->size;
935 else if(td->encoding_constraints.per_constraints)
936 ct = &td->encoding_constraints.per_constraints->size;
939 if(ct && ct->flags & APC_EXTENSIBLE) {
940 int value = per_get_few_bits(pd, 1);
941 if(value < 0) ASN__DECODE_STARVED;
942 if(value) ct = 0; /* Not restricted! */
945 if(ct && ct->effective_bits >= 0) {
946 /* X.691, #19.5: No length determinant */
947 nelems = per_get_few_bits(pd, ct->effective_bits);
948 ASN_DEBUG("Preparing to fetch %ld+%ld elements from %s",
949 (long)nelems, ct->lower_bound, td->name);
950 if(nelems < 0) ASN__DECODE_STARVED;
951 nelems += ct->lower_bound;
959 nelems = uper_get_length(pd, -1, 0, &repeat);
960 ASN_DEBUG("Got to decode %" ASN_PRI_SSIZE " elements (eff %d)",
961 nelems, (int)(ct ? ct->effective_bits : -1));
962 if(nelems < 0) ASN__DECODE_STARVED;
965 for(i = 0; i < nelems; i++) {
967 ASN_DEBUG("SET OF %s decoding", elm->type->name);
968 rv = elm->type->op->uper_decoder(opt_codec_ctx, elm->type,
969 elm->encoding_constraints.per_constraints, &ptr, pd);
970 ASN_DEBUG("%s SET OF %s decoded %d, %p",
971 td->name, elm->type->name, rv.code, ptr);
972 if(rv.code == RC_OK) {
973 if(ASN_SET_ADD(list, ptr) == 0) {
974 if(rv.consumed == 0 && nelems > 200) {
975 /* Protect from SET OF NULL compression bombs. */
980 ASN_DEBUG("Failed to add element into %s",
985 ASN_DEBUG("Failed decoding %s of %s (SET OF)",
986 elm->type->name, td->name);
988 if(ptr) ASN_STRUCT_FREE(*elm->type, ptr);
992 nelems = -1; /* Allow uper_get_length() */
995 ASN_DEBUG("Decoded %s as SET OF", td->name);
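/*
 * Usage sketch (not part of this file): this decoder is normally reached
 * through uper_decode_complete().  MySet_t and asn_DEF_MySet are
 * hypothetical, as above.
 */
#if 0
	MySet_t *set = 0;
	asn_dec_rval_t rv = uper_decode_complete(0, &asn_DEF_MySet,
		(void **)&set, buffer, buffer_size);
	if(rv.code != RC_OK) ASN_STRUCT_FREE(asn_DEF_MySet, set);
#endif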
1003 SET_OF_encode_uper(const asn_TYPE_descriptor_t *td,
1004 const asn_per_constraints_t *constraints, const void *sptr,
1005 asn_per_outp_t *po) {
1006 const asn_anonymous_set_ *list;
1007 const asn_per_constraint_t *ct;
1008 const asn_TYPE_member_t *elm = td->elements;
1009 struct _el_buffer *encoded_els;
1013 if(!sptr) ASN__ENCODE_FAILED;
1015 list = _A_CSET_FROM_VOID(sptr);
1019 ASN_DEBUG("Encoding %s as SET OF (%d)", td->name, list->count);
1021 if(constraints) ct = &constraints->size;
1022 else if(td->encoding_constraints.per_constraints)
1023 ct = &td->encoding_constraints.per_constraints->size;
1026 /* If extensible constraint, check if size is in root */
1029 (list->count < ct->lower_bound || list->count > ct->upper_bound);
1030 ASN_DEBUG("lb %ld ub %ld %s", ct->lower_bound, ct->upper_bound,
1031 ct->flags & APC_EXTENSIBLE ? "ext" : "fix");
1032 if(ct->flags & APC_EXTENSIBLE) {
1033 /* Declare whether size is in extension root */
1034 if(per_put_few_bits(po, not_in_root, 1)) ASN__ENCODE_FAILED;
1035 if(not_in_root) ct = 0;
1036 } else if(not_in_root && ct->effective_bits >= 0) {
1042 if(ct && ct->effective_bits >= 0) {
1043 /* X.691, #19.5: No length determinant */
1044 if(per_put_few_bits(po, list->count - ct->lower_bound,
1045 ct->effective_bits))
1047 } else if(list->count == 0) {
1048 /* When the list is empty, add only the length determinant
1049 * X.691, #20.6 and #11.9.4.1
1051 if (uper_put_length(po, 0, 0)) {
1054 ASN__ENCODED_OK(er);
1059 * Canonical UPER #22.1 mandates dynamic sorting of the SET OF elements
1060 * according to their encodings. Build an array of the encoded elements.
1062 encoded_els = SET_OF__encode_sorted(elm, list, SOES_CUPER);
1064 for(encoded_edx = 0; (ssize_t)encoded_edx < list->count;) {
1069 if(ct && ct->effective_bits >= 0) {
1070 may_encode = list->count;
1073 uper_put_length(po, list->count - encoded_edx, &need_eom);
1074 if(may_encode < 0) ASN__ENCODE_FAILED;
1077 for(edx = encoded_edx; edx < encoded_edx + may_encode; edx++) {
1078 const struct _el_buffer *el = &encoded_els[edx];
1079 if(asn_put_many_bits(po, el->buf,
1080 (8 * el->length) - el->bits_unused) < 0) {
1085 if(need_eom && uper_put_length(po, 0, 0))
1086 ASN__ENCODE_FAILED; /* End of Message length */
1088 encoded_edx += may_encode;
1091 SET_OF__encode_sorted_free(encoded_els, list->count);
1093 if((ssize_t)encoded_edx == list->count) {
1094 ASN__ENCODED_OK(er);
1101 #endif /* ASN_DISABLE_PER_SUPPORT */
1103 struct comparable_ptr {
1104 const asn_TYPE_descriptor_t *td;
1109 SET_OF__compare_cb(const void *aptr, const void *bptr) {
1110 const struct comparable_ptr *a = aptr;
1111 const struct comparable_ptr *b = bptr;
1112 assert(a->td == b->td);
1113 return a->td->op->compare_struct(a->td, a->sptr, b->sptr);
1117 SET_OF_compare(const asn_TYPE_descriptor_t *td, const void *aptr,
1119 const asn_anonymous_set_ *a = _A_CSET_FROM_VOID(aptr);
1120 const asn_anonymous_set_ *b = _A_CSET_FROM_VOID(bptr);
1123 struct comparable_ptr *asorted;
1124 struct comparable_ptr *bsorted;
1125 ssize_t common_length;
1129 if(b->count) return -1;
1131 } else if(b->count == 0) {
1135 asorted = MALLOC(a->count * sizeof(asorted[0]));
1136 bsorted = MALLOC(b->count * sizeof(bsorted[0]));
1137 if(!asorted || !bsorted) {
1143 for(idx = 0; idx < a->count; idx++) {
1144 asorted[idx].td = td->elements->type;
1145 asorted[idx].sptr = a->array[idx];
1148 for(idx = 0; idx < b->count; idx++) {
1149 bsorted[idx].td = td->elements->type;
1150 bsorted[idx].sptr = b->array[idx];
1153 qsort(asorted, a->count, sizeof(asorted[0]), SET_OF__compare_cb);
1154 qsort(bsorted, b->count, sizeof(bsorted[0]), SET_OF__compare_cb);
1156 common_length = (a->count < b->count ? a->count : b->count);
1157 for(idx = 0; idx < common_length; idx++) {
1158 int ret = td->elements->type->op->compare_struct(
1159 td->elements->type, asorted[idx].sptr, bsorted[idx].sptr);
1170 if(idx < b->count) /* more elements in b */
1171 return -1; /* a is shorter, so put it first */
1172 if(idx < a->count) return 1;
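/*
 * Illustrative note (hypothetical values): because both sides are sorted
 * before comparison, SET OF comparison is order-insensitive, and a proper
 * prefix sorts first.  asn_DEF_MySet and the set_* pointers are hypothetical.
 */
#if 0
	/* {3,1,2} and {2,3,1} sort to the same sequence, hence compare equal */
	SET_OF_compare(&asn_DEF_MySet, set_with_3_1_2, set_with_2_3_1) == 0;
	/* {1,2} is a proper prefix of {1,2,3}, so it compares as smaller */
	SET_OF_compare(&asn_DEF_MySet, set_with_1_2, set_with_1_2_3) < 0;
#endif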
1183 asn_TYPE_operation_t asn_OP_SET_OF = {
1191 #ifdef ASN_DISABLE_OER_SUPPORT
1198 #ifdef ASN_DISABLE_PER_SUPPORT
1204 #endif /* ASN_DISABLE_PER_SUPPORT */
1206 0 /* Use generic outmost tag fetcher */
1210 asn_random_fill_result_t
1211 SET_OF_random_fill(const asn_TYPE_descriptor_t *td, void **sptr,
1212 const asn_encoding_constraints_t *constraints,
1213 size_t max_length) {
1214 const asn_SET_OF_specifics_t *specs =
1215 (const asn_SET_OF_specifics_t *)td->specifics;
1216 asn_random_fill_result_t res_ok = {ARFILL_OK, 0};
1217 asn_random_fill_result_t result_failed = {ARFILL_FAILED, 0};
1218 asn_random_fill_result_t result_skipped = {ARFILL_SKIPPED, 0};
1219 const asn_TYPE_member_t *elm = td->elements;
1221 long max_elements = 5;
1222 long slb = 0; /* Lower size bound */
1223 long sub = 0; /* Upper size bound */
1226 if(max_length == 0) return result_skipped;
1229 st = (*sptr = CALLOC(1, specs->struct_size));
1231 return result_failed;
1235 switch(asn_random_between(0, 6)) {
1236 case 0: max_elements = 0; break;
1237 case 1: max_elements = 1; break;
1238 case 2: max_elements = 5; break;
1239 case 3: max_elements = max_length; break;
1240 case 4: max_elements = max_length / 2; break;
1241 case 5: max_elements = max_length / 4; break;
1244 sub = slb + max_elements;
1246 if(!constraints || !constraints->per_constraints)
1247 constraints = &td->encoding_constraints;
1248 if(constraints->per_constraints) {
1249 const asn_per_constraint_t *pc = &constraints->per_constraints->size;
1250 if(pc->flags & APC_SEMI_CONSTRAINED) {
1251 slb = pc->lower_bound;
1252 sub = pc->lower_bound + max_elements;
1253 } else if(pc->flags & APC_CONSTRAINED) {
1254 slb = pc->lower_bound;
1255 sub = pc->upper_bound;
1256 if(sub - slb > max_elements) sub = slb + max_elements;
1260 /* Bias towards edges of allowed space */
1261 switch(asn_random_between(-1, 4)) {
1264 /* Prepare lengths somewhat outside of constrained range. */
1265 if(constraints->per_constraints
1266 && (constraints->per_constraints->size.flags & APC_EXTENSIBLE)) {
1267 switch(asn_random_between(0, 5)) {
1280 rnd_len = asn_random_between(0, slb);
1283 if(sub < (ssize_t)max_length) {
1286 rnd_len = max_length;
1290 if(sub < (ssize_t)max_length) {
1291 rnd_len = asn_random_between(sub + 1, max_length);
1293 rnd_len = max_length;
1297 rnd_len = max_length;
1304 rnd_len = asn_random_between(slb, sub);
1308 rnd_len = asn_random_between(slb + 1, sub);
1313 rnd_len = asn_random_between(slb, slb);
1317 rnd_len = asn_random_between(slb, sub - 1);
1322 rnd_len = asn_random_between(sub, sub);
1326 for(; rnd_len > 0; rnd_len--) {
1327 asn_anonymous_set_ *list = _A_SET_FROM_VOID(st);
1329 asn_random_fill_result_t tmpres = elm->type->op->random_fill(
1330 elm->type, &ptr, &elm->encoding_constraints,
1331 (max_length > res_ok.length ? max_length - res_ok.length : 0)
1333 switch(tmpres.code) {
1335 ASN_SET_ADD(list, ptr);
1336 res_ok.length += tmpres.length;
1338 case ARFILL_SKIPPED: