/*-
 * Copyright (c) 2003, 2004 Lev Walkin <vlm@lionet.info>. All rights reserved.
 * Redistribution and modifications are permitted subject to BSD license.
 */
#include <constr_SET.h>
#include <netinet/in.h>	/* for ntohl() */
#include <assert.h>	/* for assert() */

/*
 * Number of bytes left for this structure.
 * (ctx->left) indicates the number of bytes _transferred_ for the structure.
 * (size) contains the number of bytes in the buffer passed.
 */
#define	LEFT	((size<(size_t)ctx->left)?size:ctx->left)
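/*
 * (Illustration: with a definite length, ctx->left == 10 and a 4-byte buffer
 * give LEFT == 4, while ctx->left == 2 and a 4-byte buffer give LEFT == 2.
 * With an indefinite length ctx->left is negative, the (size_t) cast turns it
 * into a huge value, and LEFT degenerates to the buffer size.)
 */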

/*
 * If the subprocessor function returns with an indication that it wants
 * more data, it may well be a fatal decoding problem, because the
 * size is constrained by the <TLV>'s L, even if the buffer size allows
 * reading more data.
 * For example, consider the buffer containing the following TLVs:
 * <T:5><L:1><V> <T:6>...
 * The TLV length clearly indicates that one byte is expected in V, so
 * if the V processor returns with "want more data", that is a fatal error,
 * even if the buffer contains way more data than the V processor has seen.
 */
#define	SIZE_VIOLATION	(ctx->left >= 0 && (size_t)ctx->left <= size)
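/*
 * (In other words: the length is definite and the advertised structure fits
 * entirely within the buffer handed to us, so a "want more data" indication
 * from a subparser cannot be satisfied by reading further and is treated as
 * a fatal error.)
 */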

/*
 * This macro "eats" the part of the buffer which is definitely "consumed",
 * i.e. was correctly converted into local representation or rightfully skipped.
 */
#define	ADVANCE(num_bytes)	do {		\
		size_t num = num_bytes;		\
		ptr += num;			\
		size -= num;			\
		if(ctx->left >= 0)		\
			ctx->left -= num;	\
		consumed_myself += num;		\
	} while(0)

/*
 * Switch to the next phase of parsing.
 */
#define	NEXT_PHASE(ctx)	do {			\
		ctx->phase++;			\
		ctx->step = 0;			\
	} while(0)

/*
 * Return a standardized complex structure.
 */
#define	RETURN(_code)	do {			\
		rval.code = _code;		\
		rval.consumed = consumed_myself;\
		return rval;			\
	} while(0)

/*
 * Tags are canonically sorted in the tag2element map.
 */
static int
_t2e_cmp(const void *ap, const void *bp) {
	const asn1_TYPE_tag2member_t *a = ap;
	const asn1_TYPE_tag2member_t *b = bp;
	int a_class = BER_TAG_CLASS(a->el_tag);
	int b_class = BER_TAG_CLASS(b->el_tag);

	if(a_class == b_class) {
		ber_tlv_tag_t a_value = BER_TAG_VALUE(a->el_tag);
		ber_tlv_tag_t b_value = BER_TAG_VALUE(b->el_tag);

		if(a_value == b_value)
			return 0;
		else if(a_value < b_value)
			return -1;
		else
			return 1;
	} else if(a_class < b_class) {
		return -1;
	} else {
		return 1;
	}
}
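
/*
 * (Effectively the members are ordered by tag class first (UNIVERSAL,
 * APPLICATION, CONTEXT-SPECIFIC, PRIVATE), then by tag number within a class,
 * which is the canonical ordering X.690 prescribes for DER-encoded SET
 * components; this assumes BER_TAG_CLASS() ranks the classes in that
 * relative order.)
 */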

/*
 * The decoder of the SET type.
 */
ber_dec_rval_t
SET_decode_ber(asn1_TYPE_descriptor_t *sd,
	void **struct_ptr, void *ptr, size_t size, int tag_mode) {
	/*
	 * Bring closer parts of structure description.
	 */
	asn1_SET_specifics_t *specs = sd->specifics;
	asn1_SET_element_t *elements = specs->elements;

	/*
	 * Parts of the structure being constructed.
	 */
	void *st = *struct_ptr;	/* Target structure. */
	ber_dec_ctx_t *ctx;	/* Decoder context */

	ber_tlv_tag_t tlv_tag;	/* T from TLV */
	//ber_tlv_len_t tlv_len;	/* L from TLV */
	ber_dec_rval_t rval;	/* Return code from subparsers */

	ssize_t consumed_myself = 0;	/* Consumed bytes from ptr */
	int edx;			/* SET element's index */

	ASN_DEBUG("Decoding %s as SET", sd->name);

	/*
	 * Create the target structure if it is not present already.
	 */
	if(st == 0) {
		st = *struct_ptr = CALLOC(1, specs->struct_size);
		if(st == 0) {
			RETURN(RC_FAIL);
		}
	}

	/*
	 * Restore parsing context.
	 */
	ctx = (st + specs->ctx_offset);

	/*
	 * Start to parse where left previously
	 */
	switch(ctx->phase) {
	case 0:
		/*
		 * PHASE 0.
		 * Check that the set of tags associated with given structure
		 * perfectly fits our expectations.
		 */

		rval = ber_check_tags(sd, ctx, ptr, size,
			tag_mode, &ctx->left, 0);
		if(rval.code != RC_OK) {
			ASN_DEBUG("%s tagging check failed: %d",
				sd->name, rval.code);
			consumed_myself += rval.consumed;
			RETURN(rval.code);
		}
		if(ctx->left >= 0)
			ctx->left += rval.consumed;	/* ?Subtracted below! */
		ADVANCE(rval.consumed);

		NEXT_PHASE(ctx);

		ASN_DEBUG("Structure advertised %ld bytes, "
			"buffer contains %ld", (long)ctx->left, (long)size);

		/* Fall through */
	case 1:
		/*
		 * PHASE 1.
		 * From the place where we've left it previously,
		 * try to decode the next member from the list of
		 * this structure's elements.
		 * (ctx->step) stores the member being processed
		 * between invocations and the microphase {0,1} of parsing
		 * that member:
		 *	step = (2 * <member_number> + <microphase>).
		 * Note, however, that the elements in BER may arrive out of
		 * order, yet DER mandates that they shall arrive in the
		 * canonical order of their tags. So there is room
		 * for optimization.
		 */
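		/*
		 * (Example: ctx->step == 5 encodes element index 2 with the
		 * low bit set, so upon re-entry decoding resumes directly in
		 * MICROPHASE 2, i.e. within the member-specific decoder for
		 * that element.)
		 */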
	  for(edx = (ctx->step >> 1); edx < specs->elements_count;
			ctx->step = (ctx->step & ~1) + 2,
			edx = (ctx->step >> 1)) {
		void *memb_ptr;		/* Pointer to the member */
		void *memb_ptr2;	/* Pointer to that pointer */
		ssize_t tag_len;	/* Length of TLV's T */

		if(ctx->step & 1)
			goto microphase2;

		/*
		 * MICROPHASE 1: Synchronize decoding.
		 */

		if(ctx->left == 0)
			/*
			 * No more things to decode.
			 * Exit out of here and check whether all mandatory
			 * elements have been received (in the next phase).
			 */
			break;

		/*
		 * Fetch the T from TLV.
		 */
		tag_len = ber_fetch_tag(ptr, LEFT, &tlv_tag);
		switch(tag_len) {
		case 0: if(!SIZE_VIOLATION) RETURN(RC_WMORE);
			/* Fall through */
		case -1: RETURN(RC_FAIL);
		}
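		/*
		 * (ber_fetch_tag() is expected to return the number of bytes
		 * occupied by T, 0 if more data is needed to parse the tag,
		 * or -1 on a fundamental failure; a positive value simply
		 * falls out of the switch above.)
		 */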

		if(ctx->left < 0 && ((uint8_t *)ptr)[0] == 0) {
			if(LEFT < 2) {
				if(SIZE_VIOLATION)
					RETURN(RC_FAIL);
				else
					RETURN(RC_WMORE);
			} else if(((uint8_t *)ptr)[1] == 0) {
				/*
				 * Found the terminator of the
				 * indefinite length structure.
				 * Invoke the generic finalization function.
				 */
				goto phase3;
			}
		}

		if(BER_TAGS_EQUAL(tlv_tag, elements[edx].tag)) {
			/*
			 * The elements seem to go in order.
			 * This is not particularly strange,
			 * but is not strongly anticipated either.
			 */
		} else {
			asn1_TYPE_tag2member_t *t2m;
			asn1_TYPE_tag2member_t key;

			key.el_tag = tlv_tag;
			t2m = bsearch(&key, specs->tag2el, specs->tag2el_count,
				sizeof(specs->tag2el[0]), _t2e_cmp);
			if(t2m) {
				/*
				 * Found the element corresponding to the tag.
				 */
				edx = t2m->el_no;
				ctx->step = 2 * edx;
			} else if(specs->extensible == 0) {
				ASN_DEBUG("Unexpected tag %s "
					"in non-extensible SET %s",
					ber_tlv_tag_string(tlv_tag), sd->name);
				RETURN(RC_FAIL);
			} else {
				/* Skip this tag */
				ssize_t skip;

				ASN_DEBUG("Skipping unknown tag %s",
					ber_tlv_tag_string(tlv_tag));

				skip = ber_skip_length(
					BER_TLV_CONSTRUCTED(ptr),
					ptr + tag_len, LEFT - tag_len);

				switch(skip) {
				case 0: if(!SIZE_VIOLATION) RETURN(RC_WMORE);
					/* Fall through */
				case -1: RETURN(RC_FAIL);
				}

				ADVANCE(skip + tag_len);
				ctx->step -= 2;
				edx--;
				continue;	/* Try again with the next tag */
			}
		}

		/*
		 * MICROPHASE 2: Invoke the member-specific decoder.
		 */
		ctx->step |= 1;	/* Confirm entering next microphase */
	microphase2:

		/*
		 * Check for duplications: must not overwrite
		 * already decoded elements.
		 */
		if(ASN_SET_ISPRESENT2(st + specs->pres_offset, edx)) {
			ASN_DEBUG("SET %s: Duplicate element %s (%d)",
				sd->name, elements[edx].name, edx);
			RETURN(RC_FAIL);
		}

		/*
		 * Compute the position of the member inside a structure,
		 * and also a type of containment (it may be contained
		 * as pointer or using inline inclusion).
		 */
		if(elements[edx].optional) {
			/* Optional member, hereby, a simple pointer */
			memb_ptr2 = (char *)st + elements[edx].memb_offset;
		} else {
			/*
			 * A pointer to a pointer
			 * holding the start of the structure
			 */
			memb_ptr = (char *)st + elements[edx].memb_offset;
			memb_ptr2 = &memb_ptr;
		}
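		/*
		 * (In both cases memb_ptr2 ends up being a pointer to a
		 * pointer to the member: for optional members it addresses
		 * the pointer field inside the target structure, initially
		 * NULL, so the member decoder allocates the member; for
		 * mandatory members it addresses the local memb_ptr, which
		 * already points at the in-place member, so the member
		 * decoder fills it in without allocating.)
		 */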
		/*
		 * Invoke the member fetch routine according to member's type
		 */
		rval = elements[edx].type->ber_decoder(
				(void *)elements[edx].type,
				memb_ptr2, ptr, LEFT,
				elements[edx].tag_mode);
		switch(rval.code) {
		case RC_OK:
			ASN_SET_MKPRESENT(st + specs->pres_offset, edx);
			break;
		case RC_WMORE: /* More data expected */
			if(!SIZE_VIOLATION) {
				ADVANCE(rval.consumed);
				RETURN(RC_WMORE);
			}
			/* Fall through */
		case RC_FAIL: /* Fatal error */
			RETURN(RC_FAIL);
		} /* switch(rval) */

		ADVANCE(rval.consumed);
	  }	/* for(all structure members) */

	phase3:
		ctx->phase = 3;
		/* Fall through */
	case 3:
	case 4:	/* Only 00 is expected */
		ASN_DEBUG("SET %s Leftover: %ld, size = %ld",
			sd->name, (long)ctx->left, (long)size);

		/*
		 * Skip everything until the end of the SET.
		 */
		while(ctx->left) {
			ssize_t tl, ll;

			tl = ber_fetch_tag(ptr, LEFT, &tlv_tag);
			switch(tl) {
			case 0: if(!SIZE_VIOLATION) RETURN(RC_WMORE);
				/* Fall through */
			case -1: RETURN(RC_FAIL);
			}

			/*
			 * If expected <0><0>...
			 */
			if(ctx->left < 0
				&& ((uint8_t *)ptr)[0] == 0) {
				if(LEFT < 2) {
					if(SIZE_VIOLATION)
						RETURN(RC_FAIL);
					else
						RETURN(RC_WMORE);
				} else if(((uint8_t *)ptr)[1] == 0) {
					/*
					 * Correctly finished with <0><0>.
					 */
					ADVANCE(2);
					ctx->left++;
					ctx->phase = 4;
					continue;
				}
			}

			if(specs->extensible == 0 || ctx->phase == 4) {
				ASN_DEBUG("Unexpected continuation "
					"of a non-extensible type %s",
					sd->name);
				RETURN(RC_FAIL);
			}

			ll = ber_skip_length(
				BER_TLV_CONSTRUCTED(ptr),
				ptr + tl, LEFT - tl);
			switch(ll) {
			case 0: if(!SIZE_VIOLATION) RETURN(RC_WMORE);
				/* Fall through */
			case -1: RETURN(RC_FAIL);
			}

			ADVANCE(tl + ll);
		}

		ctx->phase = 5;
	case 5:
		/*
		 * Check that all mandatory elements are present.
		 */
		for(edx = 0; edx < specs->elements_count;
			edx += (8 * sizeof(specs->_mandatory_elements[0]))) {
			unsigned int midx, pres, must;

			midx = edx/(8 * sizeof(specs->_mandatory_elements[0]));
			pres = ((unsigned int *)(st+specs->pres_offset))[midx];
			must = ntohl(specs->_mandatory_elements[midx]);

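			/*
			 * (The ntohl() above brings the compiler-produced
			 * mandatory-elements mask into the same bit order as
			 * the presence words read from the structure; a
			 * mandatory element is missing whenever its bit is
			 * set in "must" but clear in "pres".)
			 */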
			if((pres & must) == must) {
				/*
				 * Yes, everything seems to be in place.
				 */
			} else {
				ASN_DEBUG("One or more mandatory elements "
					"of a SET %s %d (%08x.%08x)=%08x "
					"are not present",
					sd->name,
					midx,
					pres,
					must,
					(~(pres & must) & must)
				);
				RETURN(RC_FAIL);
			}
		}

		NEXT_PHASE(ctx);
	}

	RETURN(RC_OK);
}

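/*
 * Usage sketch (illustrative only; Rect_t and asn1_DEF_Rect stand for a
 * hypothetical compiler-generated SET type and its type descriptor, and are
 * not defined in this file):
 *
 *	Rect_t *rect = 0;	(NULL: let the decoder allocate the target)
 *	ber_dec_rval_t rv = SET_decode_ber(&asn1_DEF_Rect,
 *		(void **)&rect, buffer, buffer_size, 0);
 *
 * rv.code is RC_OK when rv.consumed bytes were decoded successfully,
 * RC_WMORE when the call should be repeated with more data, and RC_FAIL
 * on a fatal decoding problem.
 */
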
/*
 * The DER encoder of the SET type.
 */
der_enc_rval_t
SET_encode_der(asn1_TYPE_descriptor_t *sd,
	void *ptr, int tag_mode, ber_tlv_tag_t tag,
	asn_app_consume_bytes_f *cb, void *app_key) {
	asn1_SET_specifics_t *specs = sd->specifics;
	size_t computed_size = 0;
	der_enc_rval_t my_erval;
	int t2m_build_own = (specs->tag2el_count != specs->elements_count);
	asn1_TYPE_tag2member_t *t2m;
	int t2m_count;
	ssize_t ret;
	int edx;

	/*
	 * Use existing, or build our own tags map.
	 */
	if(t2m_build_own) {
		t2m = alloca(specs->elements_count * sizeof(t2m[0]));
		t2m_count = 0;
	} else {
		/*
		 * There is no untagged CHOICE in this SET.
		 * Employ existing table.
		 */
		t2m = specs->tag2el;
		t2m_count = specs->tag2el_count;
	}

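	/*
	 * (A dynamically built map is needed precisely when some member is an
	 * untagged CHOICE: its outermost tag depends on the actual value, so
	 * the canonical DER ordering of the components can only be
	 * established at run time, using asn1_TYPE_outmost_tag() below.)
	 */
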
	/*
	 * Gather the length of the underlying members sequence.
	 */
	for(edx = 0; edx < specs->elements_count; edx++) {
		asn1_SET_element_t *elm = &specs->elements[edx];
		der_enc_rval_t erval;
		void *memb_ptr;

		/*
		 * Compute the length of the encoding of this member.
		 */
		if(elm->optional) {
			memb_ptr = *(void **)((char *)ptr + elm->memb_offset);
			if(!memb_ptr) {
				if(t2m_build_own) {
					t2m[t2m_count].el_no = edx;
					t2m[t2m_count].el_tag = 0;
					t2m_count++;
				}
				continue;
			}
		} else {
			memb_ptr = (void *)((char *)ptr + elm->memb_offset);
		}
		erval = elm->type->der_encoder(elm->type, memb_ptr,
			elm->tag_mode, elm->tag,
			0, 0);
		if(erval.encoded == -1)
			return erval;
		computed_size += erval.encoded;

		/*
		 * Remember the outmost tag of this member.
		 */
		if(t2m_build_own) {
			t2m[t2m_count].el_no = edx;
			t2m[t2m_count].el_tag = asn1_TYPE_outmost_tag(
				elm->type, memb_ptr, elm->tag_mode, elm->tag);
			t2m_count++;
		} else {
			/*
			 * No dynamic sorting is necessary.
			 */
		}
	}

	/*
	 * Finalize order of the components.
	 */
	assert(t2m_count == specs->elements_count);
	if(t2m_build_own) {
		/*
		 * Sort the underlying members according to their
		 * canonical tags order. DER encoding mandates it.
		 */
		qsort(t2m, t2m_count, sizeof(specs->tag2el[0]), _t2e_cmp);
	} else {
		/*
		 * Tags are already sorted by the compiler.
		 */
	}

	/*
	 * Encode the TLV for the sequence itself.
	 */
	ret = der_write_tags(sd, computed_size, tag_mode, tag, cb, app_key);
	if(ret == -1) {
		my_erval.encoded = -1;
		my_erval.failed_type = sd;
		my_erval.structure_ptr = ptr;
		return my_erval;
	}
	my_erval.encoded = computed_size + ret;

	if(!cb) return my_erval;

	/*
	 * Encode all members.
	 */
	for(edx = 0; edx < specs->elements_count; edx++) {
		asn1_SET_element_t *elm;
		der_enc_rval_t erval;
		void *memb_ptr;

		/* Encode according to the tag order */
		elm = &specs->elements[t2m[edx].el_no];

		if(elm->optional) {
			memb_ptr = *(void **)((char *)ptr + elm->memb_offset);
			if(!memb_ptr) continue;
		} else {
			memb_ptr = (void *)((char *)ptr + elm->memb_offset);
		}
		erval = elm->type->der_encoder(elm->type, memb_ptr,
			elm->tag_mode, elm->tag,
			cb, app_key);
		if(erval.encoded == -1)
			return erval;
		computed_size -= erval.encoded;
	}

	if(computed_size != 0) {
		/*
		 * Encoded size is not equal to the computed size.
		 */
		my_erval.encoded = -1;
		my_erval.failed_type = sd;
		my_erval.structure_ptr = ptr;
	}

	return my_erval;
}

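/*
 * Usage sketch (illustrative only; write_cb and asn1_DEF_Rect are
 * hypothetical, the callback merely has to match asn_app_consume_bytes_f
 * and return 0 on success):
 *
 *	static int write_cb(const void *buf, size_t size, void *key) {
 *		return (fwrite(buf, 1, size, (FILE *)key) == size) ? 0 : -1;
 *	}
 *	...
 *	der_enc_rval_t er = SET_encode_der(&asn1_DEF_Rect, rect,
 *		0, 0, write_cb, stdout);
 *	if(er.encoded == -1) ... er.failed_type names the offending type ...
 *
 * Passing a NULL callback, as the first pass above does for its members,
 * only computes the encoding size without emitting any bytes.
 */
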
int
SET_print(asn1_TYPE_descriptor_t *td, const void *sptr, int ilevel,
		asn_app_consume_bytes_f *cb, void *app_key) {
	asn1_SET_specifics_t *specs = td->specifics;
	int edx;
	int ret;

	if(!sptr) return cb("<absent>", 8, app_key);

	/* Dump preamble */
	if(cb(td->name, strlen(td->name), app_key)
	|| cb(" ::= {\n", 7, app_key))
		return -1;

	for(edx = 0; edx < specs->elements_count; edx++) {
		asn1_SET_element_t *elm = &specs->elements[edx];
		const void *memb_ptr;

		if(elm->optional) {
			memb_ptr = *(const void * const *)((const char *)sptr + elm->memb_offset);
			if(!memb_ptr) continue;
		} else {
			memb_ptr = (const void *)((const char *)sptr + elm->memb_offset);
		}

		/* Indentation */
		for(ret = 0; ret < ilevel; ret++) cb(" ", 1, app_key);

		/* Print the member's name and stuff */
		if(cb(elm->name, strlen(elm->name), app_key)
		|| cb(": ", 2, app_key))
			return -1;

		/* Print the member itself */
		ret = elm->type->print_struct(elm->type, memb_ptr, ilevel + 4,
			cb, app_key);
		if(ret) return ret;

		ret = cb("\n", 1, app_key);
		if(ret) return ret;
	}

	/* Indentation */
	for(ret = 0; ret < ilevel - 4; ret++) cb(" ", 1, app_key);

	return cb("}", 1, app_key);
}

void
SET_free(asn1_TYPE_descriptor_t *td, void *ptr, int contents_only) {
	asn1_SET_specifics_t *specs;
	int edx;

	if(!td || !ptr)
		return;
	specs = td->specifics;

	ASN_DEBUG("Freeing %s as SET", td->name);

	for(edx = 0; edx < specs->elements_count; edx++) {
		asn1_SET_element_t *elm = &specs->elements[edx];
		void *memb_ptr;
		if(elm->optional) {
			memb_ptr = *(void **)((char *)ptr + elm->memb_offset);
			if(memb_ptr)
				elm->type->free_struct(elm->type, memb_ptr, 0);
		} else {
			memb_ptr = (void *)((char *)ptr + elm->memb_offset);
			elm->type->free_struct(elm->type, memb_ptr, 1);
		}
	}

	if(!contents_only) {
		FREEMEM(ptr);
	}
}

int
SET_constraint(asn1_TYPE_descriptor_t *td, const void *sptr,
		asn_app_consume_bytes_f *app_errlog, void *app_key) {
	asn1_SET_specifics_t *specs = td->specifics;
	int edx;
	int ret;

	if(!sptr) {
		_ASN_ERRLOG("%s: value not given", td->name);
		return -1;
	}

	/*
	 * Iterate over structure members and check their validity.
	 */
	for(edx = 0; edx < specs->elements_count; edx++) {
		asn1_SET_element_t *elm = &specs->elements[edx];
		const void *memb_ptr;

		if(elm->optional) {
			memb_ptr = *(const void * const *)((const char *)sptr + elm->memb_offset);
			if(!memb_ptr) {
				if(ASN_SET_ISPRESENT2(
					&(specs->_mandatory_elements), edx)) {
					_ASN_ERRLOG(
						"%s: mandatory element "
						"%s absent",
						td->name, elm->name);
					return -1;
				}
				continue;
			}
		} else {
			memb_ptr = (const void *)((const char *)sptr + elm->memb_offset);
		}

		ret = elm->type->check_constraints(elm->type, memb_ptr,
				app_errlog, app_key);
		if(ret) return ret;
	}

	return 0;
}