asterix: Dissector automatic update from spec files

The asterix data format is a complex family of categories,
where each individual category exists in multiple editions.
As a result of the many variants, epan/dissectors/packet-asterix.c
is one of the largest dissectors.

So far, the asterix dissector has been maintained manually, with the
generic decoding routines and category/edition-specific definitions
entangled in the same file (packet-asterix.c).

This commit preserves the overall dissector structure, but makes it
easy to update the dissector with new categories or editions as they
become available (via the update script added in this commit).
See the tools/asterix/README.md file for the dissector update procedure.

This commit includes:

- tools/asterix/packet-asterix-template.c
  Extraction of generic asterix decoding routines and
  common data structures.

- tools/asterix/update-specs.py
  Update script that renders the template with up-to-date asterix
  spec files. The asterix spec files themselves are maintained in
  a separate repository.

- epan/dissectors/packet-asterix.c
  Automatically generated dissector for the asterix data format.
  Although generated, this file needs to remain in the repository
  so that the project can be built reproducibly.

The generated asterix dissector was additionally tested with:

- ./tools/check_typed_item_calls.py --mask
- ./tools/fuzz-test.sh

Sync with asterix-specs #cef694825c
Zoran Bošnjak 2021-10-29 23:24:44 +02:00 committed by Wireshark GitLab Utility
parent 278ba4157f
commit 16779dc43e
6 changed files with 38489 additions and 13770 deletions

File diff suppressed because it is too large

File diff suppressed because it is too large

tools/asterix/README.md (new file)

@@ -0,0 +1,39 @@
# Asterix parser generator
*Asterix* is a set of standards, where each standard is defined
as a so-called *asterix category*.
In addition, each *asterix category* is potentially released
in a number of editions. There is no guarantee of backward
compatibility between editions.
The structured version of the asterix specifications is maintained
in a separate project:
<https://zoranbosnjak.github.io/asterix-specs/specs.html>
The purpose of this directory is to convert the structured
specifications (JSON format) to the `epan/dissectors/packet-asterix.c` file,
which is the actual asterix parser for this project.
## Update procedure
It is important **NOT** to edit the `epan/dissectors/packet-asterix.c` file
manually, since it is automatically generated.
To sync with the upstream asterix specifications, run:
```bash
cd {this directory}
# show current upstream git revision (for reference)
export ASTERIX_SPECS_REV=$(./update-specs.py --reference)
echo $ASTERIX_SPECS_REV
# update asterix decoder
./update-specs.py > ../../epan/dissectors/packet-asterix.c
git add ../../epan/dissectors/packet-asterix.c
# inspect change, rebuild project, test...
# commit change, with reference to upstream version
git commit -m "asterix: Sync with asterix-specs #$ASTERIX_SPECS_REV"
```
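
The update script can also be run against a local copy of the asterix
specifications instead of the upstream repository. A minimal sketch
(the local path is illustrative):

```bash
# render the dissector from locally stored definition.json files
./update-specs.py /path/to/asterix-specs/specs > ../../epan/dissectors/packet-asterix.c
```

When no path is given, the specifications are downloaded from the
upstream repository.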

tools/asterix/packet-asterix-template.c (new file)

@@ -0,0 +1,871 @@
/*
Notice:
This file is auto generated, do not edit!
See tools/asterix/README.md for details.
Data source:
---{gitrev}---
*/
/* packet-asterix.c
* Routines for ASTERIX decoding
* By Marko Hrastovec <marko.hrastovec@sloveniacontrol.si>
*
* Wireshark - Network traffic analyzer
* By Gerald Combs <gerald@wireshark.org>
* Copyright 1998 Gerald Combs
*
* SPDX-License-Identifier: GPL-2.0-or-later
*/
/*
* ASTERIX (All-purpose structured EUROCONTROL surveillance
* information exchange) is a protocol related to air traffic control.
*
* The specifications can be downloaded from
* http://www.eurocontrol.int/services/asterix
*/
#include <config.h>
#include <wsutil/bits_ctz.h>
#include <epan/packet.h>
#include <epan/prefs.h>
#include <epan/proto_data.h>
void proto_register_asterix(void);
void proto_reg_handoff_asterix(void);
#define PROTO_TAG_ASTERIX "ASTERIX"
#define ASTERIX_PORT 8600
#define MAX_DISSECT_STR 1024
#define MAX_BUFFER 256
static int proto_asterix = -1;
static gint hf_asterix_category = -1;
static gint hf_asterix_length = -1;
static gint hf_asterix_message = -1;
static gint hf_asterix_fspec = -1;
static gint hf_re_field_len = -1;
static gint hf_spare = -1;
static gint hf_counter = -1;
static gint hf_XXX_FX = -1;
static gint ett_asterix = -1;
static gint ett_asterix_category = -1;
static gint ett_asterix_length = -1;
static gint ett_asterix_message = -1;
static gint ett_asterix_subtree = -1;
static dissector_handle_t asterix_handle;
/* The following defines tell us how to decode the length of
* fields and how to construct their display structure */
#define FIXED 1
#define REPETITIVE 2
#define FX 3
/*#define FX_1 4*/
/*#define RE 5*/
#define COMPOUND 6
/*#define SP 7*/
#define FX_UAP 8 /* The FX_UAP field type is a hack. Currently it *
* is only used in: *
* - I001_020 *
* - asterix_get_active_uap() */
#define EXP 9 /* Explicit (RE or SP) */
/* The following defines tell us how to
* decode and display individual fields. */
#define FIELD_PART_INT 0
#define FIELD_PART_UINT 1
#define FIELD_PART_FLOAT 2
#define FIELD_PART_UFLOAT 3
#define FIELD_PART_SQUAWK 4
#define FIELD_PART_CALLSIGN 5
#define FIELD_PART_ASCII 6
#define FIELD_PART_FX 7
#define FIELD_PART_HEX 8
#define FIELD_PART_IAS_IM 9
#define FIELD_PART_IAS_ASPD 10
typedef struct FieldPart_s FieldPart;
struct FieldPart_s {
guint8 bit_length; /* length of field in bits */
double scaling_factor; /* scaling factor of the field (for instance: 1/128) */
guint8 type; /* Pre-defined type for proper presentation */
gint *hf; /* Pointer to hf representing this kind of data */
const char *format_string; /* format string for showing float values */
};
DIAG_OFF_PEDANTIC
typedef struct AsterixField_s AsterixField;
struct AsterixField_s {
guint8 type; /* type of field */
guint length; /* fixed length */
guint repetition_counter_size; /* size of repetition counter, length of one item is in length */
guint header_length; /* the size is in first header_length bytes of the field */
gint *hf; /* pointer to Wireshark hf_register_info */
const FieldPart **part; /* See the declaration and description of FieldPart above. */
const AsterixField *field[]; /* subfields */
};
DIAG_ON_PEDANTIC
static void dissect_asterix_packet (tvbuff_t *, packet_info *pinfo, proto_tree *);
static void dissect_asterix_data_block (tvbuff_t *tvb, packet_info *pinfo, guint, proto_tree *, guint8, gint);
static gint dissect_asterix_fields (tvbuff_t *, packet_info *pinfo, guint, proto_tree *, guint8, const AsterixField *[]);
static void asterix_build_subtree (tvbuff_t *, packet_info *pinfo, guint, proto_tree *, const AsterixField *);
static void twos_complement (gint64 *, guint8);
static guint8 asterix_bit (guint8, guint8);
static guint asterix_fspec_len (tvbuff_t *, guint);
static guint8 asterix_field_exists (tvbuff_t *, guint, int);
static guint8 asterix_get_active_uap (tvbuff_t *, guint, guint8);
static int asterix_field_length (tvbuff_t *, guint, const AsterixField *);
static int asterix_field_offset (tvbuff_t *, guint, const AsterixField *[], int);
static int asterix_message_length (tvbuff_t *, guint, guint8, guint8);
static const char AISCode[] = { ' ', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O',
'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z', ' ', ' ', ' ', ' ', ' ',
' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ',
'0', '1', '2', '3', '4', '5', '6', '7', '8', '9', ' ', ' ', ' ', ' ', ' ', ' ' };
static const value_string valstr_XXX_FX[] = {
{ 0, "End of data item" },
{ 1, "Extension into next extent" },
{ 0, NULL }
};
static const FieldPart IXXX_FX = { 1, 1.0, FIELD_PART_FX, &hf_XXX_FX, NULL };
static const FieldPart IXXX_1bit_spare = { 1, 1.0, FIELD_PART_UINT, NULL, NULL };
static const FieldPart IXXX_2bit_spare = { 2, 1.0, FIELD_PART_UINT, NULL, NULL };
static const FieldPart IXXX_3bit_spare = { 3, 1.0, FIELD_PART_UINT, NULL, NULL };
static const FieldPart IXXX_4bit_spare = { 4, 1.0, FIELD_PART_UINT, NULL, NULL };
static const FieldPart IXXX_5bit_spare = { 5, 1.0, FIELD_PART_UINT, NULL, NULL };
static const FieldPart IXXX_6bit_spare = { 6, 1.0, FIELD_PART_UINT, NULL, NULL };
static const FieldPart IXXX_7bit_spare = { 7, 1.0, FIELD_PART_UINT, NULL, NULL };
/* Spare Item */
DIAG_OFF_PEDANTIC
static const AsterixField IX_SPARE = { FIXED, 0, 0, 0, &hf_spare, NULL, { NULL } };
/* insert1 */
---{insert1}---
/* insert1 */
/* settings which category version to use for each ASTERIX category */
static gint global_categories_version[] = {
0, /* 000 */
0, /* 001 */
0, /* 002 */
0, /* 003 */
0, /* 004 */
0, /* 005 */
0, /* 006 */
0, /* 007 */
0, /* 008 */
0, /* 009 */
0, /* 010 */
0, /* 011 */
0, /* 012 */
0, /* 013 */
0, /* 014 */
0, /* 015 */
0, /* 016 */
0, /* 017 */
0, /* 018 */
0, /* 019 */
0, /* 020 */
0, /* 021 */
0, /* 022 */
0, /* 023 */
0, /* 024 */
0, /* 025 */
0, /* 026 */
0, /* 027 */
0, /* 028 */
0, /* 029 */
0, /* 030 */
0, /* 031 */
0, /* 032 */
0, /* 033 */
0, /* 034 */
0, /* 035 */
0, /* 036 */
0, /* 037 */
0, /* 038 */
0, /* 039 */
0, /* 040 */
0, /* 041 */
0, /* 042 */
0, /* 043 */
0, /* 044 */
0, /* 045 */
0, /* 046 */
0, /* 047 */
0, /* 048 */
0, /* 049 */
0, /* 050 */
0, /* 051 */
0, /* 052 */
0, /* 053 */
0, /* 054 */
0, /* 055 */
0, /* 056 */
0, /* 057 */
0, /* 058 */
0, /* 059 */
0, /* 060 */
0, /* 061 */
0, /* 062 */
0, /* 063 */
0, /* 064 */
0, /* 065 */
0, /* 066 */
0, /* 067 */
0, /* 068 */
0, /* 069 */
0, /* 070 */
0, /* 071 */
0, /* 072 */
0, /* 073 */
0, /* 074 */
0, /* 075 */
0, /* 076 */
0, /* 077 */
0, /* 078 */
0, /* 079 */
0, /* 080 */
0, /* 081 */
0, /* 082 */
0, /* 083 */
0, /* 084 */
0, /* 085 */
0, /* 086 */
0, /* 087 */
0, /* 088 */
0, /* 089 */
0, /* 090 */
0, /* 091 */
0, /* 092 */
0, /* 093 */
0, /* 094 */
0, /* 095 */
0, /* 096 */
0, /* 097 */
0, /* 098 */
0, /* 099 */
0, /* 100 */
0, /* 101 */
0, /* 102 */
0, /* 103 */
0, /* 104 */
0, /* 105 */
0, /* 106 */
0, /* 107 */
0, /* 108 */
0, /* 109 */
0, /* 110 */
0, /* 111 */
0, /* 112 */
0, /* 113 */
0, /* 114 */
0, /* 115 */
0, /* 116 */
0, /* 117 */
0, /* 118 */
0, /* 119 */
0, /* 120 */
0, /* 121 */
0, /* 122 */
0, /* 123 */
0, /* 124 */
0, /* 125 */
0, /* 126 */
0, /* 127 */
0, /* 128 */
0, /* 129 */
0, /* 130 */
0, /* 131 */
0, /* 132 */
0, /* 133 */
0, /* 134 */
0, /* 135 */
0, /* 136 */
0, /* 137 */
0, /* 138 */
0, /* 139 */
0, /* 140 */
0, /* 141 */
0, /* 142 */
0, /* 143 */
0, /* 144 */
0, /* 145 */
0, /* 146 */
0, /* 147 */
0, /* 148 */
0, /* 149 */
0, /* 150 */
0, /* 151 */
0, /* 152 */
0, /* 153 */
0, /* 154 */
0, /* 155 */
0, /* 156 */
0, /* 157 */
0, /* 158 */
0, /* 159 */
0, /* 160 */
0, /* 161 */
0, /* 162 */
0, /* 163 */
0, /* 164 */
0, /* 165 */
0, /* 166 */
0, /* 167 */
0, /* 168 */
0, /* 169 */
0, /* 170 */
0, /* 171 */
0, /* 172 */
0, /* 173 */
0, /* 174 */
0, /* 175 */
0, /* 176 */
0, /* 177 */
0, /* 178 */
0, /* 179 */
0, /* 180 */
0, /* 181 */
0, /* 182 */
0, /* 183 */
0, /* 184 */
0, /* 185 */
0, /* 186 */
0, /* 187 */
0, /* 188 */
0, /* 189 */
0, /* 190 */
0, /* 191 */
0, /* 192 */
0, /* 193 */
0, /* 194 */
0, /* 195 */
0, /* 196 */
0, /* 197 */
0, /* 198 */
0, /* 199 */
0, /* 200 */
0, /* 201 */
0, /* 202 */
0, /* 203 */
0, /* 204 */
0, /* 205 */
0, /* 206 */
0, /* 207 */
0, /* 208 */
0, /* 209 */
0, /* 210 */
0, /* 211 */
0, /* 212 */
0, /* 213 */
0, /* 214 */
0, /* 215 */
0, /* 216 */
0, /* 217 */
0, /* 218 */
0, /* 219 */
0, /* 220 */
0, /* 221 */
0, /* 222 */
0, /* 223 */
0, /* 224 */
0, /* 225 */
0, /* 226 */
0, /* 227 */
0, /* 228 */
0, /* 229 */
0, /* 230 */
0, /* 231 */
0, /* 232 */
0, /* 233 */
0, /* 234 */
0, /* 235 */
0, /* 236 */
0, /* 237 */
0, /* 238 */
0, /* 239 */
0, /* 240 */
0, /* 241 */
0, /* 242 */
0, /* 243 */
0, /* 244 */
0, /* 245 */
0, /* 246 */
0, /* 247 */
0, /* 248 */
0, /* 249 */
0, /* 250 */
0, /* 251 */
0, /* 252 */
0, /* 253 */
0, /* 254 */
0 /* 255 */
};
static int dissect_asterix (tvbuff_t *tvb, packet_info *pinfo, proto_tree *tree, void* data _U_)
{
col_set_str (pinfo->cinfo, COL_PROTOCOL, "ASTERIX");
col_clear (pinfo->cinfo, COL_INFO);
if (tree) { /* we are being asked for details */
dissect_asterix_packet (tvb, pinfo, tree);
}
return tvb_captured_length(tvb);
}
static void dissect_asterix_packet (tvbuff_t *tvb, packet_info *pinfo, proto_tree *tree)
{
guint i;
guint8 category;
guint16 length;
proto_item *asterix_packet_item;
proto_tree *asterix_packet_tree;
for (i = 0; i < tvb_reported_length (tvb); i += length + 3) {
/* all ASTERIX messages have the same structure:
*
* header:
*
* 1 byte category even though a category is referenced as I019,
* this is just stored as decimal 19 (i.e. 0x13)
* 2 bytes length the total length of this ASTERIX message, the
* length includes the size of the header.
*
* Note that there was a structural change at
* one point that changed whether multiple
* records can occur after the header or not
* (each category specifies this explicitly; all
* of the currently supported categories can have
* multiple records, so this implementation just
* assumes that is always the case)
*
* record (multiple records can exist):
*
* n bytes FSPEC the field specifier is a bit mask where the
* lowest bit of each byte is called the FX bit.
* When the FX bit is set this indicates that
* the FSPEC extends into the next byte.
* Any other bit indicates the presence of the
* field that owns that bit (as per the User
* Application Profile (UAP)).
* X bytes Field Y X is as per the specification for field Y.
* etc.
*
* The User Application Profile (UAP) is simply a mapping from the
* FSPEC to fields. Each category has its own UAP.
*/
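/* Example (purely illustrative): the data block
*     13 00 08 e0 xx xx xx xx
* carries category 19 (0x13) and a total length of 8 octets
* (header included). The single record starts with the one-octet
* FSPEC 0xe0 (FX bit = 0), which announces the presence of the
* first three UAP items of that category in the remaining four
* octets. */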
category = tvb_get_guint8 (tvb, i);
length = (tvb_get_guint8 (tvb, i + 1) << 8) + tvb_get_guint8 (tvb, i + 2) - 3; /* -3 for category and length */
asterix_packet_item = proto_tree_add_item (tree, proto_asterix, tvb, i, length + 3, ENC_NA);
proto_item_append_text (asterix_packet_item, ", Category %03d", category);
asterix_packet_tree = proto_item_add_subtree (asterix_packet_item, ett_asterix);
proto_tree_add_item (asterix_packet_tree, hf_asterix_category, tvb, i, 1, ENC_BIG_ENDIAN);
proto_tree_add_item (asterix_packet_tree, hf_asterix_length, tvb, i + 1, 2, ENC_BIG_ENDIAN);
dissect_asterix_data_block (tvb, pinfo, i + 3, asterix_packet_tree, category, length);
}
}
static void dissect_asterix_data_block (tvbuff_t *tvb, packet_info *pinfo, guint offset, proto_tree *tree, guint8 category, gint length)
{
guint8 active_uap;
int fspec_len, inner_offset, size, counter;
proto_item *asterix_message_item = NULL;
proto_tree *asterix_message_tree = NULL;
for (counter = 1, inner_offset = 0; inner_offset < length; counter++) {
/* This loop handles parsing of each ASTERIX record */
active_uap = asterix_get_active_uap (tvb, offset + inner_offset, category);
size = asterix_message_length (tvb, offset + inner_offset, category, active_uap);
if (size > 0) {
asterix_message_item = proto_tree_add_item (tree, hf_asterix_message, tvb, offset + inner_offset, size, ENC_NA);
proto_item_append_text (asterix_message_item, ", #%02d, length: %d", counter, size);
asterix_message_tree = proto_item_add_subtree (asterix_message_item, ett_asterix_message);
fspec_len = asterix_fspec_len (tvb, offset + inner_offset);
/*show_fspec (tvb, asterix_message_tree, offset + inner_offset, fspec_len);*/
proto_tree_add_item (asterix_message_tree, hf_asterix_fspec, tvb, offset + inner_offset, fspec_len, ENC_NA);
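/* categories[category][edition][uap] is a NULL-terminated array of
* AsterixField pointers, generated into the ---{insert1}--- section
* above; the edition index comes from the per-category preference
* stored in global_categories_version. */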
size = dissect_asterix_fields (tvb, pinfo, offset + inner_offset, asterix_message_tree, category, categories[category][global_categories_version[category]][active_uap]);
inner_offset += size + fspec_len;
}
else {
inner_offset = length;
}
}
}
static gint dissect_asterix_fields (tvbuff_t *tvb, packet_info *pinfo, guint offset, proto_tree *tree, guint8 category, const AsterixField *current_uap[])
{
guint i, j, size, start, len, inner_offset, fspec_len;
guint64 counter;
proto_item *asterix_field_item = NULL;
proto_tree *asterix_field_tree = NULL;
proto_item *asterix_field_item2 = NULL;
proto_tree *asterix_field_tree2 = NULL;
if (current_uap == NULL)
return 0;
for (i = 0, size = 0; current_uap[i] != NULL; i++) {
start = asterix_field_offset (tvb, offset, current_uap, i);
if (start > 0) {
len = asterix_field_length (tvb, offset + start, current_uap[i]);
size += len;
switch(current_uap[i]->type) {
case COMPOUND:
asterix_field_item = proto_tree_add_item (tree, *current_uap[i]->hf, tvb, offset + start, len, ENC_NA);
asterix_field_tree = proto_item_add_subtree (asterix_field_item, ett_asterix_subtree);
fspec_len = asterix_fspec_len (tvb, offset + start);
proto_tree_add_item (asterix_field_tree, hf_asterix_fspec, tvb, offset + start, fspec_len, ENC_NA);
dissect_asterix_fields (tvb, pinfo, offset + start, asterix_field_tree, category, (const AsterixField **)current_uap[i]->field);
break;
case REPETITIVE:
asterix_field_item = proto_tree_add_item (tree, *current_uap[i]->hf, tvb, offset + start, len, ENC_NA);
asterix_field_tree = proto_item_add_subtree (asterix_field_item, ett_asterix_subtree);
for (j = 0, counter = 0; j < current_uap[i]->repetition_counter_size; j++) {
counter = (counter << 8) + tvb_get_guint8 (tvb, offset + start + j);
}
proto_tree_add_item (asterix_field_tree, hf_counter, tvb, offset + start, current_uap[i]->repetition_counter_size, ENC_BIG_ENDIAN);
for (j = 0, inner_offset = 0; j < counter; j++, inner_offset += current_uap[i]->length) {
asterix_field_item2 = proto_tree_add_item (asterix_field_tree, *current_uap[i]->hf, tvb, offset + start + current_uap[i]->repetition_counter_size + inner_offset, current_uap[i]->length, ENC_NA);
asterix_field_tree2 = proto_item_add_subtree (asterix_field_item2, ett_asterix_subtree);
asterix_build_subtree (tvb, pinfo, offset + start + current_uap[i]->repetition_counter_size + inner_offset, asterix_field_tree2, current_uap[i]);
}
break;
/* currently not generated from asterix-specs */
/*case EXP:
asterix_field_item = proto_tree_add_item (tree, *current_uap[i]->hf, tvb, offset + start, len, ENC_NA);
asterix_field_tree = proto_item_add_subtree (asterix_field_item, ett_asterix_subtree);
proto_tree_add_item (asterix_field_tree, hf_re_field_len, tvb, offset + start, 1, ENC_BIG_ENDIAN);
start++;
fspec_len = asterix_fspec_len (tvb, offset + start);
proto_tree_add_item (asterix_field_tree, hf_asterix_fspec, tvb, offset + start, fspec_len, ENC_NA);
dissect_asterix_fields (tvb, pinfo, offset + start, asterix_field_tree, category, (const AsterixField **)current_uap[i]->field);
break;*/
default: /* FIXED, FX, FX_1, FX_UAP */
asterix_field_item = proto_tree_add_item (tree, *current_uap[i]->hf, tvb, offset + start, len, ENC_NA);
asterix_field_tree = proto_item_add_subtree (asterix_field_item, ett_asterix_subtree);
asterix_build_subtree (tvb, pinfo, offset + start, asterix_field_tree, current_uap[i]);
break;
}
}
}
return size;
}
static void asterix_build_subtree (tvbuff_t *tvb, packet_info *pinfo, guint offset, proto_tree *parent, const AsterixField *field)
{
header_field_info* hfi;
int bytes_in_type, byte_offset_of_mask;
gint i, inner_offset, offset_in_tvb, length_in_tvb;
guint8 go_on;
gint64 value;
char *str_buffer = NULL;
double scaling_factor = 1.0;
guint8 *air_speed_im_bit;
if (field->part != NULL) {
for (i = 0, inner_offset = 0, go_on = 1; go_on && field->part[i] != NULL; i++) {
value = tvb_get_bits64 (tvb, offset * 8 + inner_offset, field->part[i]->bit_length, ENC_BIG_ENDIAN);
if (field->part[i]->hf != NULL) {
offset_in_tvb = offset + inner_offset / 8;
length_in_tvb = (inner_offset % 8 + field->part[i]->bit_length + 7) / 8;
switch (field->part[i]->type) {
case FIELD_PART_FX:
if (!value) go_on = 0;
/* Fall through */
case FIELD_PART_INT:
case FIELD_PART_UINT:
case FIELD_PART_HEX:
case FIELD_PART_ASCII:
case FIELD_PART_SQUAWK:
hfi = proto_registrar_get_nth (*field->part[i]->hf);
if (hfi->bitmask)
{
// for a small bit field to decode correctly with
// a mask that belongs to a large(r) one we need to
// re-adjust offset_in_tvb and length_in_tvb to
// correctly align with the given hf mask.
//
// E.g. the following would not decode correctly:
// { &hf_020_050_V, ... FT_UINT16, ... 0x8000, ...
// instead one would have to use
// { &hf_020_050_V, ... FT_UINT8, ... 0x80, ...
//
bytes_in_type = ftype_length (hfi->type);
if (bytes_in_type > 1)
{
byte_offset_of_mask = bytes_in_type - (ws_ilog2 (hfi->bitmask) + 8)/8;
if (byte_offset_of_mask >= 0)
{
offset_in_tvb -= byte_offset_of_mask;
length_in_tvb = bytes_in_type;
}
}
}
proto_tree_add_item (parent, *field->part[i]->hf, tvb, offset_in_tvb, length_in_tvb, ENC_BIG_ENDIAN);
break;
case FIELD_PART_FLOAT:
twos_complement (&value, field->part[i]->bit_length);
/* Fall through */
case FIELD_PART_UFLOAT:
scaling_factor = field->part[i]->scaling_factor;
if (field->part[i]->format_string != NULL)
proto_tree_add_double_format_value (parent, *field->part[i]->hf, tvb, offset_in_tvb, length_in_tvb, value * scaling_factor, field->part[i]->format_string, value * scaling_factor);
else
proto_tree_add_double (parent, *field->part[i]->hf, tvb, offset_in_tvb, length_in_tvb, value * scaling_factor);
break;
case FIELD_PART_CALLSIGN:
str_buffer = wmem_strdup_printf(
wmem_packet_scope (),
"%c%c%c%c%c%c%c%c",
AISCode[(value >> 42) & 63],
AISCode[(value >> 36) & 63],
AISCode[(value >> 30) & 63],
AISCode[(value >> 24) & 63],
AISCode[(value >> 18) & 63],
AISCode[(value >> 12) & 63],
AISCode[(value >> 6) & 63],
AISCode[value & 63]);
proto_tree_add_string (parent, *field->part[i]->hf, tvb, offset_in_tvb, length_in_tvb, str_buffer);
break;
case FIELD_PART_IAS_IM:
/* special processing for I021/150 and I062/380#4 because Air Speed depends on IM subfield */
air_speed_im_bit = wmem_new (wmem_packet_scope (), guint8);
*air_speed_im_bit = (tvb_get_guint8 (tvb, offset_in_tvb) & 0x80) >> 7;
/* Save IM info for the packet. key = 21150. */
p_add_proto_data (pinfo->pool, pinfo, proto_asterix, 21150, air_speed_im_bit);
proto_tree_add_item (parent, *field->part[i]->hf, tvb, offset_in_tvb, length_in_tvb, ENC_BIG_ENDIAN);
break;
case FIELD_PART_IAS_ASPD:
/* special processing for I021/150 and I062/380#4 because Air Speed depends on IM subfield */
air_speed_im_bit = (guint8 *)p_get_proto_data (pinfo->pool, pinfo, proto_asterix, 21150);
if (!air_speed_im_bit || *air_speed_im_bit == 0)
scaling_factor = 1.0/16384.0;
else
scaling_factor = 0.001;
proto_tree_add_double (parent, *field->part[i]->hf, tvb, offset_in_tvb, length_in_tvb, value * scaling_factor);
break;
}
}
inner_offset += field->part[i]->bit_length;
}
} /* if not null */
}
static guint8 asterix_bit (guint8 b, guint8 bitNo)
{
return bitNo < 8 && (b & (0x80 >> bitNo)) > 0;
}
/* Function makes a gint64 two's complement value.
* Only the lowest bit_len bits are set in the gint64; all more
* significant bits need to be filled in to form a proper two's complement.
* If the number is negative, all other bits must be set to 1.
* If the number is positive, all other bits must remain 0. */
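/* Example: with bit_len = 10, the raw value 0x3ff (all ten bits set)
* denotes -1; sign extension sets bits 10..63, so *v becomes -1. */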
static void twos_complement (gint64 *v, guint8 bit_len)
{
if (*v & (G_GUINT64_CONSTANT(1) << (bit_len - 1))) {
*v |= (G_GUINT64_CONSTANT(0xffffffffffffffff) << bit_len);
}
}
static guint asterix_fspec_len (tvbuff_t *tvb, guint offset)
{
guint i;
guint max_length = tvb_reported_length (tvb) - offset;
for (i = 0; (tvb_get_guint8 (tvb, offset + i) & 1) && i < max_length; i++);
return i + 1;
}
static guint8 asterix_field_exists (tvbuff_t *tvb, guint offset, int bitIndex)
{
guint8 bitNo, i;
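/* Map the zero-based UAP item index to an FSPEC bit position:
* every eighth FSPEC bit is the FX (extension) bit rather than a
* presence bit, so one extra bit is skipped per seven item bits. */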
bitNo = bitIndex + bitIndex / 7;
for (i = 0; i < bitNo / 8; i++) {
if (!(tvb_get_guint8 (tvb, offset + i) & 1)) return 0;
}
return asterix_bit (tvb_get_guint8 (tvb, offset + i), bitNo % 8);
}
static int asterix_field_length (tvbuff_t *tvb, guint offset, const AsterixField *field)
{
guint size;
guint64 count;
guint8 i;
size = 0;
switch(field->type) {
case FIXED:
size = field->length;
break;
case REPETITIVE:
for (i = 0, count = 0; i < field->repetition_counter_size && i < sizeof (count); i++)
count = (count << 8) + tvb_get_guint8 (tvb, offset + i);
size = (guint)(field->repetition_counter_size + count * field->length);
break;
case FX_UAP:
case FX:
for (size = field->length + field->header_length; tvb_get_guint8 (tvb, offset + size - 1) & 1; size += field->length);
break;
case EXP:
for (i = 0, size = 0; i < field->header_length; i++) {
size = (size << 8) + tvb_get_guint8 (tvb, offset + i);
}
break;
case COMPOUND:
/* FSPEC */
for (size = 0; tvb_get_guint8 (tvb, offset + size) & 1; size++);
size++;
for (i = 0; field->field[i] != NULL; i++) {
if (asterix_field_exists (tvb, offset, i))
size += asterix_field_length (tvb, offset + size, field->field[i]);
}
break;
}
return size;
}
/* This works for category 001. For other categories it may require changes. */
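/* The UAP selection bit is the most significant bit of the item marked
* FX_UAP (I001/020 for category 001); the returned value (0 or 1) is
* used as the index of the active UAP. */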
static guint8 asterix_get_active_uap (tvbuff_t *tvb, guint offset, guint8 category)
{
int i, inner_offset;
AsterixField **current_uap;
if ((category == 1) && (categories[category] != NULL)) { /* if category is supported */
if (categories[category][global_categories_version[category]][1] != NULL) { /* if exists another uap */
current_uap = (AsterixField **)categories[category][global_categories_version[category]][0];
if (current_uap != NULL) {
inner_offset = asterix_fspec_len (tvb, offset);
for (i = 0; current_uap[i] != NULL; i++) {
if (asterix_field_exists (tvb, offset, i)) {
if (current_uap[i]->type == FX_UAP) {
return tvb_get_guint8 (tvb, offset + inner_offset) >> 7;
}
inner_offset += asterix_field_length (tvb, offset + inner_offset, current_uap[i]);
}
}
}
}
}
return 0;
}
static int asterix_field_offset (tvbuff_t *tvb, guint offset, const AsterixField *current_uap[], int field_index)
{
int i, inner_offset;
inner_offset = 0;
if (asterix_field_exists (tvb, offset, field_index)) {
inner_offset = asterix_fspec_len (tvb, offset);
for (i = 0; i < field_index; i++) {
if (asterix_field_exists (tvb, offset, i))
inner_offset += asterix_field_length (tvb, offset + inner_offset, current_uap[i]);
}
}
return inner_offset;
}
static int asterix_message_length (tvbuff_t *tvb, guint offset, guint8 category, guint8 active_uap)
{
int i, size;
AsterixField **current_uap;
if (categories[category] != NULL) { /* if category is supported */
current_uap = (AsterixField **)categories[category][global_categories_version[category]][active_uap];
if (current_uap != NULL) {
size = asterix_fspec_len (tvb, offset);
for (i = 0; current_uap[i] != NULL; i++) {
if (asterix_field_exists (tvb, offset, i)) {
size += asterix_field_length (tvb, offset + size, current_uap[i]);
}
}
return size;
}
}
return 0;
}
void proto_register_asterix (void)
{
static hf_register_info hf[] = {
{ &hf_asterix_category, { "Category", "asterix.category", FT_UINT8, BASE_DEC, NULL, 0x0, NULL, HFILL } },
{ &hf_asterix_length, { "Length", "asterix.length", FT_UINT16, BASE_DEC, NULL, 0x0, NULL, HFILL } },
{ &hf_asterix_message, { "Asterix message", "asterix.message", FT_NONE, BASE_NONE, NULL, 0x0, NULL, HFILL } },
{ &hf_asterix_fspec, { "FSPEC", "asterix.fspec", FT_NONE, BASE_NONE, NULL, 0x0, NULL, HFILL } },
{ &hf_re_field_len, { "RE LEN", "asterix.re_field_len", FT_UINT8, BASE_DEC, NULL, 0x0, NULL, HFILL } },
{ &hf_spare, { "Spare", "asterix.spare", FT_NONE, BASE_NONE, NULL, 0x0, NULL, HFILL } },
{ &hf_counter, { "Counter", "asterix.counter", FT_UINT32, BASE_DEC, NULL, 0x0, NULL, HFILL } },
{ &hf_XXX_FX, { "FX", "asterix.FX", FT_UINT8, BASE_DEC, VALS (valstr_XXX_FX), 0x01, "Extension into next extent", HFILL } },
/* insert2 */
---{insert2}---
/* insert2 */
};
/* Setup protocol subtree array */
static gint *ett[] = {
&ett_asterix,
&ett_asterix_category,
&ett_asterix_length,
&ett_asterix_message,
&ett_asterix_subtree
};
module_t *asterix_prefs_module;
proto_asterix = proto_register_protocol (
"ASTERIX packet", /* name */
"ASTERIX", /* short name */
"asterix" /* abbrev */
);
proto_register_field_array (proto_asterix, hf, array_length (hf));
proto_register_subtree_array (ett, array_length (ett));
asterix_handle = register_dissector ("asterix", dissect_asterix, proto_asterix);
asterix_prefs_module = prefs_register_protocol (proto_asterix, NULL);
/* insert3 */
---{insert3}---
/* insert3 */
}
void proto_reg_handoff_asterix (void)
{
dissector_add_uint_with_preference("udp.port", ASTERIX_PORT, asterix_handle);
}
/*
* Editor modelines - https://www.wireshark.org/tools/modelines.html
*
* Local variables:
* c-basic-offset: 4
* tab-width: 8
* indent-tabs-mode: nil
* End:
*
* vi: set shiftwidth=4 tabstop=8 expandtab:
* :indentSize=4:tabSize=8:noTabs=true:
*/

tools/asterix/update-specs.py (new executable file)

@@ -0,0 +1,690 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# By Zoran Bošnjak <zoran.bosnjak@sloveniacontrol.si>
#
# Use asterix specifications in JSON format
# to generate C/C++ structures suitable for Wireshark.
#
# SPDX-License-Identifier: GPL-2.0-or-later
#
import argparse
import urllib.request
import json
from copy import copy
from itertools import chain, repeat
import os
import sys
import re
# Path to default upstream repository
upstream_repo = 'https://zoranbosnjak.github.io/asterix-specs'
class Offset(object):
"""Keep track of number of added bits.
It's like integer, except when offsets are added together,
a 'modulo 8' is applied, such that offset is always between [0,7].
"""
def __init__(self):
self.current = 0
def __add__(self, other):
self.current = (self.current + other) % 8
return self
@property
def get(self):
return self.current
class Context(object):
"""Support class to be used as a context manager.
The 'tell' method is used to output (print) some data.
All output is first collected to a buffer, then rendered
using a template file.
"""
def __init__(self):
self.buffer = {}
self.offset = Offset()
self.inside_extended = None
self.inside_repetitive = False
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, exc_traceback):
pass
def tell(self, channel, s):
"""Append string 's' to an output channel."""
lines = self.buffer.get(channel, [])
lines.append(s)
self.buffer[channel] = lines
def reset_offset(self):
self.offset = Offset()
def get_number(value):
"""Get Natural/Real/Rational number as an object."""
class Integer(object):
def __init__(self, val):
self.val = val
def __str__(self):
return '{}'.format(self.val)
def __float__(self):
return float(self.val)
class Ratio(object):
def __init__(self, a, b):
self.a = a
self.b = b
def __str__(self):
return '{}/{}'.format(self.a, self.b)
def __float__(self):
return float(self.a) / float(self.b)
class Real(object):
def __init__(self, val):
self.val = val
def __str__(self):
return '{0:f}'.format(self.val).rstrip('0')
def __float__(self):
return float(self.val)
t = value['type']
val = value['value']
if t == 'Integer':
return Integer(int(val))
if t == 'Ratio':
x, y = val['numerator'], val['denominator']
return Ratio(x, y)
if t == 'Real':
return Real(float(val))
raise Exception('unexpected value type {}'.format(t))
def replace_string(s, mapping):
"""Helper function to replace each entry from the mapping."""
for (key,val) in mapping.items():
s = s.replace(key, val)
return s
def replace_unicode(s):
"""Unicode replacement table."""
return replace_string(s, {
u'–': '-',
u'“': '',
u'”': '',
u'°': ' deg',
})
def get_scaling(content):
"""Get scaling factor from the content."""
k = content.get('scaling')
if k is None:
return None
k = get_number(k)
fract = content['fractionalBits']
if fract > 0:
scale = format(float(k) / (pow(2, fract)), '.29f')
scale = scale.rstrip('0')
else:
scale = format(float(k))
return scale
def get_fieldpart(content):
"""Get FIELD_PART* from the content."""
t = content['type']
if t == 'Raw': return 'FIELD_PART_HEX'
elif t == 'Table': return 'FIELD_PART_UINT'
elif t == 'String':
var = content['variation']
if var == 'StringAscii': return 'FIELD_PART_ASCII'
elif var == 'StringICAO': return 'FIELD_PART_CALLSIGN'
elif var == 'StringOctal': return 'FIELD_PART_SQUAWK'
else:
raise Exception('unexpected string variation: {}'.format(var))
elif t == 'Integer':
if content['signed']:
return 'FIELD_PART_INT'
else:
return 'FIELD_PART_UINT'
elif t == 'Quantity':
if content['signed']:
return 'FIELD_PART_FLOAT'
else:
return 'FIELD_PART_UFLOAT'
elif t == 'Bds':
return 'FIELD_PART_HEX'
else:
raise Exception('unexpected content type: {}'.format(t))
def download_url(path):
"""Download url and return content as a string."""
with urllib.request.urlopen(upstream_repo + path) as url:
return url.read()
def read_file(path):
"""Read file content, return string."""
with open(path) as f:
return f.read()
def load_jsons(paths):
"""Load json files from either URL or from local disk."""
# load from url
if paths == []:
manifest = download_url('/manifest.json').decode()
listing = []
for spec in json.loads(manifest):
cat = spec['category']
for edition in spec['cats']:
listing.append('/specs/cat{}/cats/cat{}/definition.json'.format(cat, edition))
for edition in spec['refs']:
listing.append('/specs/cat{}/refs/ref{}/definition.json'.format(cat, edition))
return [download_url(i).decode() for i in listing]
# load from disk
else:
listing = []
for path in paths:
if os.path.isdir(path):
for root, dirs, files in os.walk(path):
for i in files:
(a,b) = os.path.splitext(i)
if (a,b) != ('definition', '.json'):
continue
listing.append(os.path.join(root, i))
elif os.path.isfile(path):
listing.append(path)
else:
raise Exception('unexpected path type: {}'.format(path))
return [read_file(f) for f in listing]
def load_gitrev(paths):
"""Read git revision reference."""
# load from url
if paths == []:
gitrev = download_url('/gitrev.txt').decode().strip()
return [upstream_repo, 'git revision: {}'.format(gitrev)]
# load from disk
else:
return ['(local disk)']
def get_ft(ref, n, content, offset):
"""Get FT... from the content."""
a = offset.get
# gross bit size (next multiple of 8)
(m, b) = divmod(a+n, 8)
m = m if b == 0 else m + 1
m *= 8
mask = '0x00'
if a != 0 or b != 0:
bits = chain(repeat(0, a), repeat(1, n), repeat(0, m-n-a))
mask = 0
for (a,b) in zip(bits, reversed(range(m))):
mask += a*pow(2,b)
mask = hex(mask)
# prefix mask with zeros '0x000...', to adjust mask size
assert mask[0:2] == '0x'
mask = mask[2:]
required_mask_size = (m//8)*2
add_some = required_mask_size - len(mask)
mask = '0x' + '0'*add_some + mask
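# Example: a 5-bit field at bit offset 2 spans one octet (m = 8) and gets the mask 0x3e (binary 00111110).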
t = content['type']
if t == 'Raw':
return 'FT_UINT{}, BASE_DEC, NULL, {}'.format(m, mask)
elif t == 'Table':
return 'FT_UINT{}, BASE_DEC, VALS (valstr_{}), {}'.format(m, ref, mask)
elif t == 'String':
var = content['variation']
if var == 'StringAscii':
return 'FT_STRING, BASE_NONE, NULL, {}'.format(mask)
elif var == 'StringICAO':
return 'FT_STRING, BASE_NONE, NULL, {}'.format(mask)
elif var == 'StringOctal':
return 'FT_UINT{}, BASE_OCT, NULL, {}'.format(m, mask)
else:
raise Exception('unexpected string variation: {}'.format(var))
elif t == 'Integer':
signed = content['signed']
if signed:
return 'FT_INT{}, BASE_DEC, NULL, {}'.format(m, mask)
else:
return 'FT_UINT{}, BASE_DEC, NULL, {}'.format(m, mask)
elif t == 'Quantity':
return 'FT_DOUBLE, BASE_NONE, NULL, 0x00'
elif t == 'Bds':
return 'FT_UINT{}, BASE_DEC, NULL, {}'.format(m, mask)
else:
raise Exception('unexpected content type: {}'.format(t))
def reference(cat, edition, path):
"""Create reference string."""
name = '_'.join(path)
if edition is None:
return('{:03d}_{}'.format(cat, name))
return('{:03d}_V{}_{}_{}'.format(cat, edition['major'], edition['minor'], name))
def get_content(rule):
t = rule['type']
# Most cases are 'ContextFree', use as specified.
if t == 'ContextFree':
return rule['content']
# Handle 'Dependent' contents as 'Raw'.
elif t == 'Dependent':
return {'type': "Raw"}
else:
raise Exception('unexpected type: {}'.format(t))
def get_bit_size(item):
"""Return bit size of a (spare) item."""
if item['spare']:
return item['length']
else:
return item['variation']['size']
def get_description(item, content=None):
"""Return item description."""
result = item['name']
if item['title']:
result += ', {}'.format(item['title'])
if content is not None and content.get('unit'):
result += ', [{}]'.format(replace_unicode(content['unit']))
return result
def part1(ctx, get_ref, catalogue):
"""Generate components in order
- static gint hf_...
- FieldPart
- FieldPart[]
- AsterixField
"""
tell = lambda s: ctx.tell('insert1', s)
tell_pr = lambda s: ctx.tell('insert2', s)
ctx.reset_offset()
ctx.inside_extended = None
def handle_item(path, item):
"""Handle 'spare' or regular 'item'.
This function is used recursively, depending on the item structure.
"""
def handle_variation(path, variation):
"""Handle 'Element, Group...' variations.
This function is used recursively, depending on the item structure."""
t = variation['type']
ref = get_ref(path)
def part_of(item):
if item['spare']:
return '&IXXX_{}bit_spare'.format(item['length'])
return '&I{}_{}'.format(ref, item['name'])
if t == 'Element':
tell('static gint hf_{} = -1;'.format(ref))
n = variation['size']
content = get_content(variation['rule'])
scaling = get_scaling(content)
scaling = scaling if scaling is not None else 1.0
fp = get_fieldpart(content)
if content['type'] == 'Table':
tell('static const value_string valstr_{}[] = {}'.format(ref, '{'))
for (a,b) in content['values']:
tell(' {} {}, "{}" {},'.format('{', a, replace_unicode(b), '}'))
tell(' {} 0, NULL {}'.format('{', '}'))
tell('};')
tell('static const FieldPart I{} = {} {}, {}, {}, &hf_{}, NULL {};'.format(ref, '{', n, scaling, fp, ref, '}'))
description = get_description(item, content)
ft = get_ft(ref, n, content, ctx.offset)
tell_pr(' {} &hf_{}, {} "{}", "asterix.{}", {}, NULL, HFILL {} {},'.format('{', ref, '{', description, ref, ft, '}', '}'))
ctx.offset += n
if ctx.inside_extended is not None:
n, rest = ctx.inside_extended
if ctx.offset.get + 1 > n:
raise Exception("unexpected offset")
# FX bit
if ctx.offset.get + 1 == n:
ctx.offset += 1
m = next(rest)
ctx.inside_extended = (m, rest)
elif t == 'Group':
ctx.reset_offset()
description = get_description(item)
tell_pr(' {} &hf_{}, {} "{}", "asterix.{}", FT_NONE, BASE_NONE, NULL, 0x00, NULL, HFILL {} {},'.format('{', ref, '{', description, ref, '}', '}'))
tell('static gint hf_{} = -1;'.format(ref))
for i in variation['items']:
handle_item(path, i)
# FieldPart[]
tell('static const FieldPart *I{}_PARTS[] = {}'.format(ref,'{'))
for i in variation['items']:
tell(' {},'.format(part_of(i)))
tell(' NULL')
tell('};')
# AsterixField
bit_size = sum([get_bit_size(i) for i in variation['items']])
byte_size = bit_size // 8
parts = 'I{}_PARTS'.format(ref)
comp = '{ NULL }'
if not ctx.inside_repetitive:
tell('static const AsterixField I{} = {} FIXED, {}, 0, 0, &hf_{}, {}, {} {};'.format
(ref, '{', byte_size, ref, parts, comp, '}'))
elif t == 'Extended':
n1 = variation['first']
n2 = variation['extents']
ctx.reset_offset()
ctx.inside_extended = (n1, chain(repeat(n1,1), repeat(n2)))
description = get_description(item)
tell_pr(' {} &hf_{}, {} "{}", "asterix.{}", FT_NONE, BASE_NONE, NULL, 0x00, NULL, HFILL {} {},'.format('{', ref, '{', description, ref, '}', '}'))
tell('static gint hf_{} = -1;'.format(ref))
for i in variation['items']:
handle_item(path, i)
tell('static const FieldPart *I{}_PARTS[] = {}'.format(ref,'{'))
chunks = chain(repeat(n1,1), repeat(n2))
items = variation['items']
# iterate over items, reinsert FX bits
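# For example, with first=16 and extents=8, the first chunk holds 15 bits
# of items followed by an FX part, and each further chunk holds 7 item bits
# plus an FX part.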
while True:
bit_size = next(chunks)
assert (bit_size % 8) == 0, "bit alignment error"
byte_size = bit_size // 8
bits_from = bit_size
while True:
i = items[0]
items = items[1:]
n = get_bit_size(i)
tell(' {},'.format(part_of(i)))
bits_from -= n
if bits_from <= 1:
break
tell(' &IXXX_FX,')
if not items:
break
tell(' NULL')
tell('};')
# AsterixField
n1 = variation['first'] // 8
n2 = variation['extents'] // 8
parts = 'I{}_PARTS'.format(ref)
comp = '{ NULL }'
tell('static const AsterixField I{} = {} FX, {}, 0, {}, &hf_{}, {}, {} {};'.format
(ref, '{', n2, n1 - 1, ref, parts, comp, '}'))
ctx.inside_extended = None
elif t == 'Repetitive':
ctx.reset_offset()
ctx.inside_repetitive = True
# Group is required below this item.
if variation['variation']['type'] == 'Element':
subitem = copy(item)
subitem['variation'] = variation['variation']
subitem['name'] = 'VALUE'
subvar = {
'type': "Group",
'items': [subitem],
}
else:
subvar = variation['variation']
handle_variation(path, subvar)
# AsterixField
bit_size = sum([get_bit_size(i) for i in subvar['items']])
byte_size = bit_size // 8
rep = variation['rep'] // 8
parts = 'I{}_PARTS'.format(ref)
comp = '{ NULL }'
tell('static const AsterixField I{} = {} REPETITIVE, {}, {}, 0, &hf_{}, {}, {} {};'.format
(ref, '{', byte_size, rep, ref, parts, comp, '}'))
ctx.inside_repetitive = False
elif t == 'Explicit':
ctx.reset_offset()
tell('static gint hf_{} = -1;'.format(ref))
description = get_description(item)
tell_pr(' {} &hf_{}, {} "{}", "asterix.{}", FT_NONE, BASE_NONE, NULL, 0x00, NULL, HFILL {} {},'.format('{', ref, '{', description, ref, '}', '}'))
tell('static const AsterixField I{} = {} EXP, 0, 0, 1, &hf_{}, NULL, {} NULL {} {};'.format(ref, '{', ref, '{', '}', '}'))
elif t == 'Compound':
ctx.reset_offset()
tell('static gint hf_{} = -1;'.format(ref))
description = get_description(item)
tell_pr(' {} &hf_{}, {} "{}", "asterix.{}", FT_NONE, BASE_NONE, NULL, 0x00, NULL, HFILL {} {},'.format('{', ref, '{', description, ref, '}', '}'))
comp = '{'
for i in variation['items']:
if i is None:
comp += ' &IX_SPARE,'
continue
# Group is required below this item.
if i['variation']['type'] == 'Element':
level2 = copy(i)
level2['name'] = 'VALUE'
level1 = copy(i)
level1['variation'] = {
'items': [level2],
'type': 'Group',
}
subitem = level1
else:
subitem = i
comp += ' &I{}_{},'.format(ref, subitem['name'])
handle_item(path, subitem)
comp += ' NULL }'
# AsterixField
tell('static const AsterixField I{} = {} COMPOUND, 0, 0, 0, &hf_{}, NULL, {} {};'.format
(ref, '{', ref, comp, '}'))
else:
raise Exception('unexpected variation type: {}'.format(t))
if item['spare']:
ctx.offset += item['length']
return
# Group is required on the first level.
if path == [] and item['variation']['type'] == 'Element':
level2 = copy(item)
level2['name'] = 'VALUE'
variation = {
'items': [level2],
'type': "Group",
}
else:
variation = item['variation']
handle_variation(path + [item['name']], variation)
for i in catalogue:
handle_item([], i)
tell('')
def part2(ctx, ref, uap):
"""Generate UAPs"""
tell = lambda s: ctx.tell('insert1', s)
tell('DIAG_OFF_PEDANTIC')
ut = uap['type']
if ut == 'uap':
variations = [{'name': 'uap', 'items': uap['items']}]
elif ut == 'uaps':
variations = uap['variations']
else:
raise Exception('unexpected uap type {}'.format(ut))
for var in variations:
tell('static const AsterixField *I{}_{}[] = {}'.format(ref, var['name'], '{'))
for i in var['items']:
if i is None:
tell(' &IX_SPARE,')
else:
tell(' &I{}_{},'.format(ref, i))
tell(' NULL')
tell('};')
tell('static const AsterixField **I{}[] = {}'.format(ref, '{'))
for var in variations:
tell(' I{}_{},'.format(ref, var['name']))
tell(' NULL')
tell('};')
tell('DIAG_ON_PEDANTIC')
tell('')
def part3(ctx, specs):
"""Generate
- static const AsterixField ***...
- static const enum_val_t ..._versions[]...
"""
tell = lambda s: ctx.tell('insert1', s)
def fmt_edition(cat, edition):
return 'I{:03d}_V{}_{}'.format(cat, edition['major'], edition['minor'])
cats = set([spec['number'] for spec in specs])
for cat in sorted(cats):
lst = [spec for spec in specs if spec['number'] == cat]
editions = sorted([val['edition'] for val in lst], key = lambda x: (x['major'], x['minor']), reverse=True)
editions_fmt = [fmt_edition(cat, edition) for edition in editions]
editions_str = ', '.join(['I{:03d}'.format(cat)] + editions_fmt)
tell('DIAG_OFF_PEDANTIC')
tell('static const AsterixField ***I{:03d}all[] = {} {} {};'.format(cat, '{', editions_str, '}'))
tell('DIAG_ON_PEDANTIC')
tell('')
tell('static const enum_val_t I{:03d}_versions[] = {}'.format(cat, '{'))
edition = editions[0]
a = edition['major']
b = edition['minor']
tell(' {} "I{:03d}", "Version {}.{} (latest)", 0 {},'.format('{', cat, a, b, '}'))
for ix, edition in enumerate(editions, start=1):
a = edition['major']
b = edition['minor']
tell(' {} "I{:03d}_v{}_{}", "Version {}.{}", {} {},'.format('{', cat, a, b, a, b, ix, '}'))
tell(' { NULL, NULL, 0 }')
tell('};')
tell('')
def part4(ctx, cats):
"""Generate
- static const AsterixField ****categories[]...
- prefs_register_enum_preference ...
"""
tell = lambda s: ctx.tell('insert1', s)
tell_pr = lambda s: ctx.tell('insert3', s)
tell('static const AsterixField ****categories[] = {')
for i in range(0, 256):
val = 'I{:03d}all'.format(i) if i in cats else 'NULL'
tell(' {}, /* {:03d} */'.format(val, i))
tell(' NULL')
tell('};')
for cat in sorted(cats):
tell_pr(' prefs_register_enum_preference (asterix_prefs_module, "i{:03d}_version", "I{:03d} version", "Select the CAT{:03d} version", &global_categories_version[{}], I{:03d}_versions, FALSE);'.format(cat, cat, cat, cat, cat))
def main():
parser = argparse.ArgumentParser(description='Process asterix specs files.')
parser.add_argument('paths', metavar='PATH', nargs='*',
help='json spec file(s), use the upstream repository if no input is given')
parser.add_argument('--reference', action='store_true',
help='print upstream reference and exit')
args = parser.parse_args()
if args.reference:
gitrev_short = download_url('/gitrev.txt').decode().strip()[0:10]
print(gitrev_short)
sys.exit(0)
# read and json-decode input files
jsons = load_jsons(args.paths)
jsons = [json.loads(i) for i in jsons]
jsons = sorted(jsons, key = lambda x: (x['number'], x['edition']['major'], x['edition']['minor']))
jsons = [spec for spec in jsons if spec['type'] == 'Basic']
cats = list(set([x['number'] for x in jsons]))
latest_editions = {cat: sorted(
filter(lambda x: x['number'] == cat, jsons),
key = lambda x: (x['edition']['major'], x['edition']['minor']), reverse=True)[0]['edition']
for cat in cats}
# regular expression for template rendering
ins = re.compile(r'---\{([A-Za-z0-9_]*)\}---')
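# A template line such as '---{insert1}---' is later replaced by the lines
# collected in ctx.buffer['insert1']; see the rendering loop below.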
gitrev = load_gitrev(args.paths)
with Context() as ctx:
for i in gitrev:
ctx.tell('gitrev', i)
# generate parts into the context buffer
for spec in jsons:
is_latest = spec['edition'] == latest_editions[spec['number']]
ctx.tell('insert1', '/* Category {:03d}, edition {}.{} */'.format(spec['number'], spec['edition']['major'], spec['edition']['minor']))
# handle part1
get_ref = lambda path: reference(spec['number'], spec['edition'], path)
part1(ctx, get_ref, spec['catalogue'])
if is_latest:
ctx.tell('insert1', '/* Category {:03d}, edition {}.{} (latest) */'.format(spec['number'], spec['edition']['major'], spec['edition']['minor']))
get_ref = lambda path: reference(spec['number'], None, path)
part1(ctx, get_ref, spec['catalogue'])
# handle part2
cat = spec['number']
edition = spec['edition']
ref = '{:03d}_V{}_{}'.format(cat, edition['major'], edition['minor'])
part2(ctx, ref, spec['uap'])
if is_latest:
ref = '{:03d}'.format(cat)
part2(ctx, ref, spec['uap'])
part3(ctx, jsons)
part4(ctx, set([spec['number'] for spec in jsons]))
# use context buffer to render template
with open('packet-asterix-template.c') as f:
template_lines = f.readlines()
# copy each line of the template to stdout,
# if the 'insertion' is found in the template,
# replace it with the buffer content
for line in template_lines:
line = line.rstrip()
insertion = ins.match(line)
if insertion is None:
print(line)
else:
segment = insertion.group(1)
[print(i) for i in ctx.buffer[segment]]
if __name__ == '__main__':
main()


@@ -33,6 +33,11 @@ for FILE in $COMMIT_FILES; do
then
continue
fi
# This is a template file, not a final '.c' file.
if test "$FILE_BASENAME" = "packet-asterix-template.c"
then
continue
fi
# extcap/{etwdump.c,etl.c,etw_message.c}: those compile, and are compiled,
# only on Windows
# The same applies to capture-wpcap.c