mirror of https://gerrit.osmocom.org/pysim
commands: add ".." notation to expand hexstrings
When updating files and records there are sometimes huge portions that are just 0xff, mostly at the end of a file or record that is not completely used. Let's add a notation to tell pySim-shell how to fill those sections.

Change-Id: Iedd7887bf7d706878f4a3beca8dbea456404610b

changes/18/28218/2
parent f16ac6acf8
commit 40ea4a4a1c
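To illustrate the notation this change introduces, here is a rough interpreter sketch using the expand_hex() helper added further down in the diff (the hexstrings and lengths are made-up examples, not taken from the change itself):

    >>> from pySim.utils import expand_hex
    >>> expand_hex('0123ff..', 6)   # '..' repeats the preceding byte until 6 bytes are reached
    '0123ffffffff'
    >>> expand_hex('f.', 3)         # '.' repeats the preceding nibble until 3 bytes (6 digits) are reached
    'ffffff'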
@@ -23,7 +23,7 @@
 from construct import *
 from pySim.construct import LV
-from pySim.utils import rpad, b2h, h2b, sw_match, bertlv_encode_len, Hexstr, h2i, str_sanitize
+from pySim.utils import rpad, b2h, h2b, sw_match, bertlv_encode_len, Hexstr, h2i, str_sanitize, expand_hex
 from pySim.exceptions import SwMatchError
@@ -190,6 +190,10 @@ class SimCardCommands(object):
         offset : byte offset in file from which to start writing
         verify : Whether or not to verify data after write
         """
+
+        file_len = self.binary_size(ef)
+        data = expand_hex(data, file_len)
+
         data_length = len(data) // 2

         # Save write cycles by reading+comparing before write
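A minimal sketch of what the two added lines accomplish in update_binary(), assuming a hypothetical 10-byte transparent EF (in the real code the length comes from self.binary_size(ef)):

    from pySim.utils import expand_hex

    file_len = 10                       # hypothetical; normally self.binary_size(ef)
    data = expand_hex('0123ff..', file_len)
    assert data == '0123' + 'ff' * 8    # unused tail filled with the byte preceding '..'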
@@ -255,16 +259,17 @@ class SimCardCommands(object):
         verify : verify data by re-reading the record
         conserve : read record and compare it with data, skip write on match
         """

         res = self.select_path(ef)
+        rec_length = self.__record_len(res)
+        data = expand_hex(data, rec_length)
+
         if force_len:
             # enforce the record length by the actual length of the given data input
             rec_length = len(data) // 2
         else:
-            # determine the record length from the select response of the file and pad
-            # the input data with 0xFF if necessary. In cases where the input data
-            # exceed we throw an exception.
-            rec_length = self.__record_len(res)
+            # make sure the input data is padded to the record length using 0xFF.
+            # In cases where the input data exceed we throw an exception.
             if (len(data) // 2 > rec_length):
                 raise ValueError('Data length exceeds record length (expected max %d, got %d)' % (
                     rec_length, len(data) // 2))
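The record path follows the same idea, only the expansion target is the record length; a sketch with a made-up record length (in the real code it is taken from the SELECT response via self.__record_len(res)):

    from pySim.utils import expand_hex

    rec_length = 4                      # hypothetical record length in bytes
    assert expand_hex('a5ff..', rec_length) == 'a5ffffff'
    # expand_hex() never truncates: data that is already longer than the record
    # is returned unchanged and still hits the ValueError check shown above.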
@@ -1225,6 +1225,60 @@ def auto_int(x):
     return int(x, 0)


+def expand_hex(hexstring, length):
+    """Expand a given hexstring to a specified length by replacing "." or ".."
+    with a filler that is derived from the neighboring nibble or byte,
+    respectively. Usually this is the nibble or byte before "." or "..";
+    when the string begins with "." or "..", the nibble or byte after it is
+    used instead. In case the string cannot be expanded for some reason, the
+    input string is returned unmodified.
+
+    Args:
+        hexstring : hexstring to expand
+        length : desired length of the resulting hexstring, in bytes
+    Returns:
+        expanded hexstring
+    """
+
+    # expand digit aligned
+    if hexstring.count(".") == 1:
+        pos = hexstring.index(".")
+        if pos > 0:
+            filler = hexstring[pos - 1]
+        else:
+            filler = hexstring[pos + 1]
+
+        missing = length * 2 - (len(hexstring) - 1)
+        if missing <= 0:
+            return hexstring
+
+        return hexstring.replace(".", filler * missing)
+
+    # expand byte aligned
+    elif hexstring.count("..") == 1:
+        if len(hexstring) % 2:
+            return hexstring
+
+        pos = hexstring.index("..")
+
+        if pos % 2:
+            return hexstring
+
+        if pos > 1:
+            filler = hexstring[pos - 2:pos]
+        else:
+            filler = hexstring[pos + 2:pos + 4]
+
+        missing = length * 2 - (len(hexstring) - 2)
+        if missing <= 0:
+            return hexstring
+
+        return hexstring.replace("..", filler * (missing // 2))
+
+    # no change
+    return hexstring
+
+
 class JsonEncoder(json.JSONEncoder):
     """Extend the standard library JSONEncoder with support for more types."""
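A few corner cases of expand_hex() are worth spelling out; this is a small assert-based sketch of the behaviour of the function above (all inputs are invented):

    from pySim.utils import expand_hex

    # A leading '.' or '..' takes its filler from the following nibble or byte.
    assert expand_hex('.f', 2) == 'ffff'
    assert expand_hex('..ff', 3) == 'ffffff'

    # The input is returned unmodified whenever it cannot be expanded.
    assert expand_hex('f..f', 3) == 'f..f'                   # '..' not byte-aligned
    assert expand_hex('0011223344..', 4) == '0011223344..'   # already longer than the target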