mirror of https://gerrit.osmocom.org/pysim
commands: do not check SW manually, use send_apdu_checksw()
The transport layer provides a method send_apdu_checksw() to send APDUs and verify that the SW is the expected one. Given that, there is no need to check the SW manually. The exception raised by send_apdu_checksw() will report the problem and also display the SW in a human-readable form. Change-Id: I9ce556ac0b7bb21c5c5a27170c32af0152255b79 Related: OS#5275
This commit is contained in:
parent
fc769e2fdb
commit
796ca3daf9
|
@ -23,7 +23,7 @@
|
|||
|
||||
from construct import *
|
||||
from pySim.construct import LV
|
||||
from pySim.utils import rpad, b2h, h2b, sw_match, bertlv_encode_len, Hexstr, h2i
|
||||
from pySim.utils import rpad, b2h, h2b, sw_match, bertlv_encode_len, Hexstr, h2i, str_sanitize
|
||||
from pySim.exceptions import SwMatchError
|
||||
|
||||
class SimCardCommands(object):
|
||||
|
@ -145,12 +145,12 @@ class SimCardCommands(object):
|
|||
while chunk_offset < length:
|
||||
chunk_len = min(255, length-chunk_offset)
|
||||
pdu = self.cla_byte + 'b0%04x%02x' % (offset + chunk_offset, chunk_len)
|
||||
data,sw = self._tp.send_apdu(pdu)
|
||||
if sw == '9000':
|
||||
total_data += data
|
||||
chunk_offset += chunk_len
|
||||
else:
|
||||
raise ValueError('Failed to read (offset %d)' % (offset))
|
||||
try:
|
||||
data, sw = self._tp.send_apdu_checksw(pdu)
|
||||
except Exception as e:
|
||||
raise ValueError('%s, failed to read (offset %d)' % (str_sanitize(str(e)), offset))
|
||||
total_data += data
|
||||
chunk_offset += chunk_len
|
||||
return total_data, sw
|
||||
|
||||
def update_binary(self, ef, data:str, offset:int=0, verify:bool=False, conserve:bool=False):
|
||||
|
@ -172,22 +172,21 @@ class SimCardCommands(object):
|
|||
|
||||
self.select_path(ef)
|
||||
total_data = ''
|
||||
total_sw = "9000"
|
||||
chunk_offset = 0
|
||||
while chunk_offset < data_length:
|
||||
chunk_len = min(255, data_length - chunk_offset)
|
||||
# chunk_offset is bytes, but data slicing is hex chars, so we need to multiply by 2
|
||||
pdu = self.cla_byte + 'd6%04x%02x' % (offset + chunk_offset, chunk_len) + data[chunk_offset*2 : (chunk_offset+chunk_len)*2]
|
||||
chunk_data, chunk_sw = self._tp.send_apdu(pdu)
|
||||
if chunk_sw == total_sw:
|
||||
total_data += chunk_data
|
||||
chunk_offset += chunk_len
|
||||
else:
|
||||
total_sw = chunk_sw
|
||||
raise ValueError('Failed to write chunk (chunk_offset %d, chunk_len %d)' % (chunk_offset, chunk_len))
|
||||
try:
|
||||
chunk_data, chunk_sw = self._tp.send_apdu_checksw(pdu)
|
||||
except Exception as e:
|
||||
raise ValueError('%s, failed to write chunk (chunk_offset %d, chunk_len %d)' % \
|
||||
(str_sanitize(str(e)), chunk_offset, chunk_len))
|
||||
total_data += data
|
||||
chunk_offset += chunk_len
|
||||
if verify:
|
||||
self.verify_binary(ef, data, offset)
|
||||
return total_data, total_sw
|
||||
return total_data, chunk_sw
|
||||
|
||||
def verify_binary(self, ef, data:str, offset:int=0):
|
||||
"""Verify contents of transparent EF.
|
||||
|
|
|
@ -5,6 +5,7 @@
|
|||
|
||||
import json
|
||||
import abc
|
||||
import string
|
||||
from io import BytesIO
|
||||
from typing import Optional, List, Dict, Any, Tuple
|
||||
|
||||
|
@ -89,6 +90,20 @@ def lpad(s:str, l:int, c='f') -> str:
|
|||
def half_round_up(n:int) -> int:
|
||||
return (n + 1)//2
|
||||
|
||||
def str_sanitize(s:str) -> str:
	"""Filter a string down to printable characters.

	Every character that is not a digit, an ASCII letter or a punctuation
	character (this includes line breaks and any other whitespace) is
	mapped to a single space; leading and trailing whitespace is then
	removed from the result.

	Args:
		s : string to sanitize
	Returns:
		filtered result of string 's'
	"""

	keep = string.digits + string.ascii_letters + string.punctuation
	return ''.join(ch if ch in keep else ' ' for ch in s).strip()
|
||||
|
||||
#########################################################################
|
||||
# poor man's COMPREHENSION-TLV decoder.
|
||||
#########################################################################
|
||||
|
|
Loading…
Reference in New Issue