projects
/
b43-tools.git
/ blobdiff
commit
grep
author
committer
pickaxe
?
search:
re
summary
|
shortlog
|
log
|
commit
|
commitdiff
|
tree
raw
|
inline
| side by side
disassembler: Emit %start directive. This avoids a warning when re-assembling
[b43-tools.git]
/
disassembler
/
brcm80211fwconv
diff --git
a/disassembler/brcm80211fwconv
b/disassembler/brcm80211fwconv
index fe5f220fc5c3db2f3b5f2dcd51756da678189931..092c043cfac4e3f06bc985dc282f6eac9df93b0f 100755
(executable)
--- a/disassembler/brcm80211fwconv
+++ b/disassembler/brcm80211fwconv
@@ -56,7 +56,7 @@ def indexToName(index):
 	except KeyError:
 		return "Unknown"
-def parseHeader(hdr_data):
+def parseHeader(hdr_data, sortByOffset):
 	sections = []
 	for i in range(0, len(hdr_data), 3 * 4):
 		offset = ord(hdr_data[i + 0]) | (ord(hdr_data[i + 1]) << 8) |\
@@ -67,7 +67,10 @@ def parseHeader(hdr_data):
 			(ord(hdr_data[i + 10]) << 16) | (ord(hdr_data[i + 11]) << 24)
 		sections.append( (offset, length, index) )
- sections.sort(key = lambda x: x[2]) # Sort by index
+ if sortByOffset:
+ sections.sort(key = lambda x: x[0]) # Sort by offset
+ else:
+ sections.sort(key = lambda x: x[2]) # Sort by index
 	return sections

 def generateHeaderData(sections):
@@ -95,7 +98,7 @@ def getSectionByIndex(sections, searchIndex):
 			return section
 	return None
-def parseHeaderFile(hdr_filepath):
+def parseHeaderFile(hdr_filepath, sortByOffset=False):
 	try:
 		hdr_data = file(hdr_filepath, "rb").read()
 	except (IOError), e:
@@ -104,7 +107,7 @@ def parseHeaderFile(hdr_filepath):
 	if len(hdr_data) % (3 * 4) != 0:
 		print "Invalid header file format"
 		return None
-	return parseHeader(hdr_data)
+	return parseHeader(hdr_data, sortByOffset)
 def dumpInfo(hdr_filepath):
 	sections = parseHeaderFile(hdr_filepath)
@@ -142,7 +145,7 @@ def extractSection(hdr_filepath, bin_filepath, extractIndex, outfilePath):
 	return 0

 def mergeSection(hdr_filepath, bin_filepath, mergeIndex, mergefilePath):
-	sections = parseHeaderFile(hdr_filepath)
+	sections = parseHeaderFile(hdr_filepath, sortByOffset=True)
 	if not sections:
 		return 1
 	try:
@@ -158,9 +161,14 @@ def mergeSection(hdr_filepath, bin_filepath, mergeIndex, mergefilePath):
 	newBin = []
 	newSections = []
 	newOffset = 0
+ foundIt = False
 	for section in sections:
 		(offset, length, index) = section
 		if index == mergeIndex:
+ if foundIt:
+ print "Confused. Multiple sections with index %d?" % index
+ return 1
+ foundIt = True
 			# We overwrite this section
 			newBin.append(merge_data)
 			newSections.append( (newOffset, len(merge_data), index) )
@@
-173,6
+181,9
@@
def mergeSection(hdr_filepath, bin_filepath, mergeIndex, mergefilePath):
 				return 1
 			newSections.append( (newOffset, length, index) )
 			newOffset += length
+ if not foundIt:
+ print "Did not find section with index %d" % mergeIndex
+ return 1
 	newBin = "".join(newBin)
 	newHdr = generateHeaderData(newSections)
 	try: