-
Notifications
You must be signed in to change notification settings - Fork 33
Expand file tree
/
Copy pathmsc_parse.py
More file actions
309 lines (275 loc) · 11 KB
/
msc_parse.py
File metadata and controls
309 lines (275 loc) · 11 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
#!/usr/bin/python
#######################################################
# #
# Copyright Masterchain Grazcoin Grimentz 2013-2014 #
# https://github.com/grazcoin/mastercoin-tools #
# https://masterchain.info #
#            masterchain@bitmessage.ch             #
# License AGPLv3 #
# #
#######################################################
import operator
from optparse import OptionParser
from msc_utils_parsing import *
# global last block on the net
# NOTE(review): fetched once at module import time, so a long-lived importer
# would see a stale height - presumably fine for this run-once batch script.
last_height=get_last_height()
def _mark_validity(parsed, invalid, tx_hash):
    """Record the examine_outputs() validity verdict on a parsed tx dict.

    Leaves parsed['invalid'] alone if the parser already set it; otherwise
    stores the verdict tuple (and logs it) or False when the tx is valid.
    """
    if 'invalid' not in parsed:
        if invalid is not None:
            info(str(invalid[1])+' on '+tx_hash)
            parsed['invalid'] = invalid
        else:
            parsed['invalid'] = False


def _load_bootstrap_tx(filename):
    """Return the previously stored (bootstrap-era) tx json for filename.

    Returns None when the file does not exist or the stored tx turns out to
    be post-exodus-bootstrap (and therefore must not be kept).
    """
    orig_json = None
    try:
        # does this tx exist? (from bootstrap)
        f = open(filename, 'r')
        debug(filename)
        try:
            orig_json = json.load(f)[0]
        except (KeyError, ValueError, TypeError):
            try:
                # BUG FIX: the first json.load() consumed the stream; rewind
                # before retrying, otherwise this fallback always fails.
                f.seek(0)
                orig_json = json.load(f)
            except ValueError:
                error('failed loading json from '+filename)
        f.close()
        # verify bootstrap block
        if 'block' in orig_json:
            orig_block = orig_json['block']
            debug('found this tx already on (previous) block '+orig_block)
            if int(orig_block) > last_exodus_bootstrap_block:
                debug('but it is post exodus - ignoring')
                orig_json = None
        else:
            info('previous tx without block on '+filename)
    except IOError:
        pass
    return orig_json


def parse():
    """Parse Mastercoin transactions from the exodus scan addresses.

    Reads command-line options, decides which block height to resume from,
    walks the tx history of every exodus address (basic, multisig and
    bitcoin-payment transactions), writes one json file per tx under tx/,
    then publishes www/revision.json and optionally archives parsed data.
    """
    ######################################
    # reading and setting options values #
    ######################################
    msc_globals.init()
    parser = OptionParser("usage: %prog [options]")
    parser.add_option("-d", "--debug", action="store_true", dest='debug_mode', default=False,
                      help="turn debug mode on")
    parser.add_option("-t", "--transaction", dest='single_tx', default=None,
                      help="hash of a specific tx to parse")
    parser.add_option("-s", "--start-block", dest='starting_block_height', default=None,
                      help="start the parsing at a specific block height (default is last)")
    parser.add_option("-c", "--chain", dest='chain', default=None,
                      help="parse a specific chain only (default is all)")
    parser.add_option("-a", "--archive-parsed-data", action="store_true", dest='archive', default=False,
                      help="archive the parsed data of tx addr and general for others to download")
    parser.add_option("-r", "--repository-path", dest='repository_path', default="~/mastercoin-tools",
                      help="Specify the location of the mastercoin-tools repository (defaults to ~/mastercoin-tools)")
    (options, args) = parser.parse_args()
    msc_globals.d = options.debug_mode
    single_tx = options.single_tx
    requested_block_height = options.starting_block_height
    chain = options.chain

    # show debug on
    if msc_globals.d:
        debug('debug is on')

    # don't bother parsing if no new block was generated since last validation
    last_validated_block = 0
    try:
        f = open(LAST_VALIDATED_BLOCK_NUMBER_FILE, 'r')
        last_validated_block = int(f.readline())
        f.close()
        if last_validated_block == int(last_height):
            info('last validated block '+str(last_validated_block)+' is identical to current height')
            exit(0)
    except (IOError, ValueError):
        # missing file or non-integer content: just parse as usual
        pass

    # find which block to start with
    if requested_block_height is None:
        revision_block_height = 0  # init with 0
        notes_block_height = 0     # init with 0
        # first check last block on revision.json
        filename = 'www/revision.json'
        try:
            prev_revision_dict = load_dict_from_file(filename, all_list=True, skip_error=True)
            revision_block_height = prev_revision_dict['last_block']
        except KeyError:
            info(filename+' does not have last_block entry')
        # then check LAST_BLOCK_NUMBER_FILE
        try:
            f = open(LAST_BLOCK_NUMBER_FILE, 'r')
            notes = f.readline()
            f.close()
            # ValueError is now caught below (resolves the old FIXME)
            if notes != '':
                notes_block_height = int(notes)
        except (IOError, ValueError):
            info(LAST_BLOCK_NUMBER_FILE+' does not exist or has no integer.')
        # take the latest block of all
        starting_block_height = max(revision_block_height, notes_block_height)
        msc_globals.last_block = starting_block_height
        # to catch chain reorgs, step one block back
        starting_block_height = int(starting_block_height) - 1
    else:
        starting_block_height = requested_block_height
    archive = options.archive
    info('starting parsing at block '+str(starting_block_height))

    exodus_list = exodus_scan_list
    # hack to leave MSC to the end
    exodus_list.remove('1EXoDusjGwvnjZUyKkxZ4UHEf77z6A5S4P')
    exodus_list.append('1EXoDusjGwvnjZUyKkxZ4UHEf77z6A5S4P')
    if chain is not None:
        exodus_list = [chain]

    for scan_addr in exodus_list:
        msc_globals.exodus_scan = scan_addr
        info('scanning '+scan_addr+' from block '+str(starting_block_height))
        if single_tx is None:
            # get all tx of exodus address
            history = get_history(msc_globals.exodus_scan)
            history.sort(key=output_height)
        else:
            # build fake history of length 1 (debug purposes)
            json_tx = get_json_tx(get_raw_tx(single_tx))
            marker_number = -1
            marker_value = -1
            i = 0
            for o in json_tx['outputs']:
                if o['address'] == msc_globals.exodus_scan:
                    marker_number = i
                    marker_value = o['value']
                    # FIXME: handle multiple outputs to 1EXoDus
                    break
                else:
                    i += 1
            if marker_number == -1:
                error('tx does not belong to exodus')
            t1 = {"output": single_tx+':'+str(marker_number),
                  "output_height": "0",
                  "value": str(marker_value)}
            history = [t1]

        ###########################
        ### parsing starts here ###
        ###########################
        # go over transaction from all history of exodus_scan address
        last_block = 0
        for tx_dict in history:
            if starting_block_height is not None:
                current_block = tx_dict['output_height']
                if current_block != 'Pending':
                    if int(current_block) < int(starting_block_height):
                        debug('skip block '+str(current_block)+' since starting at '+str(starting_block_height))
                        continue
                else:
                    # Pending block will be checked whenever they are not Pending anymore.
                    continue
            try:
                tx_hash = tx_dict['output'].split(':')[0]
                tx_output_index = tx_dict['output'].split(':')[1]
            # BUG FIX: "except KeyError, IndexError:" is py2 syntax that binds
            # the KeyError instance to the *name* IndexError and never catches
            # an actual IndexError; a tuple catches both as intended.
            except (KeyError, IndexError):
                error("Cannot parse tx_dict:" + str(tx_dict))
            raw_tx = get_raw_tx(tx_hash)
            json_tx = get_json_tx(raw_tx, tx_hash)
            if json_tx is None:
                error('failed getting json_tx (None) for '+tx_hash)
            (block, index) = get_tx_index(tx_hash)
            if block is None or block == "failed:" or index is None:
                error('failed getting block None or index None for '+tx_hash)
            if last_block < int(block):
                last_block = int(block)
                msc_globals.last_block = last_block
            outputs_list = json_tx['outputs']
            (outputs_list_no_exodus, outputs_to_exodus,
             different_outputs_values, invalid) = examine_outputs(outputs_list, tx_hash, raw_tx)
            num_of_outputs = len(outputs_list)
            (block_timestamp, err) = get_block_timestamp(int(block))
            if block_timestamp is None:
                error('failed getting block timestamp of '+str(block)+': '+err)

            # check if basic or multisig
            is_basic = True
            for o in outputs_list:
                if is_script_multisig(o['script']):
                    debug('multisig tx found on output '+tx_output_index+': '+tx_hash)
                    is_basic = False
                    break

            if is_basic:  # basic option - not multisig
                if num_of_outputs > 2:  # for reference, data, marker
                    after_bootstrap = int(block) > int(last_exodus_bootstrap_block)
                    parsed = parse_simple_basic(raw_tx, tx_hash, after_bootstrap)
                    parsed['method'] = 'basic'
                    parsed['block'] = str(block)
                    parsed['index'] = str(index)
                    parsed['exodus_scan'] = msc_globals.exodus_scan
                    _mark_validity(parsed, invalid, tx_hash)
                    parsed['tx_time'] = str(block_timestamp)+'000'
                    filename = 'tx/'+parsed['tx_hash']+'.json'
                    orig_json = _load_bootstrap_tx(filename)
                    if orig_json is not None:  # it was an exodus tx
                        if len(orig_json) == 1:
                            new_json = [orig_json[0], parsed]
                            atomic_json_dump(new_json, filename, add_brackets=False)
                            info('basic tx was also exodus on '+tx_hash)
                        else:
                            info('basic tx is already present on exodus on '+tx_hash)
                    else:
                        atomic_json_dump(parsed, filename)
                else:  # num_of_outputs <= 2 and not multisig
                    # could still be a bitcoin payment for a sell/buy offer
                    if int(block) > int(last_exodus_bootstrap_block):
                        parsed = parse_bitcoin_payment(raw_tx, tx_hash)
                        parsed['method'] = 'bitcoin payment'
                        parsed['block'] = str(block)
                        parsed['index'] = str(index)
                        parsed['tx_time'] = str(block_timestamp)+'000'
                        parsed['exodus_scan'] = msc_globals.exodus_scan
                        filename = 'tx/'+parsed['tx_hash']+'.json'
                        atomic_json_dump(parsed, filename)
                    else:
                        debug('skip bootstrap basic tx with less than 3 outputs '+tx_hash)
            else:  # multisig
                if num_of_outputs == 2:  # deprecated simple version of multisig or sell offer with no change
                    parsed = parse_multisig(raw_tx, tx_hash)
                    if len(parsed) == 0:
                        continue
                    parsed['method'] = 'multisig'
                    parsed['block'] = str(block)
                    parsed['index'] = str(index)
                    parsed['exodus_scan'] = msc_globals.exodus_scan
                    _mark_validity(parsed, invalid, tx_hash)
                    parsed['tx_time'] = str(block_timestamp)+'000'
                    filename = 'tx/'+parsed['tx_hash']+'.json'
                    atomic_json_dump(parsed, filename)
                elif num_of_outputs > 2:  # multisig
                    parsed = parse_multisig(raw_tx, tx_hash)
                    if len(parsed) == 0:
                        # disabled
                        continue
                    parsed['method'] = 'multisig'
                    parsed['block'] = str(block)
                    parsed['index'] = str(index)
                    _mark_validity(parsed, invalid, tx_hash)
                    parsed['tx_time'] = str(block_timestamp)+'000'
                    parsed['exodus_scan'] = msc_globals.exodus_scan
                    filename = 'tx/'+parsed['tx_hash']+'.json'
                    atomic_json_dump(parsed, filename)
                else:  # fewer than 2 outputs - invalid
                    info('multisig with a single output tx found: '+tx_hash)

    # update global block height
    rev = get_revision_dict(last_block, options.repository_path)
    atomic_json_dump(rev, 'www/revision.json', add_brackets=False)
    if archive:
        archive_parsed_data(options.repository_path)
# Script entry point: run the full parse when executed directly.
if __name__ == "__main__":
    parse()