Model.py
### Code Provider: *Niloofar Didar*
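# Pipeline overview (reconstructed from the phase comments below; run headless, e.g.
#   blender -b -P Model.py -- --infile Objects/CriticInp.txt):
#   1. Critical-view finder (IQA2 phase): import each .obj, render it at the closest fitting
#      distance in original and simplified quality from several rotations, and compare the
#      screenshot pairs with "wine gmsd.exe" to pick the view most sensitive to simplification.
#   2. distance.py phase: re-import each object at its critical view and render it at increasing
#      camera distances for five decimation ratios (1.0 down to 0.2).
#   3. IQA3 phase: score every decimated render against the full-quality render taken at the same
#      distance and angle, writing the scores to FinalIQA.txt.
#   4. Result splitting (the only phase left active below): regroup the scores by decimation ratio
#      into Degradation_Error.txt; a final, disabled block would also extract the distances.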
import os
import bpy
import bmesh
import sys
import time
import argparse
import mathutils
import math
import shutil


def get_args():
    parser = argparse.ArgumentParser()
    # Critical-view finder (screenshot phase): takes the obj and simplification level, puts the obj at the
    # closest distance with original and simplified quality at 6 different FOVs, and takes screenshots
    # (12 screenshots per obj) -> these are then fed to IQA2 to find the critical view.
    # Invocation:
    #   blender -b -P Model.py -- --infile Objects/CriticInp.txt
    #   blender-2.79-linux-glibc219-x86_64/blender -b -P test.py
    _, all_arguments = parser.parse_known_args()
    double_dash_index = all_arguments.index('--')
    script_args = all_arguments[double_dash_index + 1:]
    # add parser rules
    parser.add_argument('-fil', '--infile', help="Third Object")
    parsed_script_args, _ = parser.parse_known_args(script_args)
    return parsed_script_args


args = get_args()
infile = str(args.infile)
print(infile)

with open(infile, "r") as inp:
    count = inp.readline()
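# Expected --infile layout (inferred from the parsing code in this script, e.g. Objects/CriticInp.txt):
#   line 1: number of objects
#   one line per object thereafter: <path to .obj> <object name> <decimation ratio>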
'''
#bpy.data.objects['Camera'].rotation_euler= mathutils.Vector((1.1087, -0.0,0.8150688))
bpy.data.cameras.values()[0].lens=25
bpy.data.cameras['Camera'].lens=25
angles=4
with open(infile, "r") as inp, open("Objects/Screenshots/compare.txt", "w") as out:
    count= inp.readline()
    finalcount= float(count)*angles*2
    out.write(str(finalcount)+ "\n")
    for line in inp:
        #bpy.data.objects['Camera'].location = mathutils.Vector((7.35889 , -6.92579 , 4.95831 )) #11.243 from object
        input_model,Objname,Dratio = map(str,line.split())
        #Dist=float(TDis) # it is the preferred distance
        # deselect all
        bpy.ops.object.select_all(action='DESELECT')
        # selection: remove any mesh left over from the previous object
        for o in bpy.data.objects:
            if o.type == 'MESH':
                bpy.data.objects.remove(o)
        print('\n Beginning the process of import using Blender Python API ...\n')
        bpy.ops.import_scene.obj(filepath=input_model)
        print('\n Obj file imported successfully ...')
        scene = bpy.context.scene
        obs=[]
        #should be comment-for inf just
        for o in bpy.data.objects:
            if o.type == 'MESH':
                xx=o.location.x
                yy=o.location.y
                zz=o.location.z
                curobj=o
                i=1
                camx =bpy.data.objects['Camera'].location.x
                camy =bpy.data.objects['Camera'].location.y
                camz =bpy.data.objects['Camera'].location.z
                dx = camx - xx
                dy = camy - yy
                dz = camz - zz
                distance= math.sqrt(pow(dx, 2) + pow(dy, 2) + pow(dz, 2))
                print ("Distance "+str(i)+": "+str(distance))
                print('\n Ending Distance...')
                obs.append(o)
        #should be comment-
        ctx = bpy.context.copy()
        # one of the objects to join
        ctx['active_object'] = obs[0]
        ctx['selected_objects'] = obs
        # In Blender 2.8x this needs to be the following instead:
        #ctx['selected_editable_objects'] = obs
        # We need the scene bases as well for joining.
        # Remove this line in Blender >= 2.80!
        ctx['selected_editable_bases'] = [scene.object_bases[ob.name] for ob in obs]
        bpy.ops.object.join(ctx)
        #nil
        bpy.data.objects["Camera"].rotation_euler = mathutils.Vector((1.110029, 0.0, 0.8150688))
        curobj.select=True  # select object
        bpy.ops.view3d.camera_to_view_selected()
        bpy.data.objects["Lamp"].location= bpy.data.objects["Camera"].location
        camloc=bpy.data.objects["Camera"].location
        print("cam location is "+ str(camloc))
        curobj.select=False  # deselect object
        #nil
        # start rotating the object, simplification and screenshots
        bpy.context.scene.render.image_settings.file_format='BMP'
        output_dir="Objects/Screenshots/"
        output_file_format="bmp"
        rotation_steps = 5
        degree=0.872665
        for o in bpy.data.objects:
            if o.type == 'MESH':
                decimateRatio = float(1)
                for step in range(1, 3):
                    print('\n Beginning the process of Decimation using Blender Python API ...')
                    modifierName='DecimateMod'
                    modifier = o.modifiers.new(modifierName,'DECIMATE')
                    modifier.ratio = decimateRatio
                    print("dec ratio is "+ str(decimateRatio))
                    modifier.use_collapse_triangulate = True
                    bpy.ops.object.modifier_apply(apply_as='DATA', modifier=modifierName)
                    for step in range(1, rotation_steps):
                        o.rotation_euler=mathutils.Vector((1.5708, -0.0, degree))
                        # deselect all
                        bpy.ops.object.select_all(action='DESELECT')
                        # selection
                        curobj.select=True  # select object
                        bpy.ops.view3d.camera_to_view_selected()
                        xx=o.location.x
                        yy=o.location.y
                        zz=o.location.z
                        obj=o
                        objloc=o.location
                        print("current obj is "+ str(obj)+" and obj location is "+ str(objloc))
                        i+=1
                        camx =bpy.data.objects['Camera'].location.x
                        camy =bpy.data.objects['Camera'].location.y
                        camz =bpy.data.objects['Camera'].location.z
                        dx = camx - xx
                        dy = camy - yy
                        dz = camz - zz
                        distance= math.sqrt(pow(dx, 2) + pow(dy, 2) + pow(dz, 2))
                        print ("Distance "+str(i)+": "+str(distance))
                        print('\n Ending Distance...')
                        bpy.context.scene.render.filepath = output_dir + (Objname +str(step))+ "d"+ str(round(distance))+ "r"+str(decimateRatio)
                        bpy.ops.render.render(write_still = True)
                        out.write(output_dir + (Objname +str(step))+ "d"+ str(round(distance))+"r"+str(decimateRatio)+ ".bmp"+"\n")
                        degree+=1.5708
                    decimateRatio = float(Dratio)
# end of screenshot phase
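# The comparison below shells out to "wine gmsd.exe" (presumably GMSD, Gradient Magnitude
# Similarity Deviation, a full-reference image-quality metric): every original/simplified
# screenshot pair of the same view is scored and the scores are collected in outIQA.txt.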
angles=4
# start comparing ranks of all views (start of IQA2.py code)
# open compare.txt to read all images to compare, open outIQA.txt to collect all results; IQAout.txt holds just one result at a time from the external tool
with open("Objects/Screenshots/compare.txt", "r") as inp, open("Objects/Screenshots/outIQA.txt", "w") as out:
    numinp=float(inp.readline())
    lines = inp.readlines()
    print(lines[0])
    i=0
    index=0
    print(numinp)
    a = []
    b = []
    name=[]
    while i<numinp:
        for j in range(0, angles):
            a.append(lines[i+j])
            b.append(lines[i+j+angles])
            temp1=a[index]
            temp1=temp1[:-1]  # trim the trailing newline char
            temp2=b[index]
            temp2=temp2[:-1]
            # print(str(temp1) + " "+ str(temp2)+ " "+str(a))
            path="wine gmsd.exe"
            end=" > \"Objects/Screenshots/IQAout.txt\" "
            #final = "wine gmsd.exe Objects/Screenshots/Andy1d1r1.0.bmp Objects/Screenshots/Andy1d1r0.5.bmp > IQAout.txt"
            final ="wine gmsd.exe %s %s > Objects/Screenshots/IQAout.txt" %(temp1, temp2)
            # each IQA result for one comparison is saved to IQAout.txt and then appended to the out file
            # print(final)
            os.system(final)
            with open("Objects/Screenshots/IQAout.txt", "r") as iqa:
                #out.write(str(j+1)+" "+iqa.readline())
                out.write(iqa.readline())
            index+=1
        objname=lines[i]
        objname=objname[20:-12]  # strip the file path and suffix to keep just the object name
        address="Objects/obj/%s.obj " %objname
        print(address)
        name.append(address)
        i+=(angles*2)
print("start checking critical view")
with open("Objects/Screenshots/outIQA.txt", "r") as inpp, open("Objects/Screenshots/cresult.txt", "w") as outt:
    i2=0
    j2=0
    lines2 = inpp.readlines()
    while i2<numinp/2:
        maxim= max(lines2[i2:i2+angles])
        maxdex= lines2.index(maxim)
        fmaxdex=maxdex%angles + 1
        minim= min(lines2[i2:i2+angles])
        mindex= lines2.index(minim)
        fmindex=mindex%angles + 1
        print("maximum is "+ str(maxim)+ " "+str(fmaxdex)+"\n")
        print("minimum is "+ str(minim)+ " "+str(fmindex))
        i2+=angles
        outt.write(str(fmaxdex) + " " + name[j2]+"\n")
        #outt.write(str(fmindex) + " " + name[j2]+"\n")
        j2+=1
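# Each line of cresult.txt is "<index of the rotation step with the largest score> <path of the
# object's .obj file>", i.e. the view where simplification is most visible; it drives the
# distance.py phase below.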
# end of IQA2 and start of distance.py, which decimates the models and places them at different distances:
camlens =bpy.data.cameras.values()[0].lens=25
bpy.data.cameras['Camera'].lens=25
lmp=bpy.data.objects["Lamp"]
bpy.data.objects.remove(lmp)
# add new lamp
scene = bpy.context.scene
# Create new lamp datablock
lamp_data = bpy.data.lamps.new(name="Lamp2", type='HEMI')
# Create new object with our lamp datablock
lamp_object = bpy.data.objects.new(name="Lamp2", object_data=lamp_data)
# Link lamp object to the scene so it'll appear in this scene
scene.objects.link(lamp_object)
camera= bpy.context.scene.camera.data
camera.clip_end=1000
camera.clip_start=0.001
# Starting object z-height on camera sensor calculation:
camlens =bpy.data.cameras.values()[0].lens=25
with open("Objects/Screenshots/cresult.txt", "r") as inp, open("Objects/finalObj/model.txt", "w") as out,open("Objects/finalObj/virdistance.txt", "w") as vdist, open("Objects/finalObj/camerapos.txt", "w") as campos:
#no_of_cases = str(inp.readline())
#print(no_of_cases)
for line in inp:
# deselect all
bpy.ops.object.select_all(action='DESELECT')
# selection
#input_model,Objname,Dratio,TDis = map(str,line.split())
angle_num,input_model = map(str,line.split())
angle = int( angle_num)
objname= input_model
objname=objname[12:-4] #omit file address too have just name of object
print("angle is " + str(angle))
scene = bpy.context.scene
obs=[]
## flush environment before importing new object
for o in bpy.data.objects:
if o.type == 'MESH':
bpy.data.objects.remove(o)
##
print('\n Beginning the process of import using Blender Python API ...\n')
bpy.ops.import_scene.obj(filepath=input_model)
print('\n Obj file imported successfully ...')
for o in bpy.data.objects:
if o.type == 'MESH':
obs.append(o)
ctx = bpy.context.copy()
# one of the objects to join
ctx['active_object'] = obs[0]
ctx['selected_objects'] = obs
# In Blender 2.8x this needs to be the following instead:
#ctx['selected_editable_objects'] = obs
# We need the scene bases as well for joining.
# Remove this line in Blender >= 2.80!
ctx['selected_editable_bases'] = [scene.object_bases[ob.name] for ob in obs]
bpy.ops.object.join(ctx)
for o in bpy.data.objects:
if o.type == 'MESH':
xx=o.location.x
yy=o.location.y
zz=o.location.z
curobj=o
#nil
bpy.data.objects['Camera'].location = mathutils.Vector((7.35889 , -6.92579 , 4.95831 )) #11.243 from object
bpy.ops.object.select_all(action='DESELECT')
# selection
# this is min cam distance:
curobj.select=True
# select object
curobj.location=mathutils.Vector((0.0, -0.0 , 0.0 )) #11.243 from object
if angle==1:
degree=mathutils.Vector((1.5708, -0.0, 0.872665))
elif angle==2:
degree= mathutils.Vector((1.5708, -0.0, 0.872665+1.5708))
elif angle==3:
degree= mathutils.Vector((1.5708, -0.0, 0.872665+(2*1.5708)))
elif angle==4:
degree= mathutils.Vector((1.5708, -0.0, 0.872665+(3*1.5708)))
elif angle==5:
degree=mathutils.Vector((2.61799, -3.50811, 3.735))
elif angle==6:
degree=mathutils.Vector((4.60767, -2.00713, 5.63741))
curobj.rotation_euler=degree
bpy.data.objects["Camera"].rotation_euler = mathutils.Vector((1.110029, 0.0, 0.8150688))
bpy.ops.view3d.camera_to_view_selected()
#Nill2
bpy.context.scene.render.image_settings.file_format='BMP'
output_dir="Objects/finalObj/"
#bpy.context.scene.render.filepath = output_dir +objname
#bpy.ops.render.render(write_still = True)
#Nill2
camloc=bpy.data.objects["Camera"].location
camlocx=str(camloc.x)
camlocy=str(camloc.y)
camlocz=str(camloc.z)
print("cam location is "+ str(camloc))
curobj.select=False # select object
#nil
# in order to find the fit view of object, closets distance from camera to obj that fits object, then we use the distance as a reference to calculate all dimension for the preffered distance
#start computing distance
print('\n Starting Distance Computing...')
camx =bpy.data.objects['Camera'].location.x
camy =bpy.data.objects['Camera'].location.y
camz =bpy.data.objects['Camera'].location.z
obj= curobj
xx=obj.location.x
yy=obj.location.y
zz=obj.location.z
dx = camx - xx
dy = camy - yy
dz = camz - zz
distance= math.sqrt(pow(dx, 2) + pow(dy, 2) + pow(dz, 2))
inc_factor=distance #*0.5# increament distacne by this
# so the factor to multiply cam location by each time is incremental + old_distance/ old_distance: e.g, old-d =1 , inc = 0.5 => factor = 1 + 0.5 / 1= 1.5=> 1.5 * 1= 1.5
# next sould be 2 so factor= 1.5 + 0.5 / 1.5 =1.33 => 1.33 * old 1.5 = 2
print ("distance is "+ str(distance))
print('\n Ending Distance...')
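        # Interpretation (not stated in the original): Vx/Vy/Vz below are the thin-lens projected sizes of the
        # object's bounding box on the sensor (focal length * dimension / distance), and 0.758 appears to act
        # as a fixed reference extent; DFactor is then the average factor by which the camera can be pulled
        # back before the projection shrinks to that reference, which defines Maxdistance for the zoom-out loop.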
        # Max distance:
        Height= curobj.dimensions.z
        #print("dz object Dimension is "+ str(curobj.dimensions.z))
        Vz= (camlens* Height)/distance
        Vx=(camlens* curobj.dimensions.x)/distance
        Vy=(camlens* curobj.dimensions.y)/distance
        fac1= Vx/(0.758)
        fac2= Vy/(0.758)
        fac3=Vz/(0.758)
        DFactor= (fac1+fac2+fac3)/3
        print(" Facts are " + str(fac1) +"," + str(fac2)+"," + str(fac3))
        print(" Dfactor is " + str(DFactor))
        # change camera location based on the distance in input
        #print( "Max distance will be "+str(distance * DFactor )+" and actual Distance was :"+ str(distance))
        Maxdistance= math.sqrt(pow(DFactor*dx, 2) + pow(DFactor*dy, 2) + pow(DFactor*dz, 2))
        #print( "Max distance by computation will be "+str(newdistance))
        # after importing we start simplification
        # start rotating the object, simplification and screenshots
        rotation_steps = 5
        vir_distance=False
        #degree=0.872665
        old_ratio=new_ratio=1
        #bpy.data.objects["Lamp"].location= bpy.data.objects["Camera"].location
        decimateRatio = float(1)
        #for o in bpy.data.objects:
        #    if o.type == 'MESH':
        dis_indx=0
        campos_flag=False
        campos.write( str(objname)+ "\n")
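        # For each of the 5 decimation levels the camera is reset to the fitted minimum position, then pushed
        # back each step by scaling its location by (inc_factor + distance) / distance until Maxdistance is
        # reached, rendering one BMP per distance; after each level the mesh is decimated by a further 0.2 of
        # the original ratio (so the rendered ratios are 1.0, 0.8, 0.6, 0.4, 0.2).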
        for step in range(1, 6):  # 5 simplification levels of screenshots
        #for step in range(1, 2):
            bpy.data.objects["Camera"].location.x= float(camlocx)
            bpy.data.objects["Camera"].location.y= float(camlocy)
            bpy.data.objects["Camera"].location.z= float(camlocz)
            first_time=False
            distance=0
            while distance<Maxdistance:
            #while distance<5:
                bpy.data.objects["Lamp2"].location= bpy.data.objects["Camera"].location
                #curobj.rotation_euler=mathutils.Vector((1.5708, -0.0, 0.872665))
                #i+=1
                dx = bpy.data.objects['Camera'].location.x - 0
                dy = bpy.data.objects['Camera'].location.y - 0
                dz = bpy.data.objects['Camera'].location.z - 0
                distance= math.sqrt(pow(dx, 2) + pow(dy, 2) + pow(dz, 2))
                #print ("Distance "+str(i)+": "+str(distance))
                #print('\n Ending Distance...')
                Vz= (camlens* Height)/distance
                if vir_distance==False:
                    vdist.write( str(round(distance,2))+" " + str(Vz)+"\n")
                if first_time==False:
                    # fit the camera's view to the object
                    curobj.select=True
                    bpy.ops.view3d.camera_to_view_selected()
                    first_time=True
                # Nil did this temporarily
                curobj.rotation_euler=degree
                bpy.context.scene.render.filepath = output_dir +objname+ "deg"+ str(angle)+"d"+ str(round(distance,2))+ "r"+str(round(new_ratio,2))
                bpy.ops.render.render(write_still = True)
                out.write(output_dir +objname+ "deg"+ str(angle)+"d"+ str(round(distance,2))+ "r"+str(round(new_ratio,2))+ ".bmp"+"\n")
                if(campos_flag==False):
                    campos.write( str(round(distance,2))+" "+ str(dx) + " " + str(dy) + " "+ str(dz)+"\n")
                # changing camera location
                mul_fact= (inc_factor+ distance) / distance
                bpy.data.objects['Camera'].location.x *= mul_fact
                bpy.data.objects['Camera'].location.y *=mul_fact
                bpy.data.objects['Camera'].location.z*=mul_fact
            campos_flag=True
            # print( "changed dim x, y, z is "+ str(bpy.data.objects['Camera'].location.x)+ str(bpy.data.objects['Camera'].location.y)+ str(bpy.data.objects['Camera'].location.z ))
            vir_distance=True
            old_ratio=new_ratio  # 1 as old ratio
            # print("screen shot was taken and dec ratio was "+ str(old_ratio))
            # print('\n Beginning the process of Decimation using Blender Python API ...')
            bpy.context.scene.objects.active = o
            # print("before decimation object {} has {} verts, {} edges, {} polys".format(curobj.name, len(curobj.data.vertices), len(curobj.data.edges), len(curobj.data.polygons)))
            new_ratio=old_ratio- float(0.20)  # 0.8 as new ratio
            new_ratio= round(new_ratio,2)
            decimateRatio = new_ratio/old_ratio
            modifierName='DecimateMod'
            modifier = curobj.modifiers.new(modifierName,'DECIMATE')
            modifier.ratio = decimateRatio
            modifier.use_collapse_triangulate = True
            bpy.ops.object.modifier_apply(apply_as='DATA', modifier=modifierName)
            #print("After decimation object {} has {} verts, {} edges, {} polys".format(curobj.name, len(curobj.data.vertices), len(curobj.data.edges), len(curobj.data.polygons)))
# end of distance.py
# start IQA3.py to rank model
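# model.txt lists the renders in the order written above (per object: all distances at ratio 1.0,
# then at 0.8, 0.6, 0.4, 0.2). For each full-quality render, the four decimated renders at the
# same distance are found by offsetting the line index by multiples of the per-object distance
# count, and each pair is scored with "wine gmsd.exe" into FinalIQA.txt.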
with open("Objects/finalObj/model.txt", "r") as inp, open("Objects/finalObj/FinalIQA.txt", "w") as out:
#numinp=float(inp.readline())
lines = inp.readlines()
i=0
index=0
token=lines[0]
token=token[35:-5]
ind=0
token3=token2=token #token2 is always fixed to first object value/ ratio num
object_index=ind
for step in range(1,4):
if object_index>= len(lines):
break
token=lines[object_index]
# fr finalObj/.. we start from index 18 but for finalObj2 it has one mre word so start from 19
token=token[17:-5]
#token=token[19:-5]
token = token.split("deg");
tok = token[1].split("r");
token2=token3= tok[1]
while token2==token3:
token=lines[i]
print (str(i))
#token=token[19:-5]
token=token[17:-5]
# 19 instead of 18 is for times when we have finalobj2 as destination which changes object addreses in one word of "2" instead of nothing
token = token.split("deg");
tok = token[1].split("r");
token3= tok[1]
#print (str(token3))
i+=1 # final i shows number of distances for each object- first one is 21
for j in range(object_index,i-1):
if object_index> len(lines):
break
b=[]
a = lines[j]
a=a[:-1]
# it is for 4 diff simplification level
temp2=lines[1*(i-1-object_index)+j]
temp2=temp2[:-1]
b.append (temp2)
temp2=lines[2*(i-1-object_index)+j]
temp2=temp2[:-1]
b.append (temp2)
temp2=lines[3*(i-1-object_index)+j]
temp2=temp2[:-1]
b.append (temp2)
temp2=lines[4*(i-1-object_index)+j]
temp2=temp2[:-1]
b.append (temp2)
#print (str(a) + "\n")
#print (str(b) +"\n")
step=0
while(step!=4):
final ="wine gmsd.exe %s %s > Objects/finalObj/Final.txt" %(a, b[step])
os.system(final)
with open("Objects/finalObj/Final.txt", "r") as iqa:
out.write(iqa.readline())
step+=1
object_index=(object_index+ ((i-1)-object_index)*5)
i=object_index
#print i
out.write("\n")
'''
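# NOTE: everything between the two ''' markers above is a triple-quoted string literal, so the
# screenshot, distance and IQA3 phases above do not execute in this version of the script; only
# the result-splitting code below (and the header that reads the object count) actually runs.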
# end of model and start of separating the results based on distance as well as degradation error
with open("Objects/finalObj/FinalIQA.txt", "r") as inp5, open("Objects/finalObj/Degradation_Error.txt", "w") as out5:
    # Here we write down the degradation error from ratio 0.8 down to 0.2 for object 1, then the same 0.8-to-0.2
    # range for the next object ---> matching the corresponding data is possible by knowing the number of objects
    # and the number of distances ===> we get the number of objects from the first line in Distances.txt and the
    # number of distances per object from the lines in the distances file before a blank line.
    #numinp=float(inp.readline())
    lines = inp5.readlines()
    # In each group of four lines starting from the first line, the first is for 0.8, the next ones are 0.6,
    # 0.4, 0.2, and the pattern repeats; so if the index into the lines array % 4 == 0 it is 0.8. A blank line
    # separates different objects.
    index= lines.index('\n')
    while (lines.count('\n')> 0 ):
        tmplines=lines[:index]
        print(index)
        ind=0
        j=0
        counter= len(tmplines)/4  # holds the number of error values for each ratio
        print(str(counter))
        while j<4:  # 4 ratios
            ind=j  # in the inner loop we collect the error values for one ratio, then move on to the next ratio
            for ii in range(0,int(counter)):
                out5.write(tmplines[ind])
                ind= ind+4
            out5.write("\n")
            j=j+1
        lines = lines[index+1: len(lines)]
        tmplines=lines[:index]
'''
# end of fetching the deg-error (IQA3.py) results and start of fetching the distances (data.py)
with open("Objects/finalObj/model.txt", "r") as inp, open("Objects/finalObj/Distances.txt", "w") as out:
    out.write((count) )
    #numinp=float(inp.readline())
    lines = inp.readlines()
    i=0
    j=0
    written=0
    #while i<len(lines):
    for step in range(1,int(count)+1):
        # number of objects (if you have 3 objects, use range(1, 4))
        dis1=0
        obj_index=0
        first =False
        while i<len(lines):
            token=lines[i]
            #print i
            token=token[17:-7]
            token = token.split("deg")
            temp2=token[1]
            temp2 = temp2.split("d")
            temp=temp2[1]
            temp3=temp.split("r")
            temp=temp3[0]
            if (dis1== temp):
                print ("i is "+str( i))
                i=(obj_index+ ((i)-obj_index)*5)-1  # jump past this object's decimation blocks
                obj_index=i  # 1st: index=15, 2nd: 15 + (18-15)*5
                first= False
                name=( token[0])
                print (name +"\n")
                out.write(name+ "\n")
                out.write("\n")
                #written+=1
                #break
            else:
                out.write(temp+ "\n")
                #dis1=temp
                if (first==False):
                    dis1=temp
                    first=True
            print (str(temp)+ " " +str(dis1) )
            i+=1  # the final i gives the number of distances for each object - the first one is 21
        #out.write(name+ "\n")
'''
### Code Provider: *Niloofar Didar*