This file is indexed.

/usr/share/kissplice/kissplice.py is in kissplice 2.4.0-p1-1.

This file is owned by root:root, with mode 0o755.

The actual contents of the file can be viewed below.

#! /usr/bin/python
 # ***************************************************************************
 #
 #                              KisSplice
 #      de-novo calling alternative splicing events from RNA-seq data.
 #
 # ***************************************************************************
 #
 # Copyright INRIA
 #  contributors :  Vincent Lacroix
 #                  Pierre Peterlongo
 #                  Gustavo Sacomoto
 #                  Alice Julien-Laferriere
 #                  David Parsons
 #                  Vincent Miele
 #
 # pierre.peterlongo@inria.fr
 # vincent.lacroix@univ-lyon1.fr
 #
 # This software is a computer program whose purpose is to detect alternative
 # splicing events from RNA-seq data.
 #
 # This software is governed by the CeCILL license under French law and
 # abiding by the rules of distribution of free software. You can  use,
 # modify and/ or redistribute the software under the terms of the CeCILL
 # license as circulated by CEA, CNRS and INRIA at the following URL
 # "http://www.cecill.info".

 # As a counterpart to the access to the source code and  rights to copy,
 # modify and redistribute granted by the license, users are provided only
 # with a limited warranty  and the software's author,  the holder of the
 # economic rights,  and the successive licensors  have only  limited
 # liability.

 # In this respect, the user's attention is drawn to the risks associated
 # with loading,  using,  modifying and/or developing or reproducing the
 # software by the user in light of its specific status of free software,
 # that may mean  that it is complicated to manipulate,  and  that  also
 # therefore means  that it is reserved for developers  and  experienced
 # professionals having in-depth computer knowledge. Users are therefore
 # encouraged to load and test the software's suitability as regards their
 # requirements in conditions enabling the security of their systems and/or
 # data to be ensured and,  more generally, to use and operate it in the
 # same conditions as regards security.
 #
 # The fact that you are presently reading this means that you have had
 # knowledge of the CeCILL license and that you accept its terms.
import os
import re
import sys
import time
import shlex
import struct
import shutil
import os.path
import tempfile
import argparse
import threading
import multiprocessing
from random import randint
from operator import itemgetter
from subprocess import Popen, PIPE, STDOUT


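# default per-bcc timeout (in seconds) for the bubble enumeration, and the global log file handle/name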
TIMEOUT=100000
logFile = 0
logFileName = ""

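# bccs whose bubble enumeration was interrupted, and the number of SNPs already found in each bcc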
unfinished_bccs = []
num_snps = {}

# print str to the logFile
def printlg (*args):
    global logFile
    print >> logFile, ''.join(str(arg) for arg in args)

# get the timestamp as string
def getTimestamp():
    return "["+time.strftime("%H:%M:%S")+" "+time.strftime("%d/%m/%Y")+"] "


class Command(object): # deprecated in the future with Python3
    def __init__(self):
        self.process = None

    def target(self, **kwargs):
        self.process = Popen(kwargs["args"], stdout=kwargs["stdout"], stderr=PIPE)
        com = self.process.communicate()
        if com[0] and (kwargs["verbose"] or self.process.returncode != 0):
            print com[0]

        # Prints stderr that was piped by Popen
        if com[1] and (kwargs["verbose"] or self.process.returncode != 0):
            print com[1]

    def run(self, command_line, out_file = "", mode = 'w', verbose = False, timeout = sys.maxint):
        if verbose:
            print getTimestamp() + "Running "+command_line
        args = shlex.split(command_line)
        if len(out_file):
            stdout_file = open(out_file, mode)
            kwargs = {"verbose":verbose, "args":args, "stdout":stdout_file}
        else:
            kwargs = {"verbose":verbose, "args":args, "stdout":PIPE}

        # Create a Thread object that will run "self.target" with arguments kwargs
        # (given in the form of keyword argument) and start it
        thread = threading.Thread(target=self.target, kwargs=kwargs)
        thread.start()

        # Wait for end of thread or time out
        thread.join( timeout )

        # Check whether thread has ended or timed out
        # (if timed out, kill it and wait for it to actually die)
        if thread.is_alive():
            self.process.terminate()
            thread.join()

        if len(out_file):
            stdout_file.close()

        if self.process.returncode == -15:
            print >> sys.stderr, "\n\t\t *** Timeout reached! ***\n" #+ command_line
        elif self.process.returncode == 15:
            print >> sys.stderr, "\n\t\t *** Maximum number of bubbles reached! ***\n"
        elif self.process.returncode == -6:
            print >> sys.stderr, "\n\t\t *** Memory limit reached! ***\n"
        elif self.process.returncode == -11:
            print >> sys.stderr, "\n\t\t *** Problem with " + command_line.split()[0] + " ***"
            print >> sys.stderr, "\t\t *** Try increasing your stack size before running KisSplice executing: \"ulimit -s <new_limit>\" (you may first try \"ulimit -s unlimited\" if your OS accepts it. Otherwise, you can find the highest value for <new_limit> executing \"ulimit -H -s\").***\n"
            sys.exit(self.process.returncode)
        elif self.process.returncode != 0:
            print >> sys.stderr, "Problem with " + command_line.split()[0]
            sys.exit(self.process.returncode)


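# create the temporary working directory (under tmpdir when one is given)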
def mkdirTmp(tmpdir=None):
    if not tmpdir:
        workdir = tempfile.mkdtemp(prefix="kissplice.")
    else:
        workdir = tempfile.mkdtemp(prefix="kissplice.", dir=tmpdir)
    return workdir

def cleanTmp(workdir):
    shutil.rmtree(workdir)


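# convenience wrapper: run a command line through Command and return its exit code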
def subprocessLauncher(command_line, out_file = "", mode = 'w', verbose = False, timeout = sys.maxint):
    command = Command()
    command.run(command_line, out_file, mode, verbose, timeout)
    return command.process.returncode

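# write the space-separated list of read file names to 'filename', one name per line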
def to_file(readfiles, filename = "tmp"):
    f = open(filename, 'w')
    reads = readfiles.split(' ')
    for r in reads:
        f.write(r + "\n")
    f.close()

# Run the de Bruijn graph construction
def build_graph(KISSPLICE_INSTDIR, workdir, readfiles, kval, graphfile, min_cov, genome_size, verbose = False):
    print getTimestamp() + "--> Building de Bruijn graph..."
    printlg(getTimestamp() + "--> Building de Bruijn graph...")
    print "Graph will be written in "+graphfile+".[edges/nodes]"
    printlg("Graph will be written in "+graphfile+".[edges/nodes]")

    all_read_files = workdir + "/all_read_filenames"
    to_file(readfiles, all_read_files)
    command_line = KISSPLICE_INSTDIR+"/ks_debruijn4 " + all_read_files+" "+str(kval)+" "+str(min_cov)+" "+str(genome_size)+" "+ workdir + "/graph --kissplice --4bloom"
    subprocessLauncher(command_line, verbose=verbose)
    shutil.move(workdir+"/graph.edges", graphfile + ".edges")
    shutil.move(workdir+"/graph.nodes", graphfile + ".nodes")
    shutil.move(workdir+"/graph.solid_kmers_binary_with_count", graphfile + ".solid_kmers_binary_with_count")

    print getTimestamp() + "--> Done!"
    printlg(getTimestamp() + "--> Done!")

#Run error_removal for the graph (overwrite edge file)
def error_removal(KISSPLICE_INSTDIR, workdir, kval, graphfile, nobuild, cutoff, keep_counts, verbose = False):
    print  "\n" + getTimestamp() + "--> Removing sequencing errors..."
    printlg("\n" + getTimestamp() + "--> Removing sequencing errors...")
    
    #checks whether the user passed a pre-built graph to KisSplice; in that case, the error_removal step may already be done
    if nobuild:
        #checks whether the file produced by the error_removal step already exists
        edge_suffix = "_C"+str(cutoff)+".edges"
        if os.path.isfile(graphfile+edge_suffix):
                print "Sequencing-errors-removal step skipped: using previously computed file "+graphfile+edge_suffix
                printlg("Sequencing-errors-removal step skipped: using previously computed file "+graphfile+edge_suffix)
                print getTimestamp() + "--> Done!"
                printlg(getTimestamp() + "--> Done!")
                return True

    #at this point we do not have the sequencing-error-removal file; try to create it
    #first check whether the .counts file already exists
    if os.path.isfile(graphfile+".counts") == False:
        #.counts file does not exist - try to create it
        if verbose:
                print "File "+graphfile+".counts not found. Creating it..."

        # convert binary count file to text
        if os.path.isfile(graphfile+".solid_kmers_binary_with_count"):
                fin = open(graphfile+".solid_kmers_binary_with_count", "rb")
                kmer_nbits = struct.unpack("i",fin.read(4))[0]
                k = struct.unpack("i",fin.read(4))[0]
                fout = open(graphfile+".counts", "w")
                try:
                        while True:
                                kmer_binary = struct.unpack('B'*(kmer_nbits / 8), fin.read( kmer_nbits / 8) )
                                abundance = struct.unpack( 'I', fin.read(4) ) [0]
                                kmer = ""
                                for i in xrange(k):
                                        kmer = "ACTG"[(kmer_binary[i/4] >> (2*(i%4)) ) % 4] + kmer
                                print >> fout, kmer, abundance
                except:
                        fin.close()
                fout.close()
        else:
                print "File "+graphfile+".solid_kmers_binary_with_count not found. Skipping error removal step."
                printlg("File "+graphfile+".solid_kmers_binary_with_count not found. Skipping error removal step.")
                print getTimestamp() + "--> Done!"
                printlg(getTimestamp() + "--> Done!")
                return False

    #now the .counts file may have been created. Run the error-removal
    if os.path.isfile(graphfile+".counts"):
        command_line = KISSPLICE_INSTDIR+"/ks_error_removal "+graphfile+".edges "+graphfile+".nodes "+str(kval)+" "+graphfile+".counts "+str(cutoff)+" "+graphfile+"_C"+str(cutoff)
        subprocessLauncher(command_line, verbose=verbose)
        if keep_counts == False:
                delete_counts(graphfile, verbose)
        print getTimestamp() + "--> Done!"
        printlg(getTimestamp() + "--> Done!")
        return True
    else:
        print "File "+graphfile+".counts could not be found/created. Skipping error removal step."
        printlg("File "+graphfile+".counts could not be found/created. Skipping error removal step.")
        print getTimestamp() + "--> Done!"
        printlg(getTimestamp() + "--> Done!")
        return False


#Removes the .counts file
def delete_counts(graphfile, verbose = False):
    if os.path.isfile(graphfile+".counts"):
        if verbose:
                print "Removing "+graphfile+".counts"
        os.remove(graphfile+".counts")
    
#Run the modules on the graph
def run_modules(KISSPLICE_INSTDIR, workdir, graphfile, kval, cutoff, verbose = False, output_context = False, exec_error_removal = False):
    if not os.path.exists(workdir+"/bcc"):
        os.mkdir(workdir+"/bcc")
    print "\n" + getTimestamp() + "--> Finding the BCCs..."
    printlg("\n" + getTimestamp() + "--> Finding the BCCs...")

    edge_suffix = ".edges"
    if exec_error_removal:
        edge_suffix = "_C"+str(cutoff)+".edges"

    command_line = KISSPLICE_INSTDIR+"/ks_run_modules "+graphfile+edge_suffix+" "+graphfile+".nodes "+str(kval)+" "+workdir+"/bcc/graph"
    if output_context:
        command_line += " --output-context"

    return_code = subprocessLauncher(command_line, verbose=verbose)

    if (return_code != 0):
        print "\t\t *** Try increasing your stack size before running KisSplice executing: \"ulimit -s <new_limit>\" (you may first try \"ulimit -s unlimited\" if your OS accepts it. Otherwise, you can find the highest value for <new_limit> executing \"ulimit -H -s\").***\n"
    print getTimestamp() + "--> Done!"
    printlg(getTimestamp() + "--> Done!")

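# load the number of SNPs already found in each bcc into the global num_snps dictionary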
def count_alreadyfoundSNPs(workdir):
    global num_snps
    info_snp_file = open(workdir+"/bcc/graph_info_snp_bcc", 'r')
    info_snp = info_snp_file.readlines()
    for bcc_snp in info_snp:
        info = bcc_snp.split()# format: bcc_id num_snps
        num_snps[info[0]] = int(info[1])
    info_snp_file.close()

def find_bcc_ids_ordered_by_size(workdir, min_length = 4):
    f = open( workdir+"/bcc/graph_info_bcc")
    bccnum2size = f.readlines()[2:]
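    # sort the bcc ids (1-based) by decreasing size, keeping only those of size >= min_length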
    bccnumorderedbysize = [int(e[0])+1 for e in sorted(enumerate([int(t.split()[1]) for t in  bccnum2size]), key=lambda x:x[1], reverse=True) if int(e[1]) >= min_length ]
    f.close()
    return (bccnum2size, bccnumorderedbysize)
    
def enumerate_all_bubbles(KISSPLICE_INSTDIR, workdir, outdir, kval, bval, output_snps, min_edit_dist, max_cycles, UL_MAX, LL_MAX, LL_MIN, timeout, nbprocs=1, verbose = False, output_context = False, output_path = False, output_branch_count = False, experimental = False, max_memory = 0):
    print "\n" + getTimestamp() + "--> Enumerating all bubbles..."
    printlg("\n" + getTimestamp() + "--> Enumerating all bubbles...")

    if os.path.isfile(workdir+"/all_bcc_type0_"+str(kval)):
      os.remove(workdir+"/all_bcc_type0_"+str(kval))
    if os.path.isfile(workdir+"/all_bcc_type1234_"+str(kval)):
        os.remove(workdir+"/all_bcc_type1234_"+str(kval))
    f = open(workdir+"/bcc/graph_info_bcc")
    n_bcc = int(f.readline())
    f.close()

    file2size = {}

    # filling num_snps
    count_alreadyfoundSNPs(workdir);
    # ordering bcc by decreasing size and filtering if <4 nodes
    bccnum2size, bccnumorderedbysize = find_bcc_ids_ordered_by_size(workdir, 4)
    
    if verbose:
        if len(bccnumorderedbysize) != len(bccnum2size):
            print "Less than 4 nodes, cannot contain a bubble!"

    # multiprocessing step-  BEGIN
    pool = multiprocessing.Pool(nbprocs)
    TASKS = []
    for i in bccnumorderedbysize:
        TASKS +=  [(enumerate_bubbles_core, i, KISSPLICE_INSTDIR, workdir, outdir, kval, bval, output_snps, min_edit_dist, max_cycles, UL_MAX, LL_MAX, LL_MIN, timeout, verbose, output_context, output_path, output_branch_count, experimental, max_memory)]

    imap_unordered_it = pool.imap_unordered(eval_func_tuple, TASKS, 1)

    for x in imap_unordered_it:
        if x != -1:
            unfinished_bccs.append(x);

    pool.close()
    pool.join()
    # multiprocessing step - END

    destinationSNPS = open(workdir+"/all_bcc_type0_"+str(kval), 'wb') ## THE FILE CONTAINS SNPS
    destination1234 = open(workdir+"/all_bcc_type1234_"+str(kval), 'wb') ## THE FILE CONTAINS other bcc
    for file in os.listdir(workdir):
        if file[0:17] == "tmp_all_bcc_type0":
            shutil.copyfileobj(open(workdir+"/"+file, 'rb'), destinationSNPS)
        if file[0:20] == "tmp_all_bcc_type1234":
            shutil.copyfileobj(open(workdir+"/"+file, 'rb'), destination1234)
    destinationSNPS.close()
    destination1234.close()

    if output_path:
        destination_paths = open(workdir+"/all_paths_k"+str(kval), 'wb')
        for file in os.listdir(workdir):
            if file[0:18] == "tmp_all_paths_bcc_":
                shutil.copyfileobj(open(workdir+"/"+file, 'rb'), destination_paths)
        destination_paths.close()

    f = open(workdir+"/all_bcc_type0_"+str(kval))
    size0 = sum(1 for line in f)
    f.close()
    f = open(workdir+"/all_bcc_type1234_"+str(kval))
    size1234 = sum(1 for line in f)
    f.close()
    n_bubbles = (size0 + size1234)/4

    print "Total number of bubbles found: ", n_bubbles
    printlg("Total number of bubbles found: ", n_bubbles)
    print getTimestamp() + "--> Done!"
    printlg(getTimestamp() + "--> Done!")




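# enumerate the bubbles of a single biconnected component i;
# returns i if the enumeration was interrupted (timeout, bubble or memory limit), -1 otherwise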
def enumerate_bubbles_core(i, KISSPLICE_INSTDIR, workdir, outdir, kval, bval, output_snps, min_edit_dist, max_cycles, UL_MAX, LL_MAX, LL_MIN, timeout, verbose = False, output_context = False, output_path = False, output_branch_count = False, experimental = False, max_memory = 0):
    if verbose:
      print "@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@"
      print "Enumerating bubbles in biconnected component "+str(i)
    infofile = workdir+"/bcc/graph_info_bcc"
    contents_file_edges = workdir+"/bcc/graph_contents_edges_bcc"
    contents_file_nodes = workdir+"/bcc/graph_contents_nodes_bcc"
    basename_edges = workdir+"/bcc/graph_all_edges_bcc"
    basename_nodes = workdir+"/bcc/graph_all_nodes_bcc"

    # Contains -1 if the process finished or the bcc number if it timed out.
    flag = -1

    # number of SNPs already found in this bcc - also used as the starting number for the enumeration
    num_snps_bcc = 0
    if str(i) in num_snps:
        num_snps_bcc = num_snps[str(i)]
    command_line = KISSPLICE_INSTDIR+"/ks_bubble_enumeration "+ infofile+" "+ contents_file_edges+" "+ contents_file_nodes+" "+ basename_edges+" "+ basename_nodes\
        +" "+str(i) \
        +" "+str(kval)+" "+workdir+"/bcc/tmp_bcc_sequences_"+str(kval)+"_"+multiprocessing.current_process().name+" "+str(min_edit_dist) \
        +" bcc_"+str(i) + " " + str(num_snps_bcc) + " -u "+str(UL_MAX) \
        +" -L "+str(LL_MAX)+" -l "+str(LL_MIN)+" -M "+str(max_cycles)+" -s "+str(output_snps)
    if output_context:
      command_line += " -c"
    if output_path:
      command_line += " -p"
    if bval is not None:
      command_line += " -b" + str(bval)
    if output_branch_count:
      command_line += " -v"
    if experimental:
      command_line += " -e " + str(max_memory)

      
    process_returncode = subprocessLauncher(command_line, verbose=verbose, timeout=timeout)

    # Store the bcc number if it timed out (return code -15) OR the maximum number of bubbles was reached (return code 15) OR the memory limit was exceeded (return code -6)
    if process_returncode == -15 or process_returncode == 15 or process_returncode == -6:
        flag = i

    # Always append the results of the enumeration, even when it's incomplete.
    command_line_type0 = KISSPLICE_INSTDIR+"/ks_clean_duplicates " + workdir + "/bcc/tmp_bcc_sequences_" + str(kval) +"_"+multiprocessing.current_process().name+ "_type0.fa"
    command_line_type1234 = KISSPLICE_INSTDIR+"/ks_clean_duplicates " + workdir + "/bcc/tmp_bcc_sequences_" + str(kval) +"_"+multiprocessing.current_process().name+ "_type1234.fa"
    subprocessLauncher(command_line_type0, workdir+"/tmp_all_bcc_type0_"+str(kval)+"_"+multiprocessing.current_process().name, 'a', verbose=verbose) # append ALL BCC IN THE SAME FILE
    subprocessLauncher(command_line_type1234, workdir+"/tmp_all_bcc_type1234_"+str(kval)+"_"+multiprocessing.current_process().name, 'a', verbose=verbose) # append ALL BCC IN THE SAME FILE

    if output_path:
        command_line = "cat "+workdir+"/bcc/tmp_bcc_sequences_" + str(kval) +"_"+multiprocessing.current_process().name+ ".path"
        subprocessLauncher(command_line, workdir+"/tmp_all_paths_bcc_"+str(kval)+"_"+multiprocessing.current_process().name, 'a', verbose=verbose) # append ALL BCC IN THE SAME FILE

    if verbose:
      print "@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@\n"

    return flag

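# helper for the multiprocessing pool: call f_args[0] with the remaining elements as its arguments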
def eval_func_tuple(f_args):
    return f_args[0](*f_args[1:])

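# append the non-branching SNPs collected in graph_all_log_bcc to the type0 file,
# depending on the -s level (2: all of them, 1: Type_0a only)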
def concatenate_graph_all_log_bcc_to_all_bcc_type0(workdir, kval, output_snps):
    if output_snps==2: #concatenate all non-branching snps
        destinationSNPS = open(workdir+"/all_bcc_type0_"+str(kval), 'ab') ## THE UNIQUE FILE ALSO CONTAINS SNPS
        shutil.copyfileobj(open(workdir+"/bcc/graph_all_log_bcc", 'rb'), destinationSNPS)
        destinationSNPS.close()
    elif output_snps==1: #concatenate only non-branching Type-0a
        destinationSNPS = open(workdir+"/all_bcc_type0_"+str(kval), 'ab') ## THE UNIQUE FILE ALSO CONTAINS SNPS
        
        #append the Type_0a bubbles to the destinationSNPS file
        snpsFile = open(workdir+"/bcc/graph_all_log_bcc", 'r')
        writeLine = False
        for line in snpsFile.readlines():
                if writeLine == True:
                        destinationSNPS.write(line)
                        writeLine = False
                else:
                        if ("Type_0a" in line):
                                destinationSNPS.write(line)
                                writeLine = True
                        else:
                                writeLine = False

        destinationSNPS.close()


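# check read coherence/coverage of the bubbles with the kissreads modules, then sort the
# coherent bubbles by type into the final results files; returns the number of events of each type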
def check_read_coverage_and_sort_all_bubbles(KISSPLICE_INSTDIR, readfiles, workdir, outdir, kval, output_snps, infix_name,  countsMethods, minOverlap, substitutions, verbose = False):
    print "\n" + getTimestamp() + "--> Computing read coherence and coverage..."
    printlg("\n" + getTimestamp() + "--> Computing read coherence and coverage...")

    # Two KisSreads executions, one for type 0 and one for type 1234
    #  Due to the k extension, the anchor should be of size k+1 for SNPs
    if output_snps > 0:
            commandLineType0 = KISSPLICE_INSTDIR+"/ks_kissreadsSNPS "+workdir+"/all_bcc_type0_"+str(kval)+" "+readfiles+" -i 5 -S 25 -O "+str(kval+minOverlap)+" -o "+ workdir+"/coherentType0.fa -u "+workdir+"/uncoherentType0.fa  -d " + str(substitutions) + " -c 1 -n"
            subprocessLauncher(commandLineType0, verbose=verbose)

    # no -n option anymore
    commandLineType1234 = KISSPLICE_INSTDIR+"/ks_kissreadsSplice "+workdir+"/all_bcc_type1234_"+str(kval)+" "+readfiles+" -i 5 -k "+str(kval)+" -S 25 -O "+str(kval+minOverlap)+" -o "+workdir+"/coherentType1234.fa -u "+workdir+"/uncoherentType1234.fa  -d " + str(substitutions) + " -c 1 -j " + countsMethods +" -l " + str(minOverlap)
    subprocessLauncher(commandLineType1234, verbose=verbose)

    commandLineCat = "cat " +  workdir+"/uncoherentType1234.fa "
    if output_snps > 0:
            commandLineCat += workdir+"/uncoherentType0.fa "
    subprocessLauncher(commandLineCat, workdir + "/uncoherent.fa", "a", verbose=verbose )

    print getTimestamp() + "--> Done!"
    printlg(getTimestamp() + "--> Done!")

    print getTimestamp() +"--> Sorting all bubbles..."
    printlg(getTimestamp() +"--> Sorting all bubbles...")

    nb = [0]*6  # counter of the number of events of each type found
    eventsName = ["type_0a", "type_0b", "type_1", "type_2", "type_3", "type_4"]
    cofilel = []
    for i in range(0,6):
        cofilel.append(open(outdir+"/results_"+infix_name+"_coherents_"+eventsName[i]+".fa", 'w'))

    if output_snps > 0:
            snpsFile = open(workdir+"/coherentType0.fa", 'r')
            l = snpsFile.readlines()
            l.sort( reverse = True )
            snpsFile.close()
            for event in l:
                    eventSplitted = event.split()[-1].replace(';','\n')
                    try:
                            if ("Type_0a" in eventSplitted):
                                cofilel[0].write(eventSplitted+"\n")#Transform to Fasta type
                                nb[0] += 1
                            else:
                                cofilel[1].write(eventSplitted+"\n")#Transform to Fasta type
                                nb[1] += 1
                    except:
                            pass

    # handling coherent "other"
    cofile = open(workdir+"/coherentType1234.fa", 'r')
    l = cofile.readlines()
    l.sort(reverse=True)
    cofile.close()
    retype = re.compile('Type_\d+')
    for event in l:
        try:
            type = retype.search(event).group()
            for i in range(2,6):
                if (type=="Type_"+str(i-1)):
                    cofilel[i].write(event.split()[-1].replace(';','\n')+"\n")#Transform to Fasta type
                    nb[i] += 1
        except:
            pass


    for i in range(0,6):
        cofilel[i].close()
    uncofile = open(workdir+"/uncoherent.fa", 'r')
    uncofileout = open(outdir+"/results_"+infix_name+"_uncoherent.fa", 'w')
    for event in uncofile.readlines():
        uncofileout.write(event.split()[-1].replace(';','\n')+"\n")
    uncofile.close()
    uncofileout.close()

    print getTimestamp() + "--> Done!"
    printlg(getTimestamp() + "--> Done!")

    return nb


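# sort all bubbles by type, without read coherency information; returns the number of events of each type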
def sort_all_bubbles(KISSPLICE_INSTDIR, readfiles, workdir, outdir, kval, output_snps, infix_name, shouldDoReadCoherence, verbose = False):
    print "\n" + getTimestamp() + "--> Starting Bubble Output Module"
    printlg("\n" + getTimestamp() + "--> Starting Bubble Output Module")
    if shouldDoReadCoherence:
        outdir = outdir+"/results_without_read_coherency"
        if not os.path.exists(outdir):
            os.mkdir(outdir)
        print "Before checking for read coherency, all bubbles will be written to folder " + outdir
        printlg("Before checking for read coherency, all bubbles will be written to folder " + outdir)
        print "This enables you to access them even before the read coherency module finishes, which can take a long time"
        printlg("This enables you to access them even before the read coherency module finishes, which can take a long time")

    print getTimestamp() + "--> Sampling bubbles by type..."
    printlg(getTimestamp() + "--> Sampling bubbles by type...")

    concatenate_graph_all_log_bcc_to_all_bcc_type0(workdir, kval, output_snps)

    retype = re.compile('Type_\d+')
    eventsName = ["type_0a", "type_0b", "type_1", "type_2", "type_3", "type_4"]
    filel = []
    for i in range(0,6):
        filel.append(open(outdir+"/results_"+infix_name+"_"+eventsName[i]+".fa", 'w'))

    nb = [0]*6

    if output_snps > 0:
            snpsFile = open(workdir+"/all_bcc_type0_"+str(kval), 'r')
            for line in snpsFile.readlines():
                if "Type_0a" in line:
                        ofile = filel[0]
                        nb[0] += 1
                elif "Type_0b" in line:
                        ofile = filel[1]
                        nb[1] += 1
                ofile.write(line)
            snpsFile.close()


    # handling the other type
    cfile = open(workdir+"/all_bcc_type1234_"+str(kval), 'r')
    for line in cfile.readlines():
        try:
            type = retype.search(line).group()
            for i in range(1,5):
                if (type=="Type_"+str(i)):
                    ofile = filel[i+1]
                    nb[i+1] += 1
        except:
            pass
        ofile.write(line)
    cfile.close()
    for i in range(0,6):
        nb[i] /= 2
        filel[i].close()
    

    print getTimestamp() + "--> Done!"
    printlg(getTimestamp() + "--> Done!")
    print "You can now access all bubbles without read coherency in: " + outdir
    printlg("You can now access all bubbles without read coherency in: " + outdir)

    return nb

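# write the .edges/.nodes files of each bcc in bcc_list to outdir + dir_name using ks_print_bcc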
def save_bccs_from_list(bcc_list, dir_name, KISSPLICE_INSTDIR, workdir, outdir, verbose = False):
    if not os.path.exists(outdir + dir_name):
        os.mkdir(outdir + dir_name)
    infofile = workdir+"/bcc/graph_info_bcc"
    contents_file_edges = workdir+"/bcc/graph_contents_edges_bcc"
    contents_file_nodes = workdir+"/bcc/graph_contents_nodes_bcc"
    basename_edges = workdir+"/bcc/graph_all_edges_bcc"
    basename_nodes = workdir+"/bcc/graph_all_nodes_bcc"
    
    for i in bcc_list:
        command_line = KISSPLICE_INSTDIR+"/ks_print_bcc "+ infofile+" "+ contents_file_edges+" "+ contents_file_nodes+" "+ basename_edges+" "+ basename_nodes\
            +" "+str(i)+" "\
            + outdir+ dir_name + "/graph_bcc_"+str(i)+".edges "\
            + outdir+ dir_name + "/graph_bcc_"+str(i)+".nodes"
        subprocessLauncher(command_line, verbose=verbose)

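# check that every read file given with -r exists; die with a fatal error otherwise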
def check_read_files(readfiles):
    if readfiles is None:
        return True

    allFilesAreOK = True
    for file in readfiles:
        if not os.path.isfile(file):
            print "[ERROR] File \""+file+"\" does not exist."
            allFilesAreOK = False

    if not allFilesAreOK:
        dieToFatalError("One or more read files do not exist.")


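# print a fatal error message, remove the log file and exit with status 1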
def dieToFatalError (msg):
  print "[FATAL ERROR] " + msg
  print "Try `kissplice --help` for more information"
  global logFileName
  os.remove(logFileName)
  sys.exit(1);

# ############################################################################
#                                   Main
# ############################################################################
def main():
  # @variable KISSPLICE_INSTDIR : Path to the main executable (this file)
  # This variable is initialized from CMAKE to customize this script
  # according to the install configuration.
  # @variable KS_SEC_EXEC_PATH  : Path to the secondary executables (eg ks_kissreads)
  KISSPLICE_INSTDIR = os.path.dirname(os.path.abspath(sys.argv[0])).rstrip('/')
  KS_SEC_EXEC_PATH  = KISSPLICE_INSTDIR.rsplit('/',1)[0]+'/lib//kissplice'
  print "KISSPLICE_INSTDIR: "+KISSPLICE_INSTDIR
  print "KS_SEC_EXEC_PATH: "+KS_SEC_EXEC_PATH

  # ========================================================================
  #                        Manage command line arguments
  # ========================================================================
  parser = argparse.ArgumentParser(description='kisSplice - local assembly of SNPs, indels and AS events')

  # ------------------------------------------------------------------------
  #                            Define allowed options
  # ------------------------------------------------------------------------
  parser.add_argument("-r", action="append", dest="readfiles",
                      help="input fasta/q read files or compressed (.gz) fasta/q files (mutiple, such as \"-r file1 -r file2...\") ")
  parser.add_argument('-k', action="store", dest="kval", type=int, default=41,
                      help="k-mer size (default=41)")
  parser.add_argument('-b', action="store", dest="bval", type=int, default=5, help="maximum number of branching nodes (default 5)")
  parser.add_argument('-l', action="store", dest="llmax", type=int, default=0,
                      help="maximal length of the shorter path (default: 2k+1)")
  parser.add_argument('-m', action = "store", dest = "LL_MIN", default = 0, help = "minimum length of the shorter path (default 2k-8)")
  parser.add_argument('-M', action = "store", dest = "UL_MAX", default = 1000000, help = "maximum length of the longest path (default 1000000), skipped exons longer than UL_MAX are not reported")
  parser.add_argument('-g', action="store", dest="graph_prefix", default="",
                      help="path and prefix to pre-built de Bruijn graph (suffixed by .edges/.nodes)\n \
                      if jointly used with -r, graph used to find bubbles and reads used for quantification")
  parser.add_argument('-o', action="store", dest="out_dir", default="results",
                      help="path to store the results and the summary log file (default = ./results)")
  parser.add_argument('-d', action="store", dest="path_to_tmp", default=None,
                      help="specific directory (absolute path) where to build temporary files (default temporary directory otherwise)")
  parser.add_argument('-t', action="store", dest="nbprocs", type=int, default=1,
                      help="number of cores (must be <= number of physical cores)")
  parser.add_argument('-s', action="store", dest="output_snps", default = "0", help="0, 1 or 2. Changes which types of SNPs will be output. If 0 (default), will not output SNPs. If 1, will output Type0a-SNPs. If 2, will output Type0a and Type0b SNPs (warning: this option may increase a lot the running time. You might also want to try the experimental algorithm here)")
  parser.add_argument('-v', action="store_true", dest="verbose", help="Verbose mode")
  parser.add_argument('-u', action="store_true", dest="keep_ubccs", help="keep the nodes/edges file for unfinished bccs")
  parser.add_argument('-c',  action = "store", dest = "min_cov", default = 2, help="an integer, k-mers present strictly less than this number of times in the dataset will be discarded (default 2)")
  parser.add_argument('-C',  action = "store", dest = "min_relative_cov", default = 0.05, help="a percentage from [0,1), edges with relative coverage below this number are removed (default 0.05)")
  parser.add_argument('-z', action="store", dest="genome_size", type=int, default=1000000000,
                      help="estimated genome/transcriptome size (default = 1G)")
  parser.add_argument('-e',  action = "store", dest = "min_edit_dist", default = 3,
                      help="edit distance threshold, if the two sequences (paths) of a bubble have edit distance smaller than this threshold, the bubble is classified as an inexact repeat (default 3)")
  parser.add_argument('-y',  action = "store", dest = "max_cycles", default = 100000000,
                       help="maximal number of bubbles enumeration in each bcc. If exceeded, no bubble is output for the bcc (default 100M)")
  parser.add_argument('--mismatches',  action = "store", dest = "mism", default = 0, type = int,
                      help="Maximal number of substitutions authorized between a read and a fragment (for quantification only), default 0. If you increase the mismatch and use --counts think of increasing min_overlap too.")
  parser.add_argument('--counts',  action = "store", dest = "countsMethod", default = "0", help="0,1 or 2 . Changes how the counts will be reported. If 0 (default): total counts, if 1: counts on junctions, if 2: all counts. see User guide for more information ")
  parser.add_argument('--min_overlap',  action = "store", dest = "minOverlap", default = 3, type=int, help="Set how many nt must overlap a junction to be counted by --counts option. Default=3. see User guide for more information ")
  parser.add_argument('--timeout', action='store', dest="timeout", default=TIMEOUT,
                      help="max amount of time (in seconds) spent for enumerating bubbles in each bcc. If exceeded, no bubble is output for the bcc (default "+str(TIMEOUT)+")")
  parser.add_argument('--version', action='version', version='%(prog)s 2.4.0-p1')
  parser.add_argument('--output-context', action="store_true", dest="output_context", default = False, help="Will output the maximum non-ambiguous context of a bubble")
  parser.add_argument('--output-path', action="store_true", dest="output_path", default = False, help="Will output the id of the nodes composing the two paths of the bubbles.")
  parser.add_argument('--output-branch-count', action="store_true", dest="output_branch_count", default = False, help="Will output the number of branching nodes in each path.")
  parser.add_argument('--keep-bccs', action="store_true", dest="keep_all_bccs", default = False, help="Keep the node/edges files for all bccs.")
  parser.add_argument('--experimental', action="store_true", dest="experimental", default = False, help="Uses a new experimental algorithm that searches for bubbles by listing all paths.")
  parser.add_argument('--max-memory', action="store", dest="max_memory", default="unlimited",
                      help="If you use the experimental algorithm, you must provide the maximum size of the process's virtual memory (address space) in megabytes (default unlimited). WARNING: this option does not work on Mac operating systems.")
  parser.add_argument('--keep-counts', action="store_true", dest="keep_counts", default = False, help="Keep the .counts file after the sequencing-errors-removal step.")
  

  # ------------------------------------------------------------------------
  #               Parse and interpret command line arguments
  # ------------------------------------------------------------------------
  options = parser.parse_args()

  # ------------------------------------------------------------------------
  #               Create output dir
  # ------------------------------------------------------------------------
  outdir = options.out_dir
  if not os.path.exists(outdir):
    os.mkdir(outdir)  

  # ------------------------------------------------------------------------
  #               Create the log file
  # ------------------------------------------------------------------------
  global logFile, logFileName
  logFileName = outdir+"/kissplice_log_summary_"+time.strftime("%H-%M-%S")+"_"+time.strftime("%d-%m-%Y")+"_"+str(randint(0, 1000000))
  logFile = open(logFileName, 'w')
  

 # ------------------------------------------------------------------------
 #                 Print version and command line
 # ------------------------------------------------------------------------
  print "\nThis is KisSplice, version 2.4.0-p1\n"
  printlg("This is KisSplice, version 2.4.0-p1\n")
  print "The command line was:       " + ' '.join(sys.argv)
  printlg("The command line was:       " + ' '.join(sys.argv))


 # ------------------------------------------------------------------------
 #                 Parse input options
 # ------------------------------------------------------------------------
  # check if the given read files indeed exist
  check_read_files(options.readfiles)
  readfiles = None
  only_graph = False
  if options.readfiles:
    if options.graph_prefix: # GRAPH + READS
      print "-r and -g options used together: "
      printlg("-r and -g options used together: ")
      print "the graph will be used to find events, while reads files are used for checking read-coherency and coverage"
      printlg("the graph will be used to find events, while reads files are used for checking read-coherency and coverage")
    readfiles = ' '.join(map(str, options.readfiles))
  else:
    if not options.graph_prefix:
      parser.print_usage()
      dieToFatalError("kissplice requires at least a read file or a pre-built graph")
    else: # GRAPH
      only_graph = True

  nobuild = False
  if options.graph_prefix:
    nobuild = True

  # --------------------------------------------------------- Output options
  output_snps = (int)(options.output_snps)
  if output_snps<0 or output_snps>2:
          print "-s is not 0, 1 or 2. Defaulting to 0."
          printlg("-s is not 0, 1 or 2. Defaulting to 0.")
          output_snps = 0

  print "Using the read files:      ", readfiles
  printlg("Using the read files:      ", readfiles)
  print "Results will be stored in: ", os.path.abspath(options.out_dir)
  printlg("Results will be stored in: ", os.path.abspath(options.out_dir))
  print "Summary log file will be saved in: ", os.path.abspath(logFileName)
  printlg("Summary log file will be saved in: ", os.path.abspath(logFileName))
  print "\n"
  printlg("\n")

  # ------------------------------------------------------------- k-mer size
  kval = options.kval
  if kval%2 == 0:
    dieToFatalError("please use only odd value for k") #otherwise, DBG use k-1 and output context do not work

  # ------------------------------------- Maximal length of the shorter path
  if options.llmax != 0:
    LL_MAX = options.llmax
  else:
    LL_MAX = 2 * kval + 1
  # The following are not optional but work along with llmax
  UL_MAX = options.UL_MAX # Defines maximum upper and lower path bounds
  if options.LL_MIN != 0:
    LL_MIN= options.LL_MIN
  else:
    LL_MIN = 2 * kval - 8

  min_ll_max = 2 * kval + 1
  if LL_MAX < min_ll_max:
    dieToFatalError("maximal length of the shorter path (" + str(LL_MAX) + ") should be >= 2k+1 =" + str(min_ll_max) + ")")

  
  #-------------------------------- fix LL_MIN, LL_MAX and UL_MAX --------------------------------------
  LL_MIN = int(LL_MIN)-2
  LL_MAX = int(LL_MAX)-2
  UL_MAX = int(UL_MAX)-2

  # ------------------------------------------------------- Other parameters
  min_cov = options.min_cov
  min_edit_dist = options.min_edit_dist
  max_cycles = options.max_cycles
  countsMethod = options.countsMethod
  minOverlap = options.minOverlap

  # ========================================================================
  #            Construct intermediate and output file names
  # ========================================================================
  workdir = mkdirTmp(options.path_to_tmp)
  infix_name = "" # will be the central part of the output file names
  if options.graph_prefix:
    graphfile = options.graph_prefix
  if options.readfiles:
    for file in options.readfiles:
      justfilename =  file.split("/")[-1].split(".")[0] #remove what is before the "/" and what is after the "."
      infix_name += justfilename+"_"
    infix_name = infix_name[0:200] # Truncate it to contain at most 200 characters
    infix_name += "k" + str(kval)
    if not options.graph_prefix:
      graphfile = options.out_dir+"/graph_"+infix_name # Output graph file
  else:
    infix_name = graphfile.split("/")[-1].split(".")[0] #remove what is before the "/" and what is after the "."


  # ========================================================================
  #                                   RUN
  # ========================================================================
  # ------------------------------------------------------------------------
  #                          Build De Bruijn Graph
  # ------------------------------------------------------------------------
  if not nobuild:
    t = time.time()
    build_graph(KS_SEC_EXEC_PATH, workdir, readfiles, kval, graphfile, min_cov, options.genome_size, options.verbose)
    print "Elapsed time: ", (time.time() - t), " seconds"
    printlg("Elapsed time: ", (time.time() - t), " seconds")

  # ------------------------------------------------------------------------
  #                          Error removal
  # ------------------------------------------------------------------------
  t = time.time()
  if float(options.min_relative_cov) > 0.0001:
     exec_error_removal = error_removal(KS_SEC_EXEC_PATH, workdir, kval, graphfile, nobuild, options.min_relative_cov, options.keep_counts, options.verbose)
  else:
     exec_error_removal = False
  
  print "Elapsed time: ", (time.time() - t), " seconds"
  printlg("Elapsed time: ", (time.time() - t), " seconds")
  
  

  # ------------------------------------------------------------------------
  #                        Decompose and simplify DBG
  # ------------------------------------------------------------------------
  t = time.time()
  run_modules(KS_SEC_EXEC_PATH, workdir, graphfile, kval, options.min_relative_cov, options.verbose, options.output_context, exec_error_removal)
  print "Elapsed time: ", (time.time() - t), " seconds"
  printlg("Elapsed time: ", (time.time() - t), " seconds")


  # ------------------------------------------------------------------------
  #                             Enumerate Bubbles
  # ------------------------------------------------------------------------
  t = time.time()
  enumerate_all_bubbles(KS_SEC_EXEC_PATH, workdir, outdir, kval, options.bval, output_snps, min_edit_dist, max_cycles, \
                        UL_MAX, LL_MAX, LL_MIN, float(options.timeout), options.nbprocs, options.verbose, \
                        options.output_context, options.output_path, options.output_branch_count, options.experimental, options.max_memory)
  print "Elapsed time: ", (time.time() - t), " seconds"
  printlg("Elapsed time: ", (time.time() - t), " seconds")


  # ------------------------------------------------------------------------
  #            Check read coverage (optional) and sort bubbles
  # ------------------------------------------------------------------------
  t = time.time()
  nb = sort_all_bubbles(KS_SEC_EXEC_PATH, readfiles, workdir, outdir, kval, output_snps, infix_name, not only_graph, options.verbose)
  if not only_graph:
    nb = check_read_coverage_and_sort_all_bubbles(KS_SEC_EXEC_PATH, readfiles, workdir, outdir, kval, output_snps, infix_name, countsMethod, minOverlap, options.mism, options.verbose)

  print "Elapsed time: ", (time.time() - t), " seconds\n"
  printlg("Elapsed time: ", (time.time() - t), " seconds\n")

  if only_graph:
    print "\n\n \t\t ******** We are done, final results are in files "+outdir+"/results_"+infix_name+"_type_*.fa **********"
    printlg("\n\n \t\t ******** We are done, final results are in files "+outdir+"/results_"+infix_name+"_type_*.fa **********")
  else:
    print "\n\n \t\t ******** We are done, final coherent results are in files "+outdir+"/results_"+infix_name+"_coherents_type_*.fa ********** "
    printlg("\n\n \t\t ******** We are done, final coherent results are in files "+outdir+"/results_"+infix_name+"_coherents_type_*.fa ********** ")
    print " \t\t ******** All non read coherent results are in files "+outdir+"/results_"+infix_name+"_uncoherent.fa ****** \n\n"
    printlg(" \t\t ******** All non read coherent results are in files "+outdir+"/results_"+infix_name+"_uncoherent.fa ****** \n\n")


  # ------------------------------------------------------------------------
  #                           Manage BCCs
  # ------------------------------------------------------------------------
  if len(unfinished_bccs) != 0:
      print "\t\t ******** There were " + str(len(unfinished_bccs)) + " BCCs with unfinished enumeration ********"
      printlg("\t\t ******** There were " + str(len(unfinished_bccs)) + " BCCs with unfinished enumeration ********")
      if not options.keep_ubccs and not options.keep_all_bccs: 
          print "\t\t ******** Re-run with `-u` to retrieve the unfinished components ********\n"
          printlg("\t\t ******** Re-run with `-u` to retrieve the unfinished components ********\n")

  if options.keep_ubccs:
    bcc_dir = "/unfinished_bcc"  
    print "\t\t Backup files for the unfinished BCCs are in " + outdir + bcc_dir + "\n"
    printlg("\t\t Backup files for the unfinished BCCs are in " + outdir + bcc_dir + "\n")
    save_bccs_from_list(unfinished_bccs, bcc_dir, KS_SEC_EXEC_PATH, workdir, outdir, options.verbose)
          
  if options.keep_all_bccs:
    bcc_dir = "/bcc"  
    print "\t\t Edge and node files of all BCCs are in " + outdir + bcc_dir + "\n"  
    printlg("\t\t Edge and node files of all BCCs are in " + outdir + bcc_dir + "\n")
    all_bccs = find_bcc_ids_ordered_by_size(workdir)[1]
    save_bccs_from_list(all_bccs, bcc_dir, KS_SEC_EXEC_PATH, workdir, outdir, options.verbose)

  
  if options.output_path: # move paths file to outdir
    shutil.move(workdir+"/all_paths_k"+str(kval), outdir + "/all_paths_k"+str(kval))

          
          
  # ------------------------------------------------------------------------
  #                 Output number of events of each type
  # ------------------------------------------------------------------------
  print "\t\t TYPES:"
  printlg("\t\t TYPES:")
  if output_snps!=0:
    print "\t\t\t 0a: Single SNPs, Inexact Repeats or sequencing substitution errors, "+str(nb[0])+" found"
    printlg("\t\t\t 0a: Single SNPs, Inexact Repeats or sequencing substitution errors, "+str(nb[0])+" found")
    if output_snps==2:
            print "\t\t\t 0b: Multiple SNPs, Inexact Repeats or sequencing substitution errors, "+str(nb[1])+" found"
            printlg("\t\t\t 0b: Multiple SNPs, Inexact Repeats or sequencing substitution errors, "+str(nb[1])+" found")
    else:
            print "\t\t\t 0b: Run with -s 2 to also search for Multiple SNPs (warning: this option may increase a lot the running time. You might also want to try the experimental algorithm here)"
            printlg("\t\t\t 0b: Run with -s 2 to also search for Multiple SNPs (warning: this option may increase a lot the running time. You might also want to try the experimental algorithm here)")
  else:
    print "\t\t\t 0: Run with -s option set to 1 or 2 to also search for SNPs"
    printlg("\t\t\t 0: Run with -s option set to 1 or 2 to also search for SNPs")
  print "\t\t\t 1: Alternative Splicing Events, "+str(nb[2])+" found"
  printlg("\t\t\t 1: Alternative Splicing Events, "+str(nb[2])+" found")
  print "\t\t\t 2: Inexact Tandem Repeats, "+str(nb[3])+" found"
  printlg("\t\t\t 2: Inexact Tandem Repeats, "+str(nb[3])+" found")
  print "\t\t\t 3: Short Indels (<3nt), "+str(nb[4])+" found"
  printlg("\t\t\t 3: Short Indels (<3nt), "+str(nb[4])+" found")
  print "\t\t\t 4: All others, composed by a shorter path of length > 2k not being a SNP, "+str(nb[5])+" found"
  printlg("\t\t\t 4: All others, composed by a shorter path of length > 2k not being a SNP, "+str(nb[5])+" found")


  print "\n\n \t\t ******** A summary of the execution can be found in the log file: " + os.path.abspath(logFileName) + "**********"
  printlg("\n\n \t\t ******** A summary of the execution can be found in the log file: " + os.path.abspath(logFileName) + "**********")

  # ------------------------------------------------------------------------
  #                           Clean tmp directory
  # ------------------------------------------------------------------------
  logFile.close()
  cleanTmp(workdir)

if __name__ == '__main__':
    main()