/usr/lib/python3/dist-packages/snakemake/io.py is in snakemake 4.3.1-1.
This file is owned by root:root, with mode 0o644.
The actual contents of the file can be viewed below.
__author__ = "Johannes Köster"
__copyright__ = "Copyright 2015, Johannes Köster"
__email__ = "koester@jimmy.harvard.edu"
__license__ = "MIT"
import collections
import os
import shutil
import re
import stat
import time
import datetime
import json
import copy
import functools
import subprocess as sp
from itertools import product, chain
from collections import namedtuple
from collections.abc import Iterable
from snakemake.exceptions import MissingOutputException, WorkflowError, WildcardError, RemoteFileException
from snakemake.logging import logger
from inspect import isfunction, ismethod
from copy import deepcopy
from snakemake.common import DYNAMIC_FILL
def lstat(f):
    # stat() without following symlinks where the platform supports it;
    # otherwise fall back to a plain stat (follow_symlinks=True is the only
    # value such platforms accept).
    return os.stat(f,
                   follow_symlinks=os.stat not in os.supports_follow_symlinks)
def lutime(f, times):
    # In some cases we have a platform where os.supports_follow_symlinks includes
    # stat() but not utime(). This leads to an anomaly. In any case we never want
    # to touch the target of a link.
if os.utime in os.supports_follow_symlinks:
#...utime is well behaved
os.utime(f, times, follow_symlinks=False)
elif not os.path.islink(f):
#...symlinks not an issue here
os.utime(f, times)
else:
try:
# try the system command
if times:
                fmt_time = lambda sec: datetime.datetime.fromtimestamp(sec).strftime("%Y%m%d%H%M.%S")
atime, mtime = times
sp.check_call(["touch", "-h", f, "-a", "-t", fmt_time(atime)])
sp.check_call(["touch", "-h", f, "-m", "-t", fmt_time(mtime)])
else:
sp.check_call(["touch", "-h", f])
except sp.CalledProcessError:
pass
#...problem system. Do nothing.
logger.warning("Unable to set utime on symlink {}. Your Python build does not support it.".format(f))
return None
def lchmod(f, mode):
os.chmod(f,
mode,
follow_symlinks=os.chmod not in os.supports_follow_symlinks)
class IOCache:
def __init__(self):
self.mtime = dict()
self.exists = dict()
self.size = dict()
self.active = True
def clear(self):
self.mtime.clear()
self.exists.clear()
self.size.clear()
def deactivate(self):
self.clear()
self.active = False
def IOFile(file, rule=None):
assert rule is not None
f = _IOFile(file)
f.rule = rule
return f
class _IOFile(str):
"""
A file that is either input or output of a rule.
"""
__slots__ = ["_is_function", "_file", "rule", "_regex"]
def __new__(cls, file):
obj = str.__new__(cls, file)
obj._is_function = isfunction(file) or ismethod(file)
obj._is_function = obj._is_function or (
isinstance(file, AnnotatedString) and bool(file.callable))
obj._file = file
obj.rule = None
obj._regex = None
if obj.is_remote:
obj.remote_object._iofile = obj
return obj
def iocache(func):
@functools.wraps(func)
def wrapper(self, *args, **kwargs):
if self.rule.workflow.iocache.active:
cache = getattr(self.rule.workflow.iocache, func.__name__)
if self in cache:
return cache[self]
v = func(self, *args, **kwargs)
cache[self] = v
return v
else:
return func(self, *args, **kwargs)
return wrapper
def _refer_to_remote(func):
"""
A decorator so that if the file is remote and has a version
of the same file-related function, call that version instead.
"""
@functools.wraps(func)
def wrapper(self, *args, **kwargs):
if self.is_remote:
if hasattr(self.remote_object, func.__name__):
                    return getattr(self.remote_object, func.__name__)(*args, **kwargs)
return func(self, *args, **kwargs)
return wrapper
@property
def is_remote(self):
return is_flagged(self._file, "remote_object")
@property
def is_ancient(self):
return is_flagged(self._file, "ancient")
def update_remote_filepath(self):
# if the file string is different in the iofile, update the remote object
# (as in the case of wildcard expansion)
remote_object = get_flag_value(self._file, "remote_object")
if remote_object._file != self._file:
remote_object._iofile = self
@property
def should_keep_local(self):
return get_flag_value(self._file, "remote_object").keep_local
@property
def should_stay_on_remote(self):
return get_flag_value(self._file, "remote_object").stay_on_remote
@property
def remote_object(self):
return get_flag_value(self._file, "remote_object")
@property
@_refer_to_remote
def file(self):
if not self._is_function:
return self._file
else:
raise ValueError("This IOFile is specified as a function and "
"may not be used directly.")
def check(self):
hint = (
"It can also lead to inconsistent results of the file-matching "
"approach used by Snakemake."
)
if self._file.startswith("./"):
logger.warning("Relative file path '{}' starts with './'. This is redundant "
"and strongly discouraged. {} You can simply omit the './' "
"for relative file paths.".format(self._file, hint))
if self._file.startswith(" "):
logger.warning("File path '{}' starts with whitespace. "
"This is likely unintended. {}".format(self._file, hint))
if self._file.endswith(" "):
logger.warning("File path '{}' ends with whitespace. "
"This is likely unintended. {}".format(self._file, hint))
if "\n" in self._file:
logger.warning("File path '{}' contains line break. "
"This is likely unintended. {}".format(self._file, hint))
if _double_slash_regex.search(self._file) is not None:
logger.warning("File path {} contains double '{}'. "
"This is likely unintended. {}".format(
self._file, os.path.sep, hint))
@property
@iocache
@_refer_to_remote
def exists(self):
return self.exists_local
@property
def exists_local(self):
return os.path.exists(self.file)
@property
def exists_remote(self):
return (self.is_remote and self.remote_object.exists())
@property
def protected(self):
return self.exists_local and not os.access(self.file, os.W_OK)
@property
@iocache
@_refer_to_remote
def mtime(self):
return self.mtime_local
@property
def mtime_local(self):
# do not follow symlinks for modification time
return lstat(self.file).st_mtime
@property
def flags(self):
return getattr(self._file, "flags", {})
@property
@iocache
@_refer_to_remote
def size(self):
return self.size_local
@property
def size_local(self):
# follow symlinks but throw error if invalid
self.check_broken_symlink()
return os.path.getsize(self.file)
def check_broken_symlink(self):
""" Raise WorkflowError if file is a broken symlink. """
if not self.exists_local and lstat(self.file):
raise WorkflowError("File {} seems to be a broken symlink.".format(
self.file))
@_refer_to_remote
def is_newer(self, time):
""" Returns true of the file is newer than time, or if it is
a symlink that points to a file newer than time. """
if self.is_ancient:
return False
elif self.is_remote:
#If file is remote but provider does not override the implementation this
#is the best we can do.
return self.mtime > time
else:
return os.stat(self, follow_symlinks=True).st_mtime > time or self.mtime > time
def download_from_remote(self):
if self.is_remote and self.remote_object.exists():
if not self.should_stay_on_remote:
logger.info("Downloading from remote: {}".format(self.file))
self.remote_object.download()
logger.info("Finished download.")
else:
raise RemoteFileException(
"The file to be downloaded does not seem to exist remotely.")
def upload_to_remote(self):
if self.is_remote:
logger.info("Uploading to remote: {}".format(self.file))
self.remote_object.upload()
logger.info("Finished upload.")
def prepare(self):
path_until_wildcard = re.split(DYNAMIC_FILL, self.file)[0]
dir = os.path.dirname(path_until_wildcard)
if len(dir) > 0 and not os.path.exists(dir):
try:
os.makedirs(dir)
except OSError as e:
# ignore Errno 17 "File exists" (reason: multiprocessing)
if e.errno != 17:
raise e
def protect(self):
mode = (lstat(self.file).st_mode & ~stat.S_IWUSR & ~stat.S_IWGRP
& ~stat.S_IWOTH)
if os.path.isdir(self.file):
for root, dirs, files in os.walk(self.file):
for d in dirs:
lchmod(os.path.join(self.file, d), mode)
for f in files:
lchmod(os.path.join(self.file, f), mode)
else:
lchmod(self.file, mode)
def remove(self, remove_non_empty_dir=False):
remove(self, remove_non_empty_dir=remove_non_empty_dir)
def touch(self, times=None):
""" times must be 2-tuple: (atime, mtime) """
try:
lutime(self.file, times)
except OSError as e:
if e.errno == 2:
raise MissingOutputException(
"Output file {} of rule {} shall be touched but "
"does not exist.".format(self.file, self.rule.name),
lineno=self.rule.lineno,
snakefile=self.rule.snakefile)
else:
raise e
def touch_or_create(self):
try:
self.touch()
except MissingOutputException:
# create empty file
with open(self.file, "w") as f:
pass
def apply_wildcards(self,
wildcards,
fill_missing=False,
fail_dynamic=False):
f = self._file
if self._is_function:
f = self._file(Namedlist(fromdict=wildcards))
# this bit ensures flags are transferred over to files after
# wildcards are applied
file_with_wildcards_applied = IOFile(
apply_wildcards(f,
wildcards,
fill_missing=fill_missing,
fail_dynamic=fail_dynamic,
dynamic_fill=DYNAMIC_FILL),
rule=self.rule)
file_with_wildcards_applied.clone_flags(self)
return file_with_wildcards_applied
def get_wildcard_names(self):
return get_wildcard_names(self.file)
def contains_wildcard(self):
return contains_wildcard(self.file)
def regex(self):
if self._regex is None:
# compile a regular expression
self._regex = re.compile(regex(self.file))
return self._regex
def constant_prefix(self):
first_wildcard = _wildcard_regex.search(self.file)
if first_wildcard:
return self.file[:first_wildcard.start()]
return self.file
def match(self, target):
return self.regex().match(target) or None
def format_dynamic(self):
return self.replace(DYNAMIC_FILL, "{*}")
def clone_flags(self, other):
if isinstance(self._file, str):
self._file = AnnotatedString(self._file)
if isinstance(other._file, AnnotatedString):
self._file.flags = getattr(other._file, "flags", {}).copy()
if "remote_object" in self._file.flags:
self._file.flags['remote_object'] = copy.copy(
self._file.flags['remote_object'])
self.update_remote_filepath()
def set_flags(self, flags):
if isinstance(self._file, str):
self._file = AnnotatedString(self._file)
self._file.flags = flags
def __eq__(self, other):
f = other._file if isinstance(other, _IOFile) else other
return self._file == f
def __hash__(self):
return self._file.__hash__()
_double_slash_regex = (re.compile(r"([^:]//|^//)")
if os.path.sep == "/"
else re.compile(r"\\\\"))
_wildcard_regex = re.compile(
r"""
\{
(?=( # This lookahead assertion emulates an 'atomic group'
# which is required for performance
\s*(?P<name>\w+) # wildcard name
(\s*,\s*
(?P<constraint> # an optional constraint
([^{}]+ | \{\d+(,\d+)?\})* # allow curly braces to nest one level
) # ... as in '{w,a{3,5}}'
)?\s*
))\1
\}
""", re.VERBOSE)
def wait_for_files(files, latency_wait=3, force_stay_on_remote=False):
"""Wait for given files to be present in filesystem."""
files = list(files)
def get_missing():
return [
f for f in files
if not (f.exists_remote
if (isinstance(f, _IOFile) and
f.is_remote and
(force_stay_on_remote or f.should_stay_on_remote))
else os.path.exists(f))]
missing = get_missing()
if missing:
logger.info("Waiting at most {} seconds for missing files.".format(
latency_wait))
for _ in range(latency_wait):
if not get_missing():
return
time.sleep(1)
raise IOError("Missing files after {} seconds:\n{}".format(
latency_wait, "\n".join(get_missing())))
def get_wildcard_names(pattern):
return set(match.group('name')
for match in _wildcard_regex.finditer(pattern))
def contains_wildcard(path):
return _wildcard_regex.search(path) is not None
def contains_wildcard_constraints(pattern):
return any(match.group('constraint') for match in _wildcard_regex.finditer(pattern))
def remove(file, remove_non_empty_dir=False):
if file.is_remote and file.should_stay_on_remote:
if file.exists_remote:
file.remote_object.remove()
elif os.path.isdir(file) and not os.path.islink(file):
if remove_non_empty_dir:
shutil.rmtree(file)
else:
try:
os.removedirs(file)
except OSError as e:
# skip non empty directories
if e.errno == 39:
logger.info("Skipped removing non-empty directory {}".format(e.filename))
else:
logger.warning(str(e))
#Remember that dangling symlinks fail the os.path.exists() test, but
#we definitely still want to zap them. try/except is the safest way.
else:
try:
os.remove(file)
except FileNotFoundError:
pass
def regex(filepattern):
f = []
last = 0
wildcards = set()
for match in _wildcard_regex.finditer(filepattern):
f.append(re.escape(filepattern[last:match.start()]))
wildcard = match.group("name")
if wildcard in wildcards:
if match.group("constraint"):
raise ValueError(
"Constraint regex must be defined only in the first "
"occurence of the wildcard in a string.")
f.append("(?P={})".format(wildcard))
else:
wildcards.add(wildcard)
f.append("(?P<{}>{})".format(wildcard, match.group("constraint") if
match.group("constraint") else ".+"))
last = match.end()
f.append(re.escape(filepattern[last:]))
f.append("$") # ensure that the match spans the whole file
return "".join(f)
def apply_wildcards(pattern,
wildcards,
fill_missing=False,
fail_dynamic=False,
dynamic_fill=None,
keep_dynamic=False):
def format_match(match):
name = match.group("name")
try:
value = wildcards[name]
if fail_dynamic and value == dynamic_fill:
raise WildcardError(name)
return str(value) # convert anything into a str
except KeyError as ex:
if keep_dynamic:
return "{{{}}}".format(name)
elif fill_missing:
return dynamic_fill
else:
raise WildcardError(str(ex))
return re.sub(_wildcard_regex, format_match, pattern)
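# Illustrative sketch (not executed; values are hypothetical): filling in
# wildcard values from a dict.
#
#   apply_wildcards("out/{sample}.{ext}", {"sample": "A", "ext": "txt"})
#   # -> "out/A.txt"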
def not_iterable(value):
return isinstance(value, str) or isinstance(value, dict) or not isinstance(
value, Iterable)
def is_callable(value):
return (callable(value) or
(isinstance(value, _IOFile) and value._is_function))
class AnnotatedString(str):
def __init__(self, value):
self.flags = dict()
self.callable = value if is_callable(value) else None
def flag(value, flag_type, flag_value=True):
if isinstance(value, AnnotatedString):
value.flags[flag_type] = flag_value
return value
if not_iterable(value):
value = AnnotatedString(value)
value.flags[flag_type] = flag_value
return value
return [flag(v, flag_type, flag_value=flag_value) for v in value]
def is_flagged(value, flag):
if isinstance(value, AnnotatedString):
return flag in value.flags and value.flags[flag]
if isinstance(value, _IOFile):
return flag in value.flags and value.flags[flag]
return False
def get_flag_value(value, flag_type):
if isinstance(value, AnnotatedString):
if flag_type in value.flags:
return value.flags[flag_type]
else:
return None
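# Illustrative sketch (not executed; the flag name is hypothetical): flag()
# wraps a plain string in an AnnotatedString, and the helpers read it back.
#
#   s = flag("out.txt", "temp")
#   is_flagged(s, "temp")       # -> True
#   get_flag_value(s, "temp")   # -> True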
def ancient(value):
"""
A flag for an input file that shall be considered ancient; i.e. its timestamp shall have no effect on which jobs to run.
"""
if is_flagged(value, "remote"):
raise SyntaxError(
"Ancient and remote flags are mutually exclusive.")
return flag(value, "ancient")
def temp(value):
"""
A flag for an input or output file that shall be removed after usage.
"""
if is_flagged(value, "protected"):
raise SyntaxError(
"Protected and temporary flags are mutually exclusive.")
if is_flagged(value, "remote"):
raise SyntaxError("Remote and temporary flags are mutually exclusive.")
return flag(value, "temp")
def temporary(value):
""" An alias for temp. """
return temp(value)
def protected(value):
""" A flag for a file that shall be write protected after creation. """
if is_flagged(value, "temp"):
raise SyntaxError(
"Protected and temporary flags are mutually exclusive.")
if is_flagged(value, "remote"):
raise SyntaxError("Remote and protected flags are mutually exclusive.")
return flag(value, "protected")
def dynamic(value):
"""
A flag for a file that shall be dynamic, i.e. the multiplicity
(and wildcard values) will be expanded after a certain
rule has been run """
annotated = flag(value, "dynamic", True)
tocheck = [annotated] if not_iterable(annotated) else annotated
for file in tocheck:
matches = list(_wildcard_regex.finditer(file))
#if len(matches) != 1:
# raise SyntaxError("Dynamic files need exactly one wildcard.")
for match in matches:
if match.group("constraint"):
raise SyntaxError(
"The wildcards in dynamic files cannot be constrained.")
return annotated
def touch(value):
return flag(value, "touch")
def unpack(value):
return flag(value, "unpack")
def local(value):
"""Mark a file as local file. This disables application of a default remote
provider.
"""
if is_flagged(value, "remote"):
raise SyntaxError("Remote and local flags are mutually exclusive.")
return flag(value, "local")
def expand(*args, **wildcards):
"""
Expand wildcards in given filepatterns.
Arguments
*args -- first arg: filepatterns as list or one single filepattern,
second arg (optional): a function to combine wildcard values
(itertools.product per default)
**wildcards -- the wildcards as keyword arguments
with their values as lists
"""
filepatterns = args[0]
if len(args) == 1:
combinator = product
elif len(args) == 2:
combinator = args[1]
if isinstance(filepatterns, str):
filepatterns = [filepatterns]
def flatten(wildcards):
for wildcard, values in wildcards.items():
if isinstance(values, str) or not isinstance(values, Iterable):
values = [values]
yield [(wildcard, value) for value in values]
try:
return [filepattern.format(**comb)
for comb in map(dict, combinator(*flatten(wildcards)))
for filepattern in filepatterns]
except KeyError as e:
raise WildcardError("No values given for wildcard {}.".format(e))
def limit(pattern, **wildcards):
"""
Limit wildcards to the given values.
Arguments:
**wildcards -- the wildcards as keyword arguments
with their values as lists
"""
return pattern.format(**{
wildcard: "{{{},{}}}".format(wildcard, "|".join(values))
for wildcard, values in wildcards.items()
})
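# Illustrative sketch (not executed): limit() rewrites a pattern so that each
# wildcard is constrained to the given values.
#
#   limit("{sample}.txt", sample=["A", "B"])
#   # -> "{sample,A|B}.txt"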
def glob_wildcards(pattern, files=None):
"""
Glob the values of the wildcards by matching the given pattern to the filesystem.
Returns a named tuple with a list of values for each wildcard.
"""
pattern = os.path.normpath(pattern)
first_wildcard = re.search("{[^{]", pattern)
    dirname = (os.path.dirname(pattern[:first_wildcard.start()])
               if first_wildcard else os.path.dirname(pattern))
if not dirname:
dirname = "."
names = [match.group('name')
for match in _wildcard_regex.finditer(pattern)]
Wildcards = namedtuple("Wildcards", names)
wildcards = Wildcards(*[list() for name in names])
pattern = re.compile(regex(pattern))
if files is None:
files = (os.path.normpath(os.path.join(dirpath, f))
for dirpath, dirnames, filenames in os.walk(dirname)
for f in chain(filenames, dirnames))
for f in files:
match = re.match(pattern, f)
if match:
for name, value in match.groupdict().items():
getattr(wildcards, name).append(value)
return wildcards
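# Illustrative sketch (not executed; the file names are hypothetical and the
# order of results depends on os.walk):
#
#   # with data/a.fastq and data/b.fastq on disk
#   glob_wildcards("data/{sample}.fastq")
#   # -> Wildcards(sample=["a", "b"])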
def update_wildcard_constraints(pattern,
wildcard_constraints,
global_wildcard_constraints):
"""Update wildcard constraints
Args:
pattern (str): pattern on which to update constraints
wildcard_constraints (dict): dictionary of wildcard:constraint key-value pairs
global_wildcard_constraints (dict): dictionary of wildcard:constraint key-value pairs
"""
def replace_constraint(match):
name = match.group("name")
constraint = match.group("constraint")
newconstraint = wildcard_constraints.get(name, global_wildcard_constraints.get(name))
if name in examined_names:
return match.group(0)
examined_names.add(name)
# Don't override if constraint already set
if constraint is not None:
if name in wildcard_constraints:
raise ValueError("Wildcard {} is constrained by both the rule and the file pattern. Consider removing one of the constraints.")
return match.group(0)
# Only update if a new constraint has actually been set
elif newconstraint is not None:
return "{{{},{}}}".format(name, newconstraint)
else:
return match.group(0)
examined_names = set()
updated = re.sub(_wildcard_regex, replace_constraint, pattern)
# inherit flags
if isinstance(pattern, AnnotatedString):
updated = AnnotatedString(updated)
updated.flags = deepcopy(pattern.flags)
return updated
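# Illustrative sketch (not executed; the constraint is hypothetical): injecting
# a per-rule constraint into a pattern that does not constrain the wildcard itself.
#
#   update_wildcard_constraints("{sample}.txt", {"sample": "[A-Z]+"}, {})
#   # -> "{sample,[A-Z]+}.txt"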
# TODO rewrite Namedlist!
class Namedlist(list):
"""
A list that additionally provides functions to name items. Further,
it is hashable, however the hash does not consider the item names.
"""
def __init__(self, toclone=None, fromdict=None, plainstr=False):
"""
Create the object.
Arguments
toclone -- another Namedlist that shall be cloned
fromdict -- a dict that shall be converted to a
Namedlist (keys become names)
"""
list.__init__(self)
self._names = dict()
if toclone:
self.extend(map(str, toclone) if plainstr else toclone)
if isinstance(toclone, Namedlist):
self.take_names(toclone.get_names())
if fromdict:
for key, item in fromdict.items():
self.append(item)
self.add_name(key)
def add_name(self, name):
"""
Add a name to the last item.
Arguments
name -- a name
"""
self.set_name(name, len(self) - 1)
def set_name(self, name, index, end=None):
"""
Set the name of an item.
Arguments
name -- a name
index -- the item index
"""
self._names[name] = (index, end)
if end is None:
setattr(self, name, self[index])
else:
setattr(self, name, Namedlist(toclone=self[index:end]))
def get_names(self):
"""
Get the defined names as (name, index) pairs.
"""
for name, index in self._names.items():
yield name, index
def take_names(self, names):
"""
Take over the given names.
Arguments
names -- the given names as (name, index) pairs
"""
for name, (i, j) in names:
self.set_name(name, i, end=j)
def items(self):
for name in self._names:
yield name, getattr(self, name)
def allitems(self):
next = 0
for name, index in sorted(self._names.items(),
key=lambda item: (item[1][0], item[1][0] + 1 if item[1][1] is None else item[1][1])):
start, end = index
if end is None:
end = start + 1
if start > next:
for item in self[next:start]:
yield None, item
yield name, getattr(self, name)
next = end
for item in self[next:]:
yield None, item
def insert_items(self, index, items):
self[index:index + 1] = items
add = len(items) - 1
for name, (i, j) in self._names.items():
if i > index:
self._names[name] = (i + add, None if j is None else j + add)
elif i == index:
self.set_name(name, i, end=i + len(items))
def keys(self):
return self._names
def plainstrings(self):
return self.__class__.__call__(toclone=self, plainstr=True)
def get(self, key, default_value=None):
return self.__dict__.get(key, default_value)
def __getitem__(self, key):
try:
return super().__getitem__(key)
except TypeError:
pass
return getattr(self, key)
def __hash__(self):
return hash(tuple(self))
def __str__(self):
return " ".join(map(str, self))
class InputFiles(Namedlist):
pass
class OutputFiles(Namedlist):
pass
class Wildcards(Namedlist):
pass
class Params(Namedlist):
pass
class Resources(Namedlist):
pass
class Log(Namedlist):
pass
def _load_configfile(configpath):
"Tries to load a configfile first as JSON, then as YAML, into a dict."
try:
with open(configpath) as f:
try:
return json.load(f, object_pairs_hook=collections.OrderedDict)
except ValueError:
f.seek(0) # try again
try:
import yaml
except ImportError:
raise WorkflowError("Config file is not valid JSON and PyYAML "
"has not been installed. Please install "
"PyYAML to use YAML config files.")
try:
# From http://stackoverflow.com/a/21912744/84349
class OrderedLoader(yaml.Loader):
pass
def construct_mapping(loader, node):
loader.flatten_mapping(node)
return collections.OrderedDict(
loader.construct_pairs(node))
OrderedLoader.add_constructor(
yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
construct_mapping)
return yaml.load(f, OrderedLoader)
except yaml.YAMLError:
raise WorkflowError("Config file is not valid JSON or YAML. "
"In case of YAML, make sure to not mix "
"whitespace and tab indentation.")
except FileNotFoundError:
raise WorkflowError("Config file {} not found.".format(configpath))
def load_configfile(configpath):
"Loads a JSON or YAML configfile as a dict, then checks that it's a dict."
config = _load_configfile(configpath)
if not isinstance(config, dict):
raise WorkflowError("Config file must be given as JSON or YAML "
"with keys at top level.")
return config
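# Illustrative sketch (not executed; the file path and contents are hypothetical):
#
#   # config.yaml containing "samples: [A, B]"
#   load_configfile("config.yaml")
#   # -> OrderedDict([('samples', ['A', 'B'])])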
##### Wildcard pumping detection #####
class PeriodicityDetector:
def __init__(self, min_repeat=20, max_repeat=100):
"""
Args:
        max_repeat (int): The maximum number of repeats of the periodic substring.
        min_repeat (int): The minimum number of repeats of the periodic substring.
"""
self.regex = re.compile(
"((?P<value>.+)(?P=value){{{min_repeat},{max_repeat}}})$".format(
min_repeat=min_repeat - 1,
max_repeat=max_repeat - 1))
def is_periodic(self, value):
"""Returns the periodic substring or None if not periodic."""
m = self.regex.search(value) # search for a periodic suffix.
if m is not None:
return m.group("value")