/usr/share/pyshared/neo/io/baseio.py is in python-neo 0.3.3-1.
This file is owned by root:root, with mode 0o644.
The actual contents of the file can be viewed below.
# -*- coding: utf-8 -*-
"""
baseio
======

Classes
-------

BaseIO - abstract class which should be overridden, managing how a
         file will load/write its data

If you want a model for developing a new IO start from exampleIO.
"""
import collections

from neo.core import (AnalogSignal, AnalogSignalArray, Block,
                      Epoch, EpochArray, Event, EventArray,
                      IrregularlySampledSignal,
                      RecordingChannel, RecordingChannelGroup,
                      Segment, Spike, SpikeTrain, Unit)
from neo.io.tools import create_many_to_one_relationship


read_error = "This type is not supported by this file format for reading"
write_error = "This type is not supported by this file format for writing"
class BaseIO(object):
    """
    Generic class to handle all the file read/write methods for the key
    objects of the core class. This template is file-reading/writing oriented
    but it can also handle data read from/written to a database such as TDT
    system tanks or SQLite files.

    This is an abstract class that will be subclassed for each format.

    The key methods of the class are:
        - ``read()`` - Read the whole object structure, return a list of
          Block objects
        - ``read_block(lazy=True, cascade=True, **params)`` - Read a Block
          object from file with some parameters
        - ``read_segment(lazy=True, cascade=True, **params)`` - Read a
          Segment object from file with some parameters
        - ``read_spiketrainlist(lazy=True, cascade=True, **params)`` - Read
          a SpikeTrainList object from file with some parameters
        - ``write()`` - Write the whole object structure
        - ``write_block(**params)`` - Write a Block object to file with
          some parameters
        - ``write_segment(**params)`` - Write a Segment object to file with
          some parameters
        - ``write_spiketrainlist(**params)`` - Write a SpikeTrainList object
          to file with some parameters

    The class can also implement these methods:
        - ``read_XXX(lazy=True, cascade=True, **params)``
        - ``write_XXX(**params)``
      where XXX is one of the objects supported by the IO.

    Each class declares what it can read or write directly; this is
    described by **readable_objects** and **writeable_objects**.
    The object types can be any of the classes defined in neo.core
    (Block, Segment, AnalogSignal, ...).

    An IO class does not necessarily support the whole neo hierarchy, only
    part of it. This is described with **supported_objects**.
    All IOs must support at least Block, with a read_block() method.

    ** start a new IO **

    If you want to implement your own file format, create a class that
    inherits from this BaseIO class and implements the methods above.
    See ExampleIO in exampleio.py.
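    A minimal sketch of such a subclass is shown below. The class name, the
    format name and the body of ``read_block`` are hypothetical and only
    illustrate which attributes and methods a new IO would typically
    override::

        class MyFormatIO(BaseIO):
            is_readable = True
            is_writable = False
            supported_objects = [Block, Segment, AnalogSignal]
            readable_objects = [Block]
            writeable_objects = []
            name = 'MyFormat'
            mode = 'file'

            def __init__(self, filename=None, **kargs):
                BaseIO.__init__(self, filename=filename, **kargs)

            def read_block(self, lazy=False, cascade=True, **kargs):
                # open and parse self.filename here, then build the Block
                bl = Block(name='myformat data')
                if cascade:
                    bl.segments.append(Segment(name='seg 0'))
                return bl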
"""
    is_readable = False
    is_writable = False

    supported_objects = []
    readable_objects = []
    writeable_objects = []

    has_header = False
    is_streameable = False

    read_params = {}
    write_params = {}

    name = 'BaseIO'
    description = 'This IO does not read or write anything'
    extentions = []

    mode = 'file'  # or 'fake' or 'dir' or 'database'

    def __init__(self, filename=None, **kargs):
        self.filename = filename

    ######## General read/write methods #######################
    def read(self, lazy=False, cascade=True, **kargs):
        if Block in self.readable_objects:
            if (hasattr(self, 'read_all_blocks') and
                    callable(getattr(self, 'read_all_blocks'))):
                return self.read_all_blocks(lazy=lazy, cascade=cascade,
                                            **kargs)
            return [self.read_block(lazy=lazy, cascade=cascade, **kargs)]
        elif Segment in self.readable_objects:
            bl = Block(name='One segment only')
            if not cascade:
                return bl
            seg = self.read_segment(lazy=lazy, cascade=cascade, **kargs)
            bl.segments.append(seg)
            create_many_to_one_relationship(bl)
            return [bl]
        else:
            raise NotImplementedError
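    # Usage sketch (the IO class and file name below are hypothetical and only
    # illustrate how read() is typically called on a concrete IO that lists
    # Block in its readable_objects):
    #     io = MyFormatIO(filename='recording.dat')
    #     blocks = io.read(lazy=False, cascade=True)  # -> list of Block objects
    # For an IO that can only read Segment, read() wraps the single Segment
    # returned by read_segment() in a new Block.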
    def write(self, bl, **kargs):
        if Block in self.writeable_objects:
            if isinstance(bl, collections.Sequence):
                assert hasattr(self, 'write_all_blocks'), \
                    '%s does not offer to store a sequence of blocks' % \
                    self.__class__.__name__
                self.write_all_blocks(bl, **kargs)
            else:
                self.write_block(bl, **kargs)
        elif Segment in self.writeable_objects:
            assert len(bl.segments) == 1, \
                ('%s is based on Segment, so a Block passed to write() must '
                 'contain only one Segment' % self.__class__.__name__)
            self.write_segment(bl.segments[0], **kargs)
        else:
            raise NotImplementedError
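    # Usage sketch (hypothetical IO class and variable names, shown only to
    # illustrate the branches above) for an IO that lists Block in its
    # writeable_objects:
    #     io = MyFormatIO(filename='out.dat')
    #     io.write(bl)            # a single Block
    #     io.write([bl1, bl2])    # a sequence of Blocks; requires write_all_blocks()
    # A Segment-based IO instead expects a Block containing exactly one Segment.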
    ######## All individual read methods #######################
    def read_block(self, **kargs):
        assert(Block in self.readable_objects), read_error

    def read_segment(self, **kargs):
        assert(Segment in self.readable_objects), read_error

    def read_unit(self, **kargs):
        assert(Unit in self.readable_objects), read_error

    def read_spiketrain(self, **kargs):
        assert(SpikeTrain in self.readable_objects), read_error

    def read_spike(self, **kargs):
        assert(Spike in self.readable_objects), read_error

    def read_analogsignal(self, **kargs):
        assert(AnalogSignal in self.readable_objects), read_error

    def read_irregularlysampledsignal(self, **kargs):
        assert(IrregularlySampledSignal in self.readable_objects), read_error

    def read_analogsignalarray(self, **kargs):
        assert(AnalogSignalArray in self.readable_objects), read_error

    def read_recordingchannelgroup(self, **kargs):
        assert(RecordingChannelGroup in self.readable_objects), read_error

    def read_recordingchannel(self, **kargs):
        assert(RecordingChannel in self.readable_objects), read_error

    def read_event(self, **kargs):
        assert(Event in self.readable_objects), read_error

    def read_eventarray(self, **kargs):
        assert(EventArray in self.readable_objects), read_error

    def read_epoch(self, **kargs):
        assert(Epoch in self.readable_objects), read_error

    def read_epocharray(self, **kargs):
        assert(EpochArray in self.readable_objects), read_error
    ######## All individual write methods #######################
    def write_block(self, bl, **kargs):
        assert(Block in self.writeable_objects), write_error

    def write_segment(self, seg, **kargs):
        assert(Segment in self.writeable_objects), write_error

    def write_unit(self, ut, **kargs):
        assert(Unit in self.writeable_objects), write_error

    def write_spiketrain(self, sptr, **kargs):
        assert(SpikeTrain in self.writeable_objects), write_error

    def write_spike(self, sp, **kargs):
        assert(Spike in self.writeable_objects), write_error

    def write_analogsignal(self, anasig, **kargs):
        assert(AnalogSignal in self.writeable_objects), write_error

    def write_irregularlysampledsignal(self, irsig, **kargs):
        assert(IrregularlySampledSignal in self.writeable_objects), write_error

    def write_analogsignalarray(self, anasigar, **kargs):
        assert(AnalogSignalArray in self.writeable_objects), write_error

    def write_recordingchannelgroup(self, rcg, **kargs):
        assert(RecordingChannelGroup in self.writeable_objects), write_error

    def write_recordingchannel(self, rc, **kargs):
        assert(RecordingChannel in self.writeable_objects), write_error

    def write_event(self, ev, **kargs):
        assert(Event in self.writeable_objects), write_error

    def write_eventarray(self, ea, **kargs):
        assert(EventArray in self.writeable_objects), write_error

    def write_epoch(self, ep, **kargs):
        assert(Epoch in self.writeable_objects), write_error

    def write_epocharray(self, epa, **kargs):
        assert(EpochArray in self.writeable_objects), write_error