source: SHX/trunk/SeismicHandler/patches/obspy_.py @ 894

Revision 894, 14.1 KB checked in by marcus, 7 years ago (diff)
  • working on trace attributes
  • Property svn:eol-style set to native
Line 
1# -*- coding: utf-8 -*-
2
3#    This file is part of Seismic Handler eXtended (SHX). For terms of use and
4#    license information please see license.txt and visit
5#    http://www.seismic-handler.org/portal/wiki/Shx/LicenseTerms
6
7"""
8This modules defines extensions to obspy. They are patched into the
9original obspy modules at a very basic level in SeismicHandler/__init__.py
10
11This module is named obspy_.py to prevent import loops.
12"""
13
14import numpy as np
15import math
16from obspy.core import Stream, Trace
17from obspy.core.util import AttribDict
18from obspy.sh.core import fromUTCDateTime, toUTCDateTime
19from SeismicHandler.basics.messages import log_message, get_runtime
20from SeismicHandler.basics.error import ShxError
21from SeismicHandler.core import Traces, Hidden
22from SeismicHandler.utils import nearest_int
23
24__all__ = []
25
26
27def _stream_auto_merge(self, *args, **kwargs):
28    """
29    Call obspy's Stream merge method to fill up gaps using the latest pre-gap
30    sample value. This destroys the spectra of the data but it's common.
31    """
32
33    # Change value data type to float32 as in former versions of SH.
34    # This will be changed in later versions.
35    for t in self.traces:
36        if t.data.dtype != np.dtype('float32'):
37            # makes only a copy if needed (astype always copies data)
38            t.data = np.require(t.data, dtype='float32')
39
40    # disabling merging because of unwanted side effects
41    return
42
43    if len(self.traces) > 1:
44        try:
45            self.merge(method=1, fill_value='latest')
46        except Exception, E:
47            log_message("debug.auto_merge", E)
48
def stream_auto_merge_init(self, *args, **kwargs):
    """
    Replacement for ``Stream.__init__``: run the original (renamed)
    constructor first, then normalize the trace data via
    ``_stream_auto_merge``.
    """
    # __shx_init__ holds obspy's original Stream.__init__ (see patching below)
    self.__shx_init__(*args, **kwargs)
    _stream_auto_merge(self)
52
def trace_prepare_image_data(self, width, height, timewindow=None, zoom=1,
                                           norm="window", norm_amplitude=None):
    """
    Preparing trace data for fast plotting. Using numpy's minmax feature - this
    will not just downsample the data!

    width:          total width of plotting window in pixels
    height:         height of plotting window in pixels
    zoom:           global zoom factor (y extension)
    timewindow:     current global time window (tuple of min and max)
    norm:           norm method: "window", "total" or float value
                    (affects y extension)
    norm_amplitude: maximum global amplitude. If set this value is used for
                    normation. NOTE(review): currently unused in the body.

    Results are stored on ``self.shx``: ``ImageData`` (list of line segments),
    ``PlotData`` (reduced sample values), ``PlotPixels`` and ``CacheTraceID``.
    """
    log_message('debug.patches', "Preparing plotting data...", self.id)

    if not timewindow:
        # use trace's total dimensions
        windowstart, windowend = (self.get_info("relstart"),
                                                       self.get_info("relend"))
    else:
        windowstart, windowend = timewindow

    # get slice for time window
    pt = self.slice_relative(windowstart, windowend)

    duration_total = windowend - windowstart
    duration_trace = pt.stats.endtime - pt.stats.starttime

    # width of the total screen that is covered by trace data (pixels)
    # NOTE(review): a zero-length time window would divide by zero here --
    # presumably callers guarantee windowend > windowstart.
    pixel_width = duration_trace * width / duration_total

    # Remember values for caching. The first three items identify the
    # x-transformed data, the remaining ones the y scaling.
    CacheTraceID = \
             (windowstart, windowend, "%.5f" % pixel_width, height, zoom, norm)
    try:
        cachedX = CacheTraceID[:3] == self.shx.CacheTraceID[:3]
        cachedY = cachedX and CacheTraceID[3:] == self.shx.CacheTraceID[3:]
    except Exception:
        # no (valid) cache id stored yet -- was a bare "except:" which would
        # also swallow KeyboardInterrupt/SystemExit
        cachedX = False
        cachedY = False

    if not cachedX:
        # save new ID
        self.shx.CacheTraceID = CacheTraceID

        # data points per pixel
        npts = pt.stats.npts
        dpp = int(npts / pixel_width)

        # get copy of data
        self.shx.PlotData = pt.data.copy()
    else:
        log_message('debug.patches.devel',
                                    "using cachedX data for", CacheTraceID[:3])

    # Use minmax approach if more than 4 data points per pixel;
    # self.shx.PlotData will be changed. Note that "dpp" is only bound when
    # cachedX is False -- the short-circuit keeps this safe.
    if not cachedX and dpp >= 4:
        # do integer division since array shape values cannot be floats
        dimx = npts // dpp
        covered = dpp * dimx

        # get data for reshape operation
        data = self.shx.PlotData[:covered]

        # extra treatment for remaining values
        remaining = self.shx.PlotData[covered:]

        # reshape data and get min/max values per pixel column
        data.shape = dimx, dpp
        _min = data.min(axis=1)
        _max = data.max(axis=1)
        # combine data: interleave min/max pairs
        joined = np.append([_min], [_max], axis=0).T.flatten()
        # handle remaining
        if len(remaining):
            joined = np.append(joined, [remaining.min(), remaining.max()])

        log_message(
            'debug.patches.devel',
             "pixel_width", pixel_width, "minmax", "npts", npts, "dpp",
             dpp, "dimx", dimx, "covered", covered, "len_rest",
             len(remaining), "data_len", len(data), "joined", len(joined),
             "width", width
        )

        # At this stage the x-transformed data can be cached! If pixel_width
        # doesn't change, the data can be reused to save time.
        self.shx.PlotData = joined

    log_message('debug.patches.devel',
                                  width, height, norm, type(self.shx.PlotData))

    if cachedY:
        log_message('debug.patches.devel',
                                    "using cachedY data for", CacheTraceID[3:])
        return

    # renew cache id
    self.shx.CacheTraceID = CacheTraceID

    # get basis for normation: strings select window/total maximum, any
    # numeric value is used directly
    if hasattr(norm, "lower"):
        norm = norm.upper()[0]
        if norm == "W":
            norm = abs(pt.max())
        elif norm == "T":
            norm = self.max()
        else:
            raise ValueError("Invalid input for normation!")

    # Calculate y extension, leads to raw "image" data.
    # Use a copy to save time if only y changes occur (e.g. zooming).
    # Apply normation, calibration and total height factor
    y = self.shx.PlotData.copy()
    y *= self.stats.calib / norm * height / 2 * zoom

    # factor for x stretching (trade-off between pixel_width and array shape)
    stretch = pixel_width / len(y)
    width = int(math.ceil(pixel_width))

    # calculate line data
    # do not rely on framework axis' management -> shift/flip y-axis
    offset = height // 2
    lines = []
    oldx, oldy = 0, -y[0] + offset

    # "range" instead of py2-only "xrange": identical iteration behavior
    for i in range(1, len(y)):
        newx = i * stretch
        newy = -y[i] + offset
        lines.append([oldx, oldy, newx, newy])
        oldx, oldy = newx, newy

    self.shx.ImageData = lines
    self.shx.PlotPixels = pixel_width
192
def trace_init(self, *args, **kwargs):
    """
    Replacement for ``Trace.__init__``: run the original constructor, then
    attach the SHX containers (``self.shx``, ``stats.sh``) and establish
    default info entries.
    """
    self.__shx_init__(*args, **kwargs)

    # container for SHX related data
    if getattr(self, "shx", None) is None:
        self.shx = AttribDict()
    # additional headers
    if "sh" not in self.stats:
        self.stats.sh = AttribDict()
    else:
        # Change present keys to upper case. Iterate over a snapshot of the
        # keys: the original code popped entries while iterating the mapping
        # itself, which mutates the dict during iteration (RuntimeError or
        # skipped/duplicated keys).
        for k in list(self.stats.sh.keys()):
            self.stats.sh[k.upper()] = self.stats.sh.pop(k)

    self.establish()
209
def trace_invalidate_cache(self):
    """
    Mark the cached pixel data as dirty. Needed if data is altered inplace.
    """
    # an empty id never matches a real CacheTraceID tuple, forcing a rebuild
    log_message("debug.patches",
                "Pixel cache marked dirty for trace %u" % self.index())
    self.shx.CacheTraceID = []
217
218
def trace_get_info(self, name, default=None):
    """
    Mapping of Seismic Handler trace information entries to ObsPy status info.

    Lookup order:
      1. plain obspy ``stats`` entry of the same (lower-cased) name
      2. hard-coded SH mappings (LENGTH, CHAN1, START, ...)
      3. additional SH headers in ``stats.sh``
      4. self-defined entries in ``self.shx``

    :param name: info entry name, case insensitive
    :param default: value returned for unknown entries; if left at None a
        ShxError (status 1703) is raised instead -- i.e. None itself cannot
        be used as a default value
    """
    name = name.upper()

    # identical name, obspy always lower case
    try:
        value = self.stats[name.lower()]
        # SH represents boolean info entries as "Y"/"N" strings
        if isinstance(value, bool):
            value = value and "Y" or "N"
        return value
    except KeyError:
        pass

    # mapping
    if name in ["LENGTH", "DSPCNT", "ALLOC"]:
        return self.stats.npts

    # single characters of the three-letter channel code
    if name == "CHAN1":
        return self.stats.channel[0]

    if name == "CHAN2":
        return self.stats.channel[1]

    if name == "COMP":
        return self.stats.channel[2]

    # start date/time in SH string representation ("date_time")
    if name == "START":
        return fromUTCDateTime(self.stats.starttime)

    if name == "DATE":
        return fromUTCDateTime(self.stats.starttime).split("_")[0]

    if name == "TIME":
        return fromUTCDateTime(self.stats.starttime).split("_")[1]

    if name == "MAXVAL":
        return self.data.max()

    if name == "MINVAL":
        return self.data.min()

    # relative positions regarding T-ORIGIN
    if name == "RELSTART":
        return self.get_info("T-ORIGIN", 0)

    if name == "RELEND":
        return self.stats.endtime - self.stats.starttime + \
                                                   self.get_info("T-ORIGIN", 0)

    # length in seconds
    if name == "LENGTH_TIME":
        return self.stats.endtime - self.stats.starttime

    # inside additional headers
    header = self.stats.sh.get(name, None)
    if header is not None:
        # SH represents boolean info entries as "Y"/"N" strings
        if isinstance(header, bool):
            header = header and "Y" or "N"
        return header

    # finally look into self defined entries
    if name in self.shx:
        return self.shx[name]

    # entry unknown: raise unless a (non-None) default was supplied
    if default is None:
        raise ShxError("Info entry not found", status=1703)
    else:
        return default
289
def trace_set_info(self, name, value):
    """
    Setting obspy trace data from SH's info entry names.

    :param name: info entry name, case insensitive
    :param value: new value; for CHAN1/CHAN2/COMP it is upper-cased, for
        flag entries it is coerced to a boolean
    :raises ShxError: (status 1738) for read-only entries
    """
    name = name.upper()

    flags = ["MODIF",]

    # position of the character each entry controls within stats.channel
    # (deduplicates the three formerly identical CHAN1/CHAN2/COMP branches)
    channel_pos = {"CHAN1": 0, "CHAN2": 1, "COMP": 2}

    # in SH far more values were read-only
    if name in ["LENGTH", "MAXVAL", "SH", "ALLOC", "DSPCNT"]:
        raise ShxError("info entry is read-only", status=1738)
    elif name in channel_pos:
        channel = list(self.stats.channel)
        channel[channel_pos[name]] = value.upper()
        self.stats.channel = "".join(channel)
    elif name == "START":
        self.stats.starttime = toUTCDateTime(value)
    elif name.lower() in self.stats:
        self.stats[name.lower()] = value
    elif name in flags:
        # name is already upper case here, so no second .upper() is needed
        self.stats[name.lower()] = bool(value)
    else:
        # all other additional entries put inside stats.sh
        self.stats.sh[name] = value
325
def trace_is_hidden(self):
    """
    Return 1 if this trace's parent pool is the Hidden pool, else 0.
    """
    # identity check replaces the equivalent id(...) == id(...) comparison
    return 1 if getattr(self.shx, "_parent", None) is Hidden else 0
332
def trace_establish(self):
    """
    Set default info entries for SH(X) trace. Etc.

    Idempotent: a second call is a no-op (guarded by ``shx._established``).
    """
    if self.shx.get("_established", False):
        return

    self.shx._established = True
    self.shx._parent = Traces

    sh = self.stats.sh

    # runtime-configurable defaults
    sh.ZOOM = get_runtime("zoom", 1.)
    sh.REDUCTION = get_runtime("reduction", 1)
    sh.NORM = get_runtime("norm", 0.5)
    sh.WEIGHT = get_runtime("weight", 1.)

    sh.ATTRIB = '0'

    # boolean flags, all off by default (OVERLAY controls plotting)
    for flag in ("NOCALIB", "MODIF", "AMPLICUT", "USFLG1", "USFLG2",
                 "OVERLAY"):
        sh[flag] = False

    # unknown
    sh.DSPFST = 0

    # depends
    if "FROMQ" not in sh:
        sh.FROMQ = False
    if "QUAL" not in sh:
        sh.QUAL = False
    if not self.stats.get("channel", None):
        self.stats.channel = "   "
    # Seismic Handler has another minimum start date/time
    if self.stats.starttime == toUTCDateTime("1-JAN-1970_00:00:00"):
        self.stats.starttime = toUTCDateTime("1-JUL-1970_12:00:00")

    # plotting x offset
    sh["T-ORIGIN"] = 0.
    # plotting y offset
    sh["S-ORIGIN"] = 0.

    # change data type
    self.data = np.require(self.data, "float32")
377
def trace_index(self, one=False):
    """
    Return trace position within parent's list. If "one" is set to True,
    numbering will start at 1.
    """
    position = self.shx._parent.traces.index(self)
    return position + 1 if one else position
387
def trace_get_datawindow(self, lo, hi):
    """
    Return plain copy of data within relative time window defined by lo and hi.
    """
    # slicing is delegated entirely to slice_relative
    return self.slice_relative(lo, hi).data
394
def trace_slice_relative(self, lo, hi):
    """
    Slice trace using relative time axis, not absolute time values.
    """
    torigin = self.get_info("t-origin")

    # clamp the requested window to the extent of the present data
    start = max(self.get_info("relstart"), lo)
    end = min(self.get_info("relend"), hi)

    # convert relative positions (which include T-ORIGIN) to absolute times
    return self.slice(self.stats.starttime + start - torigin,
                      self.stats.starttime + end - torigin)
413
def trace_get_sample_id(self, time, check=True):
    """
    Return index of sample at "time" inside trace's data. If "check" the result
    is checked against present data borders.
    """
    torigin = self.get_info("t-origin")
    idx = nearest_int((time - torigin) / self.stats.delta)

    if check:
        # clip the index into the valid sample range [0, len-1]
        idx = min(max(idx, 0), len(self) - 1)

    return idx
432
def trace_copy(self):
    """
    Return copy of trace with respect to NOCOPY flags.

    Delegates to obspy's original copy method (kept as ``_obspy_copy``),
    strips the info entries that must not survive a copy and re-establishes
    SHX defaults on the new trace.
    """
    new = self._obspy_copy()

    # check for info entries that may not be copied:
    NOCOPY = ["CALIB", "OPINFO", "USR1", "USR2", "MODIF"]

    for entry in NOCOPY:
        try:
            del new.stats.sh[entry.upper()]
        except (KeyError, AttributeError):
            # entry not present (or no sh header at all); was a bare
            # "except:" which would also hide unrelated errors
            pass

    new.establish()
    return new
450
# Monkey patching obspy's Stream class: the original constructor is kept
# available as __shx_init__ and replaced by a wrapper that converts trace
# data to float32 (automatic merging itself is currently disabled, see
# _stream_auto_merge).
Stream.__shx_init__, Stream.__init__ = Stream.__init__, stream_auto_merge_init

# Monkey patching obspy's Trace class: same pattern, the original __init__
# is kept as __shx_init__.
Trace.__shx_init__, Trace.__init__ = Trace.__init__, trace_init
# additional methods
Trace.prepare_image_data = trace_prepare_image_data
Trace.get_info = trace_get_info
Trace.set_info = trace_set_info
Trace.is_hidden = trace_is_hidden
Trace.establish = trace_establish
Trace.index = trace_index
Trace.get_datawindow = trace_get_datawindow
Trace.slice_relative = trace_slice_relative
# keep the original copy method as _obspy_copy (used inside trace_copy)
Trace._obspy_copy, Trace.copy = Trace.copy, trace_copy
Trace.get_sample_id = trace_get_sample_id
Trace.invalidate_cache = trace_invalidate_cache

log_message("info.patches", "Applied monkey patches for ObsPy")
Note: See TracBrowser for help on using the repository browser.