source: SHX/trunk/SeismicHandler/patches/obspy_.py @ 931

Revision 931, 14.2 KB checked in by marcus, 6 years ago (diff)
  • Property svn:eol-style set to native
Line 
1# -*- coding: utf-8 -*-
2
3#    This file is part of Seismic Handler eXtended (SHX). For terms of use and
4#    license information please see license.txt and visit
5#    http://www.seismic-handler.org/portal/wiki/Shx/LicenseTerms
6
7"""
8This modules defines extensions to obspy. They are patched into the
9original obspy modules at a very basic level in SeismicHandler/__init__.py
10
11This module is named obspy_.py to prevent import loops.
12"""
13
14import numpy as np
15import math
16from obspy.core import Stream, Trace
17from obspy.core.util import AttribDict
18from obspy.sh.core import fromUTCDateTime, toUTCDateTime
19from SeismicHandler.basics.messages import log_message, get_runtime
20from SeismicHandler.basics.error import ShxError
21from SeismicHandler.core import Traces, Hidden
22from SeismicHandler.utils import nearest_int
23
24__all__ = []
25
26
27def _stream_auto_merge(self, *args, **kwargs):
28    """
29    Call obspy's Stream merge method to fill up gaps using the latest pre-gap
30    sample value. This destroys the spectra of the data but it's common.
31    """
32
33    # Change value data type to float32 as in former versions of SH.
34    # This will be changed in later versions.
35    for t in self.traces:
36        if t.data.dtype != np.dtype('float32'):
37            # makes only a copy if needed (astype always copies data)
38            t.data = np.require(t.data, dtype='float32')
39
40    # disabling merging because of unwanted side effects
41    return
42
43    if len(self.traces) > 1:
44        try:
45            self.merge(method=1, fill_value='latest')
46        except Exception, E:
47            log_message("debug.auto_merge", E)
48
def stream_auto_merge_init(self, *args, **kwargs):
    """
    Replacement for Stream.__init__: first run obspy's original constructor
    (saved as __shx_init__ by the monkey patching at the bottom of this
    file), then normalize the trace data via _stream_auto_merge.
    """
    self.__shx_init__(*args, **kwargs)
    _stream_auto_merge(self)
52
def trace_prepare_image_data(self, width, height, timewindow=None, zoom=1,
                                           norm="window", norm_amplitude=None):
    """
    Preparing trace data for fast plotting. Using numpy's minmax feature - this
    will not just downsample the data!

    Results are stored on self.shx (PlotData, ImageData, PlotPixels) and
    cached via self.shx.CacheTraceID, so repeated calls with unchanged
    parameters are cheap.

    width:          total width of plotting window in pixels
    height:         height of plotting window in pixels
    zoom:           global zoom factor (y extension)
    timewindow:     current global time window (tuple of min and max)
    norm:           norm method: "window", "total" or float value
                    (effects y extension)
    norm_amplitude: maximum global amplitude intended for normation.
                    NOTE(review): never used in the body (the old docstring
                    called it "max_amplitude") -- confirm whether it should
                    override "norm".
    """
    log_message('debug.patches', "Preparing plotting data...", self.id)

    if not timewindow:
        # use trace's total dimensions
        windowstart, windowend = (self.get_info("relstart"),
                                                       self.get_info("relend"))
    else:
        windowstart, windowend = timewindow

    # get slice for time window
    pt = self.slice_relative(windowstart, windowend)

    duration_total = windowend - windowstart
    duration_trace = pt.stats.endtime - pt.stats.starttime

    # width of the total screen that is covered by trace data (pixels)
    # NOTE(review): a zero-length time window raises ZeroDivisionError here
    pixel_width = duration_trace * width / duration_total

    # remember values for caching: the first three components identify the
    # x transformation, the last three the y transformation
    CacheTraceID = \
             (windowstart, windowend, "%.5f" % pixel_width, height, zoom, norm)
    try:
        cachedX = CacheTraceID[:3] == self.shx.CacheTraceID[:3]
        cachedY = cachedX and CacheTraceID[3:] == self.shx.CacheTraceID[3:]
    except:
        # no (valid) cache id present yet -- treat everything as dirty
        cachedX = False
        cachedY = False

    if not cachedX:
        # save new ID
        self.shx.CacheTraceID = CacheTraceID

        # data points per pixel
        npts = pt.stats.npts
        dpp = int(npts / pixel_width)

        # get copy of data
        self.shx.PlotData = pt.data.copy()
    else:
        log_message('debug.patches.devel',
                                    "using cachedX data for", CacheTraceID[:3])

    # use minmax approach if more than 4 data points per pixel
    # self.shx_plotdata will be changed
    # (npts/dpp are only defined when cachedX is False; the short-circuit
    # "not cachedX and ..." guards against a NameError here)
    if not cachedX and dpp >= 4:
        # do integer division since array shape values cannot be floats
        dimx = npts // dpp
        covered = dpp * dimx

        # get data for reshape operation
        data = self.shx.PlotData[:covered]

        # extra treatment for remaining values
        remaining = self.shx.PlotData[covered:]

        # reshape data and get min/max values
        data.shape = dimx, dpp
        _min = data.min(axis=1)
        _max = data.max(axis=1)
        # combine data: interleave min/max pairs per pixel column
        joined = np.append([_min], [_max], axis=0).T.flatten()
        # handle remaining
        if len(remaining):
            joined = np.append(joined, [remaining.min(), remaining.max()])

        log_message(
            'debug.patches.devel',
             "pixel_width", pixel_width, "minmax", "npts", npts, "dpp",
             dpp, "dimx", dimx, "covered", covered, "len_rest",
             len(remaining), "data_len", len(data), "joined", len(joined),
             "width", width
        )

        # At this stage the x-transformed data can be cached! If pixel_width
        # doesn't change, the data can be reused to save time.
        self.shx.PlotData = joined

    log_message('debug.patches.devel',
                                  width, height, norm, type(self.shx.PlotData))

    if cachedY:
        log_message('debug.patches.devel',
                                    "using cachedY data for", CacheTraceID[3:])
        return

    # renew cache id
    self.shx.CacheTraceID = CacheTraceID

    # get basis for normation; strings select a norm mode, numbers are used
    # directly as the norm value
    if hasattr(norm, "lower"):
        norm = norm.upper()[0]
        if norm == "W":
            # normalize on the maximum of the current time window
            norm = abs(pt.max())
        elif norm == "T":
            # normalize on the maximum of the whole trace
            norm = self.max()
        else:
            raise ValueError("Invalid input for normation!")

    # Calculate y extension, leads to raw "image" data.
    # Use a copy to save time if only y changes occur (e.g. zooming).
    # Apply normation, calibration and total height factor
    y = self.shx.PlotData.copy()
    y *= self.stats.calib / norm * height / 2 * zoom

#    print self.shxPlotData.min(), self.shxPlotData.max(), y.min(), y.max()

    # factor for x stretching (trade-off between pixel_width and array shape)
    stretch = pixel_width / len(y)
    # NOTE(review): this reassigned width is not used afterwards
    width = int(math.ceil(pixel_width))

    # calculate line data
    # do not rely on framework axis' management -> shift/flip y-axis
    offset = height // 2
    lines = []
    oldx, oldy = 0, -y[0] + offset

    for i in xrange(1, len(y)):
        newx = i * stretch
        newy = -y[i] + offset
        lines.append([oldx, oldy, newx, newy])
        oldx, oldy = newx, newy

    self.shx.ImageData = lines
    self.shx.PlotPixels = pixel_width
192
def trace_init(self, *args, **kwargs):
    """
    Replacement for Trace.__init__: run the original obspy constructor,
    then attach the SHX specific containers and default info entries.
    """
    self.__shx_init__(*args, **kwargs)

    # container for SHX related data
    if getattr(self, "shx", None) is None:
        self.shx = AttribDict()
    # additional headers
    if "sh" not in self.stats:
        self.stats.sh = AttribDict()
    else:
        # Change present keys to upper case. Iterate over a snapshot of the
        # keys: popping and re-inserting while iterating the mapping itself
        # mutates it during iteration, which can skip or revisit entries
        # (or raise a RuntimeError).
        for k in list(self.stats.sh.keys()):
            v = self.stats.sh.pop(k)
            self.stats.sh[k.upper()] = v

    self.establish()
209
def trace_invalidate_cache(self):
    """
    Mark the cached pixel data as dirty. Needed if data is altered inplace.
    """
    # self.index() fails for traces that are not attached to a parent trace
    # list; fall back to the object id then. Narrowed the former bare
    # "except:" so e.g. KeyboardInterrupt is not swallowed.
    try:
        msg = "Pixel cache marked dirty for trace %u" % self.index()
    except Exception:
        msg = "Pixel cache marked dirty for unbounded trace %u" % id(self)
    log_message("debug.patches", msg)
    # an empty cache id never compares equal in trace_prepare_image_data
    self.shx.CacheTraceID = []
220
221
def trace_get_info(self, name, default=None):
    """
    Mapping of Seismic Handler trace information entries to ObsPy status info.

    Lookup order: native obspy stats keys, SH specific mappings, additional
    SH headers (stats.sh), finally self defined entries (shx). Boolean
    values are reported as "Y"/"N". If nothing matches, "default" is
    returned; without a default an ShxError is raised.
    """
    name = name.upper()

    # identical name, obspy always lower case
    try:
        value = self.stats[name.lower()]
    except KeyError:
        pass
    else:
        if isinstance(value, bool):
            value = "Y" if value else "N"
        return value

    # several SH entries all map onto the sample count
    if name in ("LENGTH", "DSPCNT", "ALLOC"):
        return self.stats.npts

    # single characters of the channel code
    positions = {"CHAN1": 0, "CHAN2": 1, "COMP": 2}
    if name in positions:
        return self.stats.channel[positions[name]]

    # entries derived from the absolute start time
    if name in ("START", "DATE", "TIME"):
        stamp = fromUTCDateTime(self.stats.starttime)
        if name == "DATE":
            return stamp.split("_")[0]
        if name == "TIME":
            return stamp.split("_")[1]
        return stamp

    if name == "MAXVAL":
        return self.data.max()

    if name == "MINVAL":
        return self.data.min()

    # relative positions regarding T-ORIGIN
    if name == "RELSTART":
        return self.get_info("T-ORIGIN", 0)

    if name == "RELEND":
        return self.stats.endtime - self.stats.starttime + \
                                                   self.get_info("T-ORIGIN", 0)

    # length in seconds
    if name == "LENGTH_TIME":
        return self.stats.endtime - self.stats.starttime

    # inside additional headers
    header = self.stats.sh.get(name, None)
    if header is not None:
        if isinstance(header, bool):
            return "Y" if header else "N"
        return header

    # finally look into self defined entries
    if name in self.shx:
        return self.shx[name]

    if default is None:
        raise ShxError("Info entry not found", status=1703)
    return default
292
def trace_set_info(self, name, value):
    """
    Setting obspy trace data from SH's info entry names.

    Read-only entries raise an ShxError, channel component entries patch a
    single character of the channel code, known flag entries are coerced to
    bool, known obspy keys go into stats and everything else ends up in the
    additional stats.sh headers.
    """
    name = name.upper()

    # entries treated as boolean flags
    flags = ["MODIF",]

    # in SH far more values were read-only
    if name in ("LENGTH", "MAXVAL", "SH", "ALLOC", "DSPCNT"):
        raise ShxError("info entry is read-only", status=1738)

    positions = {"CHAN1": 0, "CHAN2": 1, "COMP": 2}
    if name in positions:
        # replace a single character of the channel code
        chars = list(self.stats.channel)
        chars[positions[name]] = value.upper()
        self.stats.channel = "".join(chars)
    elif name == "START":
        self.stats.starttime = toUTCDateTime(value)
    elif name.lower() in self.stats:
        self.stats[name.lower()] = value
    elif name in flags:
        self.stats[name.lower()] = bool(value)
    else:
        # all other additional entries put inside stats.sh
        self.stats.sh[name] = value
328
def trace_is_hidden(self):
    """
    Return 1 if the trace currently belongs to the hidden trace list
    (its parent is the module level Hidden container), else 0.
    """
    parent = getattr(self.shx, "_parent", None)
    return 1 if parent is Hidden else 0
335
def trace_establish(self):
    """
    Set default info entries for SH(X) trace. Etc.

    Idempotent: a trace that was already established is left untouched.
    Defaults for zoom, reduction, norm and weight come from the SHX runtime
    configuration; everything else gets fixed start values.
    """
    if self.shx.get("_established", False):
        return

    self.shx._established = True
    # new traces belong to the visible trace list by default
    self.shx._parent = Traces

    # display related defaults, partly taken from runtime configuration
    self.stats.sh.ZOOM = get_runtime("zoom", 1.)
    self.stats.sh.ATTRIB = '0'
    self.stats.sh.REDUCTION = get_runtime("reduction", 1)
    self.stats.sh.NORM = get_runtime("norm", 0.5)
    self.stats.sh.WEIGHT = get_runtime("weight", 1.)
    self.stats.sh.NOCALIB = False
    self.stats.sh.MODIF = False
    self.stats.sh.AMPLICUT = False
    self.stats.sh.USFLG1 = False
    self.stats.sh.USFLG2 = False

    # unknown -- NOTE(review): purpose of DSPFST is not clear from this file
    self.stats.sh.DSPFST = 0

    # entries that may already be present (e.g. read from file) are kept
    if not "FROMQ" in self.stats.sh:
        self.stats.sh.FROMQ = False
    if not "QUAL" in self.stats.sh:
        self.stats.sh.QUAL = False
    if not self.stats.get("channel", None):
        self.stats.channel = "   "
    # Seismic Handler has another minimum start date/time
    if self.stats.starttime == toUTCDateTime("1-JAN-1970_00:00:00"):
        self.stats.starttime = toUTCDateTime("1-JUL-1970_12:00:00")

    # plotting x offset
    self.stats.sh["T-ORIGIN"] = 0.
    # plotting y offset
    self.stats.sh["S-ORIGIN"] = 0.
    # plotting
    self.stats.sh.OVERLAY = False

    # change data type to float32 as in former versions of SH
    self.data = np.require(self.data, "float32")
380
def trace_index(self, one=False):
    """
    Return trace position within parent's list. If "one" is set to True,
    numbering will start at 1.
    """
    position = self.shx._parent.traces.index(self)
    return position + 1 if one else position
390
def trace_get_datawindow(self, lo, hi):
    """
    Return plain copy of data within relative time window defined by lo and hi.
    """
    windowed = self.slice_relative(lo, hi)
    return windowed.data
397
def trace_slice_relative(self, lo, hi):
    """
    Slice trace using relative time axis, not absolute time values.

    The requested window is clamped to the trace's own relative extent and
    then translated into absolute times (honouring T-ORIGIN) before being
    handed to obspy's slice.
    """
    torigin = self.get_info("t-origin")

    # clamp requested window to the trace's relative extent
    start = max(self.get_info("relstart"), lo)
    end = min(self.get_info("relend"), hi)

    # translate back to absolute times
    abs_start = self.stats.starttime + start - torigin
    abs_end = self.stats.starttime + end - torigin

    return self.slice(abs_start, abs_end)
416
def trace_get_sample_id(self, time, check=True):
    """
    Return index of sample at "time" inside trace's data. If "check" the result
    is checked against present data borders.
    """
    torigin = self.get_info("t-origin")
    index = nearest_int((time - torigin) / self.stats.delta)

    if not check:
        return index

    # clamp the index into the valid sample range
    return min(max(index, 0), len(self) - 1)
435
def trace_copy(self):
    """
    Return copy of trace with respect to NOCOPY flags.

    The entries in NOCOPY are removed from the copy's SH headers because
    they describe state bound to the original trace; the copy is then
    (re-)established.
    """
    new = self._obspy_copy()

    # check for info entries that may not be copied:
    NOCOPY = ["CALIB", "OPINFO", "USR1", "USR2", "MODIF"]

    for entry in NOCOPY:
        try:
            del new.stats.sh[entry.upper()]
        # only a missing header/key is expected here; the former bare
        # "except:" also swallowed e.g. KeyboardInterrupt
        except (KeyError, AttributeError):
            pass

    new.establish()
    return new
453
# Monkey patching obspy's Stream class: the original constructor is kept
# reachable as __shx_init__; the replacement normalizes trace data after
# construction (automatic merging is currently disabled, see
# _stream_auto_merge).
Stream.__shx_init__, Stream.__init__ = Stream.__init__, stream_auto_merge_init

# monkey patching obspy Trace class the same way
Trace.__shx_init__, Trace.__init__ = Trace.__init__, trace_init
# additional methods
Trace.prepare_image_data = trace_prepare_image_data
Trace.get_info = trace_get_info
Trace.set_info = trace_set_info
Trace.is_hidden = trace_is_hidden
Trace.establish = trace_establish
Trace.index = trace_index
Trace.get_datawindow = trace_get_datawindow
Trace.slice_relative = trace_slice_relative
# keep the original copy implementation reachable as _obspy_copy, which
# trace_copy delegates to
Trace._obspy_copy, Trace.copy = Trace.copy, trace_copy
Trace.get_sample_id = trace_get_sample_id
Trace.invalidate_cache = trace_invalidate_cache

log_message("info.patches", "Applied monkey patches for ObsPy")
Note: See TracBrowser for help on using the repository browser.