source: SHX/trunk/SeismicHandler/patches/obspy_.py @ 1119

Revision 1119, 17.3 KB, checked in by klaus, 4 years ago

metastatus: indicate by trace color; busy cursor

  • Property svn:eol-style set to native
# -*- coding: utf-8 -*-

#    This file is part of Seismic Handler eXtended (SHX). For terms of use and
#    license information please see license.txt and visit
#    http://www.seismic-handler.org/portal/wiki/Shx/LicenseTerms

"""
This module defines extensions to obspy. They are patched into the
original obspy modules at a very basic level in SeismicHandler/__init__.py.

This module is named obspy_.py to prevent import loops.
"""

import numpy as np
import sys
import math
import uuid
from obspy.core import Stream, Trace
from obspy.core.util import AttribDict
from obspy.sh.core import fromUTCDateTime, toUTCDateTime
from SeismicHandler.config import get_runtime
from SeismicHandler.basics.messages import log_message #, get_runtime
from SeismicHandler.basics.error import ShxError
from SeismicHandler.core import Traces, Hidden
from SeismicHandler.utils import nearest_int
from SeismicHandler.basics import timeit, timestore

__all__ = []


# temporary store for per-trace graphics attributes (keyed by UNIQUE_ID) while
# a trace is being copied; see trace_copy and trace_get_graphics below
graphics_hook = {}


#@timeit
def _stream_auto_merge(self, *args, **kwargs):
    """
    Call obspy's Stream merge method to fill gaps with the latest pre-gap
    sample value. This distorts the spectra of the data, but it is common
    practice.
    """

    # Change value data type to float32 as in former versions of SH.
    # This will be changed in later versions.
    for t in self.traces:
        if t.data.dtype != np.dtype('float32'):
            # makes a copy only if needed (astype always copies data)
            t.data = np.require(t.data, dtype='float32')

    # merging is currently disabled because of unwanted side effects;
    # the code below is kept for reference
    return

    if len(self.traces) > 1:
        try:
            self.merge(method=1, fill_value='latest')
        except Exception, E:
            log_message("debug.auto_merge", E)

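# For reference only (illustrative sketch, not executed by SHX): the disabled
# call above uses obspy's standard gap handling, where fill_value='latest'
# repeats the last sample before each gap.
#
#   from obspy.core import read
#   st = read()                             # obspy example stream
#   st.merge(method=1, fill_value='latest') # fill gaps with the pre-gap value
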
#@timeit
def stream_auto_merge_init(self, *args, **kwargs):
    self.__shx_init__(*args, **kwargs)
    _stream_auto_merge(self)

#@timeit
def trace_prepare_image_data(self, width, height, timewindow=None, zoom=1,
                             norm="window", norm_amplitude=None):
    """
    Prepare trace data for fast plotting. Uses numpy's min/max reduction -
    this is more than simple downsampling of the data!

    width:          total width of plotting window in pixels
    height:         height of plotting window in pixels
    zoom:           global zoom factor (y extension)
    timewindow:     current global time window (tuple of min and max)
    norm:           norm method: "window", "total" or float value
                    (affects y extension)
    norm_amplitude: maximum global amplitude. If set, this value is used for
                    normalization.
    """
    shx_graphics = self.get_graphics()
    if shx_graphics is None:
        return

    log_message('debug.patches', "Preparing plotting data...", self.id)

    if not timewindow:
        # use trace's total dimensions
        windowstart, windowend = (self.get_info("relstart"),
                                  self.get_info("relend"))
    else:
        windowstart, windowend = timewindow

    # get slice for time window
    try:
        pt = self.slice_relative(windowstart, windowend)
    except:
        return

    duration_total = windowend - windowstart
    duration_trace = pt.stats.endtime - pt.stats.starttime

    # width of the total screen that is covered by trace data (pixels)
    pixel_width = duration_trace * width / duration_total

    # remember values for caching
    CacheTraceID = \
        (windowstart, windowend, "%.5f" % pixel_width, height, zoom, norm)
    try:
        cachedX = CacheTraceID[:3] == shx_graphics.CacheTraceID[:3]
        cachedY = cachedX and CacheTraceID[3:] == shx_graphics.CacheTraceID[3:]
    except:
        cachedX = False
        cachedY = False

    if not cachedX:
        # save new ID
        shx_graphics.CacheTraceID = CacheTraceID

        # data points per pixel
        npts = pt.stats.npts
        dpp = int(npts / pixel_width)

        # get copy of data
        shx_graphics.PlotData = pt.data.copy()
    else:
        log_message('debug.patches.devel',
                    "using cachedX data for", CacheTraceID[:3])

    # use min/max approach if four or more data points fall on one pixel
    # (shx_graphics.PlotData will be changed)
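    # Illustration of the reshape/min-max step below (values assumed, not
    # executed): with dpp = 4, the samples [3, 1, 4, 1, 5, 9, 2, 6] reshape to
    # [[3, 1, 4, 1], [5, 9, 2, 6]]; per-row minima [1, 2] and maxima [4, 9]
    # are interleaved to [1, 4, 2, 9], so each pixel column is later drawn
    # from its minimum to its maximum sample value.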
    if not cachedX and dpp >= 4:
        # do integer division since array shape values cannot be floats
        dimx = npts // dpp
        covered = dpp * dimx

        # get data for reshape operation
        data = shx_graphics.PlotData[:covered]

        # extra treatment for remaining values
        remaining = shx_graphics.PlotData[covered:]

        # reshape data and get min/max values
        data.shape = dimx, dpp
        _min = data.min(axis=1)
        _max = data.max(axis=1)
        # combine data
        joined = np.append([_min], [_max], axis=0).T.flatten()
        # handle remaining
        if len(remaining):
            joined = np.append(joined, [remaining.min(), remaining.max()])

        log_message(
            'debug.patches.devel',
            "pixel_width", pixel_width, "minmax", "npts", npts, "dpp",
            dpp, "dimx", dimx, "covered", covered, "len_rest",
            len(remaining), "data_len", len(data), "joined", len(joined),
            "width", width
        )

        # At this stage the x-transformed data can be cached! If pixel_width
        # doesn't change, the data can be reused to save time.
        shx_graphics.PlotData = joined

    #ks log_message('debug.patches.devel',
    #ks                              width, height, norm, type(self.shx.PlotData))

    if cachedY:
        log_message('debug.patches.devel',
                    "using cachedY data for", CacheTraceID[3:])
        return

    # renew cache id
    shx_graphics.CacheTraceID = CacheTraceID

    # get basis for normalization
    if hasattr(norm, "lower"):
        norm = norm.upper()[0]
        if norm == "W":
            norm = abs(pt.max())
        elif norm == "T":
            norm = self.max()
        else:
            raise ValueError("Invalid input for normalization!")

    if norm < 1.e-20:
        # zero trace, no norm possible
        norm = 1.

    # Calculate y extension, which leads to raw "image" data.
    # Use a copy to save time if only y changes occur (e.g. zooming).
    # Apply normalization, calibration and total height factor.
    try:
        trczoom = self.stats.sh.ZOOM
    except:
        trczoom = 1.0
    y = shx_graphics.PlotData.copy()
    y *= self.stats.calib / norm * height / 2 * zoom * trczoom
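    # Scaling illustration (values assumed, not executed): with calib = 1,
    # norm = 500 counts, height = 200 px, zoom = 1 and trczoom = 1 the factor
    # is 1 / 500 * 100 = 0.2, so a 500-count sample plots 100 px high, i.e.
    # half the panel height.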

#    print self.shxPlotData.min(), self.shxPlotData.max(), y.min(), y.max()

    # factor for x stretching (trade-off between pixel_width and array shape)
    stretch = pixel_width / len(y)
    width = int(math.ceil(pixel_width))

    # calculate line data
    # do not rely on framework axis' management -> shift/flip y-axis
    offset = height // 2
    lines = []
    oldx, oldy = 0, -y[0] + offset

    for i in xrange(1, len(y)):
        newx = i * stretch
        newy = -y[i] + offset
        lines.append([oldx, oldy, newx, newy])
        oldx, oldy = newx, newy

    shx_graphics.ImageData = lines
    shx_graphics.PlotPixels = pixel_width

#@timeit
def trace_init(self, *args, **kwargs):
    self.__shx_init__(*args, **kwargs)

    # container for SHX related data
    if getattr(self, "shx", None) is None:
        self.shx = AttribDict()
    # additional headers
    if "sh" not in self.stats:
        self.stats.sh = AttribDict()
    else:
        # change present keys to upper case
        # (iterate over a key list since entries are removed on the fly)
        for k in self.stats.sh.keys():
            v = self.stats.sh.pop(k)
            self.stats.sh[k.upper()] = v

    # container for graphics temp data, to be excluded from deepcopy,
    # defined in establish
    self.establish()

#@timeit
def trace_invalidate_cache(self):
    """
    Mark the cached pixel data as dirty. Needed if data is altered in place.
    """
    try:
        msg = "Pixel cache marked dirty for trace %u" % self.index()
    except:
        msg = "Pixel cache marked dirty for unbound trace %u" % id(self)
    log_message("debug.patches", msg)
    shx_graphics = self.get_graphics()
    if shx_graphics is not None:
        shx_graphics.CacheTraceID = []


#@timeit
def trace_get_info(self, name, default=None):
    """
    Mapping of Seismic Handler trace information entries to ObsPy stats info.
    """
    name = name.upper()

    # identical name, obspy always lower case
    try:
        value = self.stats[name.lower()]
        if isinstance(value, bool):
            value = value and "Y" or "N"
        return value
    except KeyError:
        pass

    # mapping
    if name in ["LENGTH", "DSPCNT", "ALLOC"]:
        return self.stats.npts

    if name == "CHAN1":
        return self.stats.channel[0]

    if name == "CHAN2":
        return self.stats.channel[1]

    if name == "COMP":
        return self.stats.channel[2]

    if name == "START":
        return fromUTCDateTime(self.stats.starttime)

    if name == "DATE":
        return fromUTCDateTime(self.stats.starttime).split("_")[0]

    if name == "TIME":
        return fromUTCDateTime(self.stats.starttime).split("_")[1]

    if name == "MAXVAL":
        return self.data.max()

    if name == "MINVAL":
        return self.data.min()

    # relative positions regarding T-ORIGIN
    if name == "RELSTART":
        return self.get_info("T-ORIGIN", 0)

    if name == "RELEND":
        return self.stats.endtime - self.stats.starttime + \
            self.get_info("T-ORIGIN", 0)

    # length in seconds
    if name == "LENGTH_TIME":
        return self.stats.endtime - self.stats.starttime

    if name == "INDEX":
        return self.index(True)

    # inside additional headers
    header = self.stats.sh.get(name, None)
    if header is not None:
        if isinstance(header, bool):
            header = header and "Y" or "N"
        return header

    # finally look into self-defined entries
    if name in self.shx:
        return self.shx[name]

    if default is None:
        raise ShxError("Info entry not found", status=1703)
    else:
        return default

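# Example of the mapping above (illustrative values assumed): for a trace with
# stats.channel == "BHZ", get_info("CHAN1") returns "B", get_info("CHAN2")
# returns "H" and get_info("COMP") returns "Z"; "DATE" and "TIME" are the two
# parts of the SH start time string (e.g. "1-JUL-1970_12:00:00") split at the
# underscore.
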
#@timeit
def trace_set_info(self, name, value):
    """
    Set obspy trace data from SH's info entry names.
    """
    name = name.upper()

    flags = ["MODIF",]

    # in SH far more values were read-only
    if name in ["LENGTH", "MAXVAL", "SH", "ALLOC", "DSPCNT"]:
        raise ShxError("info entry is read-only", status=1738)
    elif name == "CHAN1":
        channel = [i for i in self.stats.channel]
        channel[0] = value.upper()
        self.stats.channel = "".join(channel)
    elif name == "CHAN2":
        channel = [i for i in self.stats.channel]
        channel[1] = value.upper()
        self.stats.channel = "".join(channel)
    elif name == "COMP":
        channel = [i for i in self.stats.channel]
        channel[2] = value.upper()
        self.stats.channel = "".join(channel)
    elif name == "START":
        self.stats.starttime = toUTCDateTime(value)
    elif name.lower() in self.stats:
        self.stats[name.lower()] = value
    elif name.upper() in flags:
        if value:
            self.stats[name.lower()] = True
        else:
            self.stats[name.lower()] = False
    else:
        # all other additional entries put inside stats.sh
        if name in ('T-ORIGIN',):
            self.stats.sh[name] = float(value)
        elif name in ('METASTATUS',):
            self.stats.sh[name] = int(value)
        else:
            self.stats.sh[name] = value

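# Example of the component handling above (illustrative values assumed): on a
# trace with stats.channel == "BHZ", set_info("COMP", "n") rebuilds the channel
# code as "BHN"; set_info("START", "1-JUL-1970_12:00:00") converts the SH time
# string via toUTCDateTime.
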
#@timeit
def trace_is_hidden(self):
    parent = getattr(self.shx, "_parent", None)
    if Hidden is parent:
        return 1
    else:
        return 0

#@timeit
def trace_establish(self):
    """
    Set default info entries and related defaults for an SH(X) trace.
    """
    if getattr(self, "shx_graphics", None) is None:
        self.shx_graphics = AttribDict()

    if self.shx.get("_established", False):
        return

    self.shx._established = True
    self.shx._parent = Traces
    if not hasattr(self.shx, 'original'):
        self.shx.original = None
    isoriginal = self.shx.original is None

    self.stats.sh.ZOOM = get_runtime("zoom", 1.)
    self.stats.sh.ATTRIB = '0'
    self.stats.sh.REDUCTION = get_runtime("reduction", 1)
    self.stats.sh.NORM = get_runtime("norm", 0.5)
    self.stats.sh.WEIGHT = get_runtime("weight", 1.)
    self.stats.sh.NOCALIB = False
    self.stats.sh.MODIF = False
    self.stats.sh.AMPLICUT = False
    self.stats.sh.USFLG1 = False
    self.stats.sh.USFLG2 = False
    self.stats.sh.UNIQUE_ID = uuid.uuid4().hex
    self.stats.sh.METASTATUS = 0
    self.stats.sh.ORIGINAL = isoriginal

    # unknown
    self.stats.sh.DSPFST = 0

    # depends
    if "FROMQ" not in self.stats.sh:
        self.stats.sh.FROMQ = False
    if "QUAL" not in self.stats.sh:
        self.stats.sh.QUAL = False
    if not self.stats.get("channel", None):
        self.stats.channel = "   "
    # Seismic Handler uses a different minimum start date/time
    if self.stats.starttime == toUTCDateTime("1-JAN-1970_00:00:00"):
        self.stats.starttime = toUTCDateTime("1-JUL-1970_12:00:00")

    # plotting x offset
    self.stats.sh["T-ORIGIN"] = 0.
    # plotting y offset
    self.stats.sh["S-ORIGIN"] = 0.
    # plotting
    self.stats.sh.OVERLAY = False

    # change data type
    self.data = np.require(self.data, "float32")

#@timeit
def trace_index(self, one=False):
    """
    Return the trace position within the parent's trace list. If "one" is set
    to True, numbering starts at 1, otherwise at 0.
    Functionality unclear, KS.
    """
    # the following code is incredibly slow:
    #idx = self.shx._parent.traces.index(self)
    # comparing UNIQUE_IDs is much faster
    for i in range(len(self.shx._parent.traces)):
        if self.shx._parent.traces[i].stats.sh.UNIQUE_ID == self.stats.sh.UNIQUE_ID:
            idx = i
            break
    else:
        idx = 0
    if one:
        idx += 1
    return idx

#@timeit
def trace_get_datawindow(self, lo, hi):
    """
    Return the data within the relative time window defined by lo and hi.
    """
    t = self.slice_relative(lo, hi)
    return t.data

#@timeit
def trace_slice_relative(self, lo, hi):
    """
    Slice trace using the relative time axis, not absolute time values.
    """
    start = self.get_info("relstart")
    end = self.get_info("relend")
    torigin = self.get_info("t-origin")

    if start < lo:
        start = lo

    if end > hi:
        end = hi

    start = self.stats.starttime + start - torigin
    end = self.stats.starttime + end - torigin

    return self.slice(start, end)

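# Example for slice_relative above (illustrative values assumed): with
# T-ORIGIN = -10 s and a trace longer than 70 s, slice_relative(0, 60) clips
# the window to [0, 60] on the relative axis and returns the slice from
# stats.starttime + 10 s to stats.starttime + 70 s.
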
#@timeit
def trace_get_sample_id(self, time, check=True):
    """
    Return the index of the sample at "time" inside the trace's data. If
    "check" is set, the result is clipped to the valid data range.
    """
    torigin = self.get_info("t-origin")
    index = nearest_int((time - torigin) / self.stats.delta)

    if not check:
        return index

    if index < 0:
        index = 0

    if index >= len(self):
        index = len(self) - 1

    return index

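# Example for get_sample_id above (illustrative values assumed): with
# delta = 0.01 s and T-ORIGIN = -2.0 s, time = 1.5 s maps to
# nearest_int((1.5 - (-2.0)) / 0.01) = 350.
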
#@timeit
def trace_copy(self):
    """
    Return a copy of the trace, respecting the NOCOPY info entries.
    """

    # exclude parent and original from deepcopy
    parentstore = self.shx._parent
    self.shx._parent = None
    if 'original' in self.shx.keys():
        originalstore = self.shx.original
        self.shx.original = None
    else:
        originalstore = None

    # exclude graphic attributes from deepcopy as these may change during copy
    graphstore = None
    if getattr(self, "shx_graphics", None) is not None:
        graphstore = self.shx_graphics
        graphics_hook[self.stats.sh.UNIQUE_ID] = self.shx_graphics
        delattr(self, 'shx_graphics')

    sys.stdout.flush()
    new = self._obspy_copy()

    # restore _parent and original
    self.shx._parent = parentstore
    new.shx._parent = parentstore
    if originalstore is not None:
        self.shx.original = originalstore
        # new.shx.original = None

    # restore graphical attributes
    if graphstore is not None:
        self.shx_graphics = graphstore
        graphics_hook.pop(self.stats.sh.UNIQUE_ID)

    new.stats.sh.UNIQUE_ID = uuid.uuid4().hex
    #new.shx._established = False

    # check for info entries that may not be copied:
    NOCOPY = ["CALIB", "OPINFO", "USR1", "USR2", "MODIF"]

    for i in NOCOPY:
        try:
            del new.stats.sh[i.upper()]
        except:
            pass

    new.establish()
    return new

def trace_get_graphics(self):
    try:
        return self.shx_graphics
    except:
        if self.stats.sh.UNIQUE_ID in graphics_hook:
            return graphics_hook[self.stats.sh.UNIQUE_ID]
        else:
            print "dbg: no graphics found in obspy_.graphics_hook"
            return None

# monkey patching the obspy Stream class: traces are merged automatically if
# the stream gets altered.
Stream.__shx_init__, Stream.__init__ = Stream.__init__, stream_auto_merge_init

# monkey patching the obspy Trace class
Trace.__shx_init__, Trace.__init__ = Trace.__init__, trace_init
# additional methods
Trace.prepare_image_data = trace_prepare_image_data
Trace.get_info = trace_get_info
Trace.set_info = trace_set_info
Trace.is_hidden = trace_is_hidden
Trace.establish = trace_establish
Trace.index = trace_index
Trace.get_datawindow = trace_get_datawindow
Trace.slice_relative = trace_slice_relative
Trace._obspy_copy, Trace.copy = Trace.copy, trace_copy
Trace.get_sample_id = trace_get_sample_id
Trace.invalidate_cache = trace_invalidate_cache
Trace.get_graphics = trace_get_graphics

log_message("info.patches", "Applied monkey patches for ObsPy")
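
# Usage sketch (illustrative only, once SeismicHandler has imported and
# applied this module): any obspy Trace then carries the SH info-entry
# interface patched in above.
#
#   from obspy.core import read
#   tr = read()[0]                    # Trace.__init__ now runs trace_init
#   tr.set_info("T-ORIGIN", 2.5)
#   print tr.get_info("RELSTART"), tr.get_info("LENGTH_TIME")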