This repository was archived by the owner on Nov 15, 2023. It is now read-only.

Commit

mostly ok on local
kwinkunks committed Jun 25, 2019
1 parent 1d9a5f5 commit e7b25ae
Showing 6 changed files with 245 additions and 174 deletions.
1 change: 0 additions & 1 deletion .ebignore
@@ -1,2 +1 @@
__pycache__

2 changes: 2 additions & 0 deletions .gitignore
@@ -1,4 +1,6 @@
secrets.py
.DS_Store
.vscode/

.elasticbeanstalk
.ebextensions
117 changes: 78 additions & 39 deletions main.py → app.py
@@ -5,6 +5,7 @@
"""
from io import BytesIO
import uuid
import base64

from flask import Flask
from flask import make_response
@@ -31,17 +32,20 @@ def handle_invalid_usage(error):
return response


#
# Seismic frequency and SEGY bot
#
@application.route('/freq')
def freq():

# Params from inputs.
url = request.args.get('url')
b64 = request.args.get('image')
method = request.args.get('method') or 'xing'
avg = request.args.get('avg') or 'mean'
region = request.args.get('region')
ntraces = request.args.get('ntraces') or '10'
trace_spacing = request.args.get('trace_spacing') or 'regular'
bins = request.args.get('bins') or '9'
bins = request.args.get('bins') or '11'
t_min = request.args.get('tmin') or '0'
t_max = request.args.get('tmax') or '1'
dt_param = request.args.get('dt') or 'auto'
@@ -57,7 +61,7 @@ def freq():

# Condition or generate params.
ntraces = int(ntraces)
bins = float(bins)
bins = int(bins)
t_min = float(t_min)
t_max = float(t_max)
uuid1 = str(uuid.uuid1())
@@ -67,27 +71,48 @@ def freq():
region = []

# Fetch and crop image.
try:
r = requests.get(url)
im = Image.open(BytesIO(r.content))
except Exception:
result = {'job_uuid': uuid.uuid1()}
result['status'] = 'failed'
m = 'Error. Unable to open image from target URI. '
result['message'] = m
result['parameters'] = utils.build_params(method, avg,
t_min, t_max,
region,
trace_spacing,
url=url)
return jsonify(result)
if url:
try:
r = requests.get(url)
im = Image.open(BytesIO(r.content))
except Exception:
payload = {'job_uuid': uuid1}
payload['parameters'] = utils.build_params(method, avg,
t_min, t_max, dt_param,
region,
trace_spacing,
url=url)
mess = 'Unable to open image from target URI.'
raise InvalidUsage(mess, status_code=410, payload=payload)

elif b64:
try:
im = Image.open(BytesIO(base64.b64decode(b64)))
except Exception:
payload = {'job_uuid': uuid1}
payload['parameters'] = utils.build_params(method, avg,
t_min, t_max, dt_param,
region,
trace_spacing,
url=url)
mess = 'Could not decode payload image. Check base64 encoding.'
raise InvalidUsage(mess, status_code=410, payload=payload)
else:
payload = {'job_uuid': uuid1}
payload['parameters'] = utils.build_params(method, avg,
t_min, t_max, dt_param,
region,
trace_spacing,
url=url)
mess = 'You must provide an image.'
raise InvalidUsage(mess, status_code=410, payload=payload)

if region:
try:
im = im.crop(region)
except Exception:
m = 'Improper crop parameters '
raise InvalidUsage(m+region, status_code=410)
mess = 'Improper crop parameters '
raise InvalidUsage(mess, status_code=410)

width, height = im.size[0], im.size[1]

@@ -113,11 +138,13 @@ def freq():
grey = geophysics.is_greyscale(im)
i = np.asarray(im) - 128
i = i.astype(np.int8)
if not grey:
if (not grey) and (i.ndim == 3):
r, g, b = i[..., 0], i[..., 1], i[..., 2]
i = np.sqrt(0.299 * r**2. + 0.587 * g**2. + 0.114 * b**2.)
else:
elif i.ndim == 3:
i = i[..., 0]
else:
i = i

# Get SEGY file link, if requested.
if segy:
@@ -131,6 +158,7 @@ def freq():
file_link = utils.get_url(databytes, uuid1)

# Do analysis.
print("Starting analysis")
m = {'auto': geophysics.freq_from_autocorr,
'fft': geophysics.freq_from_fft,
'xing': geophysics.freq_from_crossings}
@@ -139,51 +167,59 @@ def freq():
t_min,
t_max,
traces,
m[method.lower()])
m[method])

print("Finished analysis")

# Compute statistics.
print("***** f_list:", f_list)

fsd, psd = np.nanstd(f_list), np.nanstd(p_list)
fn, pn = len(f_list), len(p_list)

if avg.lower() == 'trim' and fn > 4:
f = geophysics.trim_mean(f_list, 0.2)
if np.isnan(f):
f = 0
elif avg.lower() == 'mean' or (avg == 'trim' and fn <= 4):
f = np.nanmean(f_list)
else:
m = 'avg parameter must be trim or mean'
raise InvalidUsage(m, status_code=410)
mess = 'avg parameter must be trim or mean'
raise InvalidUsage(mess, status_code=410)

if avg.lower() == 'trim' and pn > 4:
p = geophysics.trim_mean(p_list, 0.2)
elif avg.lower() == 'mean' or (avg == 'trim' and pn <= 4):
p = np.nanmean(p_list)
else:
m = 'avg parameter must be trim or mean'
raise InvalidUsage(m, status_code=410)
mess = 'avg parameter must be trim or mean'
raise InvalidUsage(mess, status_code=410)

snrsd = np.nanstd(snr_list)
snr = np.nanmean(snr_list)

# Spectrum.
print("Starting spectrum")

try:
spec = np.mean(np.dstack(specs), axis=-1)
assert len(f_list) > 0
spec = np.nanmean(np.dstack(specs), axis=-1)
fs = i.shape[0] / (t_max - t_min)
freq = np.fft.rfftfreq(i.shape[0], 1/fs)
f_min = np.amin(mis)
f_max = np.amax(mas)
except:
print("Failed spectrum")

# Probably the image is not greyscale.
result = {'job_uuid': uuid.uuid1()}
result['status'] = 'failed'
m = 'Analysis error. Probably the colorbar is not greyscale.'
result['message'] = m
result['parameters'] = utils.build_params(method.lower(), avg.lower(),
t_min, t_max,
region,
trace_spacing,
url=url)

return jsonify(result)
payload = {'job_uuid': uuid1}
payload['parameters'] = utils.build_params(method, avg,
t_min, t_max, dt_param,
region,
trace_spacing,
url=url)
mess = 'Analysis error. Probably the colorbar is not greyscale.'
raise InvalidUsage(mess, status_code=410, payload=payload)

# Histogram.
if bins:
@@ -233,7 +269,9 @@ def freq():

return jsonify(result)


#
# Bruges logo and text generators
#
@application.route('/bruges')
@application.route('/bruges.png')
def bruges_png():
@@ -301,6 +339,7 @@ def main():
return render_template('index.html',
title='Home')


if __name__ == "__main__":
application.debug = True
application.run()
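
For context on the new image parameter this commit adds to /freq (alongside the existing url parameter), here is a minimal client-side sketch. It is not part of the commit, and the host, file name, and parameter values are illustrative only:

# Sketch only: the host and image file are placeholders, not from the repo.
import base64
import requests

with open("section.png", "rb") as f:                   # any seismic-section image
    b64 = base64.b64encode(f.read()).decode("ascii")   # /freq expects base64 text in 'image'

params = {
    "image": b64,        # new in this commit; alternative to passing 'url'
    "method": "xing",    # one of 'auto', 'fft', 'xing'
    "avg": "trim",       # 'trim' or 'mean'
    "ntraces": 20,
    "tmin": 0,
    "tmax": 2,
    "bins": 11,          # new default in this commit
}
r = requests.get("http://localhost:5000/freq", params=params)
print(r.json())

Note that sending a large base64 string as a GET query parameter can run into URL length limits, so this approach only suits small images.
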
49 changes: 40 additions & 9 deletions geophysics.py
@@ -24,7 +24,7 @@ def hilbert(s, phi=0):
real and complex parts separately.
"""
n = s.size
m = np.ceil((n + 1) / 2)
m = int(np.ceil((n + 1) / 2))

r0 = np.exp(1j * np.radians(phi))

@@ -58,31 +58,40 @@ def trim_mean(i, proportion):
"""
a = np.sort(np.array(i))
k = int(np.floor(a.size * proportion))
return np.mean(a[k:-k])
return np.nanmean(a[k:-k])


def parabolic(f, x):
"""
Interpolation.
"""
x = int(x)
f = np.concatenate([f, [f[-1]]])
xv = 1/2. * (f[x-1] - f[x+1]) / (f[x-1] - 2 * f[x] + f[x+1]) + x
yv = f[x] - 1/4. * (f[x-1] - f[x+1]) * (xv - x)
return (xv, yv)


def freq_from_crossings(sig, fs):
indices = ((sig[1:] >= 0) & (sig[:-1] < 0)).nonzero()
"""
Dominant frequency from zero-crossings.
"""
indices, = np.where((sig[1:] >= 0) & (sig[:-1] < 0))
crossings = [i - sig[i] / (sig[i+1] - sig[i]) for i in indices]
print("************* xings", crossings)
return fs / np.mean(np.diff(crossings))


def freq_from_autocorr(sig, fs):
"""
Dominant frequency from autocorrelation.
"""
sig = sig + 128
corr = np.convolve(sig, sig[::-1], mode='full')
corr = corr[len(corr)/2:]
corr = corr[int(len(corr)/2):]
d = np.diff(corr)
start = (d > 0).nonzero()[0][0] # nonzero() returns a tuple
peak = np.argmax(corr[start:]) + start
peak = np.argmax(corr[int(start):]) + start
px, py = parabolic(corr, peak)
return fs / px

@@ -96,7 +105,10 @@ def get_spectrum(signal, fs):
sig = db - np.amax(db) + 20
indices = ((sig[1:] >= 0) & (sig[:-1] < 0)).nonzero()
crossings = [z - sig[z] / (sig[z+1] - sig[z]) for z in indices]
mi, ma = np.amin(crossings), np.amax(crossings)
try:
mi, ma = np.amin(crossings), np.amax(crossings)
except:
mi, ma = 0, 0
x = np.arange(0, len(f)) # for back-interpolation
f_min = np.interp(mi, x, f)
f_max = np.interp(ma, x, f)
@@ -105,6 +117,9 @@


def freq_from_fft(signal, fs):
"""
Dominant frequency from FFT.
"""
f, a, f_min, f_max = get_spectrum(signal, fs)
i = np.argmax(a)
true_i = parabolic(np.log(a), i)[0]
@@ -140,7 +155,7 @@ def get_phase(i):

# Get the interpolated phase values
results = []
for ix in biggest_pruned:
for ix in map(int, biggest_pruned):
true_i = parabolic(np.log(abs(e)+0.01), ix)[0]
x = np.arange(0, len(e))
rad = np.interp(true_i, x, np.angle(e))
@@ -164,20 +179,36 @@ def analyse(i, t_min, t_max, trace_indices, func):

spec, freq, phase, snr = [], [], [], []
mis, mas = [], []

print("****** i has shape", i.shape)
print("****** traceindices", trace_indices)

for ti in trace_indices:
trace = i[:, ti]
try:
f = func(trace, fs)
print("**************** f", f)
freq.append(f)
except Exception as e:
print("**!! func ** ", e)

try:
p = get_phase(trace)
phase.append(p)
except Exception as e:
print("**!! phase ** ", e)

try:
snr.append(get_snr(trace))
except Exception as e:
print("**!! snr ** ", e)

try:
frq, amp, fmi, fma = get_spectrum(trace, fs)
spec.append(amp)
mis.append(fmi)
mas.append(fma)
except Exception:
continue
except Exception as e:
print("**!! spec ** ", e)

return spec, freq, phase, snr, mis, mas
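
As a quick sanity check on the zero-crossing estimator revised above, a standalone sketch (not part of the commit; the signal and sample rate are synthetic, and it assumes geophysics.py is importable):

# Sketch: check freq_from_crossings against a synthetic sine wave.
import numpy as np
from geophysics import freq_from_crossings

fs = 250.0                            # samples per second (arbitrary choice)
t = np.arange(0, 1, 1 / fs)
sig = np.sin(2 * np.pi * 30.0 * t)    # 30 Hz test signal

print(freq_from_crossings(sig, fs))   # expect a value close to 30.0

The estimator finds upward zero-crossings, interpolates their positions between samples, and returns the sample rate divided by the mean crossing interval, so a clean 30 Hz sine should come back as roughly 30.
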
4 changes: 2 additions & 2 deletions templates/index.html
@@ -33,8 +33,8 @@ <h2>Parameters</h2>
<li><code>tmax</code> — the time of the bottom pixel, in s (default <code>1.0</code>)</li>
<li><code>ntraces</code> — number of traces to extract to get statistics from (default 10)</li>
<li><code>trace_spacing</code> — can be <code>regular</code> or <code>random</code></li>
<li><code>bins</code> — number of bins for the histogram (default <code>9</code>); use <code>0</code> for no histogram.</li>
<li><code>region</code> — the region to analyse in pixels, like <code>100,100,900,900</code> (default is all of it)</li>
<li><code>bins</code> — number of bins for the histogram (default <code>11</code>); use <code>0</code> for no histogram.</li>
<li><code>region</code> — the region to analyse in pixels, like <code>100,100,900,900</code> (default is all of it). Coordinates are left, top, right, bottom (or, equivalently, (x, y) for the top-left corner, then (x, y) for the bottom-right corner), all measured in pixels from the origin at the top left.</li>
</ul>
<h2>Example</h2>
<p>This GET request is the same as the one shown in the link above.</p>
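
The region parameter documented above uses the same (left, top, right, bottom) pixel convention as PIL's Image.crop, which is what app.py calls on the supplied image. A minimal sketch, not part of the commit, with a placeholder file name:

# Sketch: region=100,100,900,900 corresponds to this PIL crop box.
from PIL import Image

im = Image.open("section.png")             # placeholder image
cropped = im.crop((100, 100, 900, 900))    # (left, top, right, bottom), in pixels from the top-left origin
print(cropped.size)                        # -> (800, 800)
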
