#! /usr/bin/env python

# --------------------------------------------------------------------------
#
# example of using the MouseTrap module for massive detection of disturbances
# in a large dataset using a SeisComP database
#
# author: Jiri Vackar
#
# described in paper Vackar, Burjanek, and Zahradnik (2014)
#
# you need for running this example:
#   - access to SeisComP database
#   - access to ArcLink server (SED ArcLink server has free access, only set your e-mail address)
#   - your own PostgreSQL database for saving results
#   - specified structure prepared in your DB (use SwissMouse.sql)
# --------------------------------------------------------------------------


import pg # PyGreSQL
import numpy as np
from obspy.core import UTCDateTime
from obspy.arclink import Client #ArcLink
from obspy.core.util.geodetics import gps2DistAzimuth
from peak_values import peak_values # calculate peak values (pga etc.) at different frequencies
import os
from MouseTrap import *

# constants controlling event selection and record windows
mag = 1 # minimal magnitude to consider event
time_value = 1 # select events in last `time_value` `time_unit`
time_unit = 'month'  # PostgreSQL interval unit used in the event query below
t_before = 120 # records start `t_before` seconds before the event
t_after = 300 # records end `t_after` seconds after the event
outdir = 'Swiss' # directory where the fit plots and sac files are saved to

# create the output directory tree if it does not exist yet
if not os.path.exists(outdir):
    os.makedirs(outdir)

# plain-text processing log, kept open for the whole run (closed at script end)
log = open('%s/log.txt' % outdir, 'w')
# local PostgreSQL database where detection results are stored
# (tables must be prepared beforehand — see SwissMouse.sql)
SQLlog = pg.connect('my_user_name', 'localhost', 5432, None, None, 'db_user_name', 'db_password') # SET USERNAME AND PASSWORD

# remote SeisComP database holding the event and station catalogue
con1 = pg.connect('my_user_name', 'server.tld', 5434, None, None, 'db_user_name', 'db_password') # SET USERNAME AND PASSWORD

# Select the preferred origin and preferred magnitude of all events above the
# magnitude threshold within the last `time_value` `time_unit`(s), ordered by
# origin time. Table/column names follow the SeisComP database schema.
query = """
SELECT
	event.m_publicid AS eventid,
	origin.m_publicid AS originid,
	magnitude.m_magnitude_value AS mag,
	magnitude.m_type AS magtype,
	origin.m_time_value AS time, 
	origin.m_longitude_value AS long,
	origin.m_latitude_value AS lat,
	origin.m_depth_value AS depth,
	origin.m_depth_uncertainty AS depth_uncertainty
FROM origin
 INNER JOIN event ON event.m_preferredoriginid = origin.m_publicid
 INNER JOIN magnitude ON event.m_preferredmagnitudeid = magnitude.m_publicid
WHERE
 origin.m_time_value > now() - '{y:g} {u:s}'::interval AND
 (event.m_type = 'earthquake' or event.m_type = '' or event.m_type is null)
 AND
 m_magnitude_value > {mag:3.1f}
ORDER BY time
""".format(y=time_value, u=time_unit, mag=mag)

log.write('SQL query: SELECT events with M > %3.1f in last %i %s(s)...\n' % (mag, time_value, time_unit))
q1 = con1.query(query)
log.write('Events: ' + str(q1.ntuples()) + '\n\n')
# list of dicts, one per event, keyed by the column aliases selected above
events = q1.dictresult()

# ArcLink client used to download waveforms and instrument responses (PAZ)
client = Client(host="arclink.ethz.ch", user="")  # set your e-mail address: user="name@domain.tld"

# Main loop: for every selected event, find nearby stations, download records,
# run quality tests and the MouseTrap disturbance inversion, and store the
# outcome (or an error code) in the results database.
for event in events:
	# Select all streams of stations within a magnitude-dependent distance
	# from the epicentre which were recording during the requested time
	# window. Distances are computed by PostGIS on the WGS84 spheroid;
	# the channel type (EH / HH) is filtered later in Python.
	query = """SELECT
	network.m_code AS net,
	station.m_code AS sta,
	sensorlocation.m_code AS loc,
	substring(stream.m_code FROM 1 FOR 2) AS channels,
	station.m_longitude AS long,
	station.m_latitude AS lat,
	ST_DISTANCE(ST_GeographyFromText('SRID=4326;POINT({long:14.8f} {lat:14.8f})'),
		ST_GeographyFromText('SRID=4326;POINT(' || station.m_longitude || ' ' || station.m_latitude ||')') ) / 1000 AS dist_in_km
	FROM network
	INNER JOIN station ON station._parent_oid = network._oid
	INNER JOIN sensorlocation ON sensorlocation._parent_oid = station._oid
	INNER JOIN stream ON stream._parent_oid = sensorlocation._oid
	WHERE
	st_distance(ST_GeographyFromText('SRID=4326;POINT({long:14.8f} {lat:14.8f})'), 
	ST_GeographyFromText('SRID=4326;POINT(' || station.m_longitude || ' ' || station.m_latitude ||')') ) < (100 * 10 ^ ({mag:14.8f} * 1.1))
	AND
	st_distance(ST_GeographyFromText('SRID=4326;POINT({long:14.8f} {lat:14.8f})'), 
	ST_GeographyFromText('SRID=4326;POINT(' || station.m_longitude || ' ' || station.m_latitude ||')') ) < 100e3 AND
	stream.m_start < '{t}'::timestamp + '{t_b:g} second'::interval
	AND
	(stream.m_end > '{t}'::timestamp + '{t_e:g} second'::interval OR stream.m_end IS NULL)
	GROUP BY net, sta, loc, channels, long, lat, dist_in_km, stream.m_end
	ORDER BY dist_in_km, sta, loc
	""".format(long=event['long'], lat=event['lat'], mag=event['mag'], t=event['time'], t_b=t_before, t_e=t_after)

	q2 = con1.query(query)
	if q2.ntuples():
		# at least one station nearby: store the event in the results DB
		# NOTE(review): SQL is built by string formatting; values come from a
		# trusted SeisComP DB here, but parameterized queries would be safer
		SQLlog.query("INSERT INTO events VALUES ('{event:s}', '{origin:s}', '{mag:g}', '{magtype:s}', '{t:s}', '{long:g}', '{lat:g}', {dep:g}, {depunc:g})".format(event=event['eventid'], origin=event['originid'], mag=event['mag'], magtype=event['magtype'], t=event['time'], long=event['long'], lat=event['lat'], dep=(0 if event['depth'] is None else event['depth']), depunc=(0 if event['depth_uncertainty'] is None else event['depth_uncertainty'])))
	records = q2.dictresult()
	for rec in records:
		if not rec['channels'] in ['EH', 'HH']: # we are interested in these channels only (BB + short-period)
			continue
		
		# epicentral distance (m) and azimuth, computed on the ellipsoid
		DistAz = gps2DistAzimuth(rec['lat'], rec['long'], event['lat'], event['long'])
		log.write('{t:22s}   {magtype:>6s}={mag:3.1f} ({elong:10.5f}, {elat:10.5f}):  {net:4s} {sta:6s} ({slong:10.5f}, {slat:10.5f}) {loc:4s} {ch:4s}* ({dist:4.1f} km)'.format(t=event['time'], magtype=event['magtype'], mag=event['mag'], net=rec['net'], sta=rec['sta'], loc=rec['loc'], ch=rec['channels'], dist=rec['dist_in_km'], elong=event['long'], elat=event['lat'], slong=rec['long'], slat=rec['lat']))
		q0 = SQLlog.query("INSERT INTO records VALUES (DEFAULT, '{eventid:s}', '{net:s}', '{sta:s}', '{loc:s}', '{ch:s}', {long:g}, {lat:g}, {dist:g}, {az:g}) RETURNING id".format(eventid=event['eventid'], net=rec['net'], sta=rec['sta'], loc=rec['loc'], ch=rec['channels'], long=rec['long'], lat=rec['lat'], dist=DistAz[0], az=DistAz[1]))
		# primary key of the freshly inserted record; renamed from `id`
		# to avoid shadowing the builtin
		rec_id = q0.dictresult()[0]['id']
		
		# download data
		t = UTCDateTime(event['time'])
		try:
			st = client.getWaveform(rec['net'], rec['sta'], rec['loc'], rec['channels']+'*', t-t_before, t + t_after) # st = client.getWaveform('CH', 'VANNI', '', 'BH*', t-120, t + 300)
		except Exception: # narrowed from bare except so Ctrl-C can stop the batch
			log.write('    Downloading unsuccessful.\n')
			SQLlog.query("UPDATE records SET error=1 WHERE id=(%r)" % rec_id)
			continue
		dirname = st[0].stats.network+'_'+st[0].stats.station+'_'+rec['channels']+'x_'+str(st[0].stats.starttime)[0:19]
		os.mkdir(outdir+'/'+dirname)
		if len(st) != 3: # expect exactly one trace per component; more/fewer means a gap
			log.write('    Gap in data.\n')
			SQLlog.query("UPDATE records SET error=2 WHERE id=(%r)" % rec_id)
			continue

		# record preparation and tests of data quality
		if NoiseTest1_demean(st, t_before): # remove mean, test signal to noise ratio
			log.write('    Record too noisy.\n')
			SQLlog.query("UPDATE records SET error=3 WHERE id=(%r)" % rec_id)
			continue
		for i in range(3): # save waveform files for debugging
			component = st[i].stats.channel[2]
			st[i].write(outdir+'/'+dirname+'/tmp_'+component+'.sac', format='SAC')
		st_corr = st.copy() # keep original record in `st_corr`
		ToDisplacement(st, bitrate=10) # integrate and decimate
		if NoiseTest2(st, t_before): # test signal to noise ratio in integrated record
			log.write('    Record too noisy after integration.\n')
			SQLlog.query("UPDATE records SET error=4 WHERE id=(%r)" % rec_id)
			continue

		# Rename channel letters so components are always N, E, Z: components
		# already named N/E/Z keep their name, any other letters (e.g. 1/2/3)
		# get the remaining free names in N, E, Z order.
		comps = ['N', 'E', 'Z']
		strtr = {}
		problems = []
		for i in range(3):
			comp = st[i].stats.channel[2]
			if comp in comps:
				strtr[comp] = comp
				comps.remove(comp)
			else:
				problems.append(comp)
		for comp in problems:
			strtr[comp] = comps[0]
			comps = comps[1:]
		for i in range(3):
			comp = st[i].stats.channel[2]
			st[i].stats.channel = st[i].stats.channel[:2] + strtr[comp]

		# download poles and zeros via ArcLink
		try:
			paz = client.getPAZ(rec['net'], rec['sta'], rec['loc'], st[0].stats.channel, t)
		except Exception: # narrowed from bare except
			log.write('    Downloading poles and zeros via ArcLink failed.\n')
			SQLlog.query("UPDATE records SET error=5 WHERE id=(%r)" % rec_id)
			continue
		else:
			SQLlog.query("INSERT INTO record_sensor VALUES (%r, '%s', '%s', '%s')" % (rec_id, paz['sensor_manufacturer'], paz['sensor_model'], paz['sensitivity_unit']))

		# calculate peak values on the instrument-corrected copy of the record
		st_corr.integrate()
		if st_corr[0].stats.channel[0] == 'H': # broad band
			st_corr.filter("highpass", freq=0.025) # to improve: use the instrument corner frequency instead
		else: # short period
			st_corr.filter("highpass", freq=1)
		st_corr.simulate(paz_remove = paz)
		for tr in st_corr:
			comp = tr.stats.channel[2]
			try:
				peaks = peak_values(tr.slice(t, t+50))
			except Exception: # narrowed from bare except
				try:
					# best-effort warning; ignore failures of the warning insert itself
					SQLlog.query("INSERT INTO record_warning VALUES (%r, 4)" % rec_id)
				except Exception:
					pass
			else:
				SQLlog.query("INSERT INTO record_peaks VALUES (%r,%r, %r,%r,%r, %r,%r, %r,%r, %r,%r)" % ((rec_id, comp)+peaks))

		# inversion and output
		length = max(len(st[0]), len(st[1]), len(st[2]))
		dt = st[0].stats.delta
		try:
			mys = mouse()
			mys.create(paz, length*2, dt, length*dt, 1)
			mys.fit_3D(st, t_min=105, t_max=210)
			mys.plot(st, outfile='%s/%s/fit.png'%(outdir,dirname), xmax=300, ylabel='raw displacement')
			phi_deg = mys.phi*180./np.pi # azimuth in degrees
			theta_deg = mys.theta*180./np.pi # inclination in degrees
		except Exception: # narrowed from bare except
			log.write('    Mouse inversion crashed.\n')
			SQLlog.query("UPDATE records SET error=11 WHERE id=(%r)" % rec_id)
			continue
		else:
			log.write('    mouse onset: %6.1f s; fit: %5.2f' % (mys.onset, mys.fit))
			existence = mys.exist() # call once instead of twice
			if existence == 2:
				log.write('  MOUSE!\n')
			elif existence == 1:
				log.write('  mouse?\n')
			else:
				log.write('\n')
			SQLlog.query("INSERT INTO mouse1fit VALUES (%r, %r, %r, %r, %r, %r)" % (rec_id, mys.onset, mys.amplitude, phi_deg, theta_deg, mys.fit))
			SQLlog.query("UPDATE records SET error=0 WHERE id=(%r)" % rec_id)

			# per-record summary file written next to the fit plot
			log2 = open('%s/%s/log.txt' % (outdir, dirname), 'w')
			log2.write('=== mouse 1: acceleration discontinuity ===\n')
			log2.write('time of onset:       %6.1f s\n' % mys.onset)
			log2.write('amplitude:       %10.2e m s^-2\n' % mys.amplitude)
			log2.write('azimuth (phi):       %6.1f deg\n' % phi_deg)
			log2.write('inclination (theta): %6.1f deg\n' % theta_deg)
			log2.write('fit:                %7.2f\n' % mys.fit)
			log2.close()

log.close()