2d_event_writer.py~
import configparser as cp
import os
import numpy as np
import csv
import h5py

from support.maximum import maximum
from support.norm import norm
from support.find_quakes import find_quakes
from support.slip_beginnings import slip_beginnings
from support.slip_endings import slip_endings
from support.max_distance import max_distance


NBODIES = 2
FINAL_TIME = 1000  # s
FINAL_VELOCITY = 1e-5  # m/s
THRESHOLD_VELOCITY = 0.5 * FINAL_VELOCITY  # m/s

TANGENTIAL_COORDS = 1
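# only velocity components from index TANGENTIAL_COORDS onward are treated
# as tangential when computing slip rates below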

# read config ini
config = cp.ConfigParser()
config.read('config.ini')
sim_path = config.get('directories', 'simulation')
exp_path = config.get('directories', 'experiment')
out_path = config.get('directories', 'output')

# create output directory
if not os.path.exists(out_path):
    os.mkdir(out_path)

h5file = h5py.File(os.path.join(sim_path, 'output.h5'), 'r')
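# output.h5 is expected to provide a global 'relativeTime' dataset and one
# 'body<N>' group per body with 'coordinates', 'displacement' and 'velocity'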

# read time
relative_time = np.array(h5file['relativeTime'])
real_time = relative_time * FINAL_TIME

# detect slip events ("quakes") for each body and summarise them per event
for body_ID in range(NBODIES):
    body = 'body' + str(body_ID)

    if body not in h5file:
        continue

    # read data
    coordinates = np.array(h5file[body + '/coordinates'])
    displacement = np.array(h5file[body + '/displacement'])
    velocity = np.array(h5file[body + '/velocity'])

    num_vertices = displacement.shape[1]
    velocity_norm = norm(velocity[:, :, TANGENTIAL_COORDS:])
    maximum_velocity = maximum(velocity_norm)

    quake_starts, quake_ends = find_quakes(THRESHOLD_VELOCITY,
                                           maximum_velocity)
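    # quake_starts/quake_ends hold the time-step indices delimiting each
    # interval in which the peak slip rate exceeds THRESHOLD_VELOCITY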

    quakes = {}
    for quake_ID in range(len(quake_starts)):
        quake_start = int(quake_starts[quake_ID])
        quake_end = int(quake_ends[quake_ID])

        quake = {}
        quake['start'] = real_time[quake_start]
        quake['end'] = real_time[quake_end]

        local_slipping_times = velocity_norm[quake_start:quake_end, :] \
            > THRESHOLD_VELOCITY
        quake_displacement = displacement[quake_start:quake_end, :, :]
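        # per-vertex slip: norm of the displacement accumulated between the
        # time steps at which the vertex starts and stops slipping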
        slip = np.zeros(num_vertices)

        for i in range(num_vertices):
            if any(local_slipping_times[:, i]):
                starts = slip_beginnings(local_slipping_times[:, i])
                ends = slip_endings(local_slipping_times[:, i])
                slip[i] = np.linalg.norm(quake_displacement[ends, i, :]
                                         - quake_displacement[starts, i, :])

        quake['peakSlip'] = max(slip)
        quake['peakSlipRate'] = max(maximum_velocity[quake_start:quake_end])

        max_rupture_width = 0
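        # rupture width: largest distance between any two simultaneously
        # slipping vertices, measured in the deformed configuration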
        for time_step in range(local_slipping_times.shape[0]):
            slipping_time = local_slipping_times[time_step, :]
            if not any(slipping_time):
                continue

            slipping_coords = coordinates[slipping_time] \
                + displacement[quake_start + time_step, slipping_time]
            if len(slipping_coords) > 1:
                pair = np.array(max_distance(slipping_coords))
                max_rupture_width = max(max_rupture_width,
                                        np.linalg.norm(pair[0] - pair[1]))

        quake['ruptureWidth'] = max_rupture_width
        quakes[quake_ID] = quake

    # output quake data to csv file (one row per detected event)
    if not quakes:
        continue

    csv_columns = list(quakes[0].keys())
    csv_file_path = os.path.join(out_path, 'events' + str(body_ID) + '.csv')
    try:
        with open(csv_file_path, 'w') as csv_file:
            writer = csv.DictWriter(csv_file, fieldnames=csv_columns)
            writer.writeheader()
            for quake_ID in quakes:
                writer.writerow(quakes[quake_ID])
    except IOError:
        print('I/O error while writing ' + csv_file_path)

h5file.close()