Add automated pep8 format check (#91)

* Add workflow for python script format check

* Add pep8 format style

* Update .pep8

* Add flag to ignore local config

* Format files with autopep8

* Format

* Format fixes

* Space alignment
kursatyurt authored Mar 30, 2022
1 parent bc005c4 commit c396315
Showing 12 changed files with 269 additions and 141 deletions.
15 changes: 15 additions & 0 deletions .github/workflows/pep8.yml
@@ -0,0 +1,15 @@
name: autopep8
on: [push, pull_request]
jobs:
autopep8_check:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: autopep8
id: autopep8
uses: peter-evans/autopep8@v1
with:
args: --ignore-local-config --recursive --diff --aggressive --aggressive --exit-code --ignore E402 --max-line-length 120 .
- name: Fail if autopep8 made changes
if: ${{ steps.autopep8.outputs.exit-code == 2 }}
run: exit 1
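
The job runs peter-evans/autopep8 in diff-only mode and fails whenever autopep8 would still change something: the action exposes autopep8's exit code as an output, and with --exit-code a value of 2 means a diff was produced. The same check can be reproduced before pushing; the snippet below is a hypothetical local helper, not part of this commit, and assumes autopep8 is installed (pip install autopep8):

# local_pep8_check.py -- hypothetical helper mirroring the CI arguments.
import subprocess
import sys

ARGS = [
    "autopep8", "--ignore-local-config", "--recursive", "--diff",
    "--aggressive", "--aggressive", "--exit-code",
    "--ignore", "E402", "--max-line-length", "120", ".",
]

result = subprocess.run(ARGS, capture_output=True, text=True)
if result.returncode == 2:  # 2 = autopep8 produced a diff, i.e. formatting is needed
    print(result.stdout)
    sys.exit(1)
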
6 changes: 6 additions & 0 deletions .pep8
@@ -0,0 +1,6 @@
[pycodestyle]
max_line_length = 120
ignore = E402
in-place = true
aggressive = 2
recursive = true
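
The .pep8 file mirrors those arguments for local use, while the workflow's --ignore-local-config flag keeps per-machine configuration from overriding the repository settings in CI. As a rough sketch, the same options can also be applied through autopep8's Python API (the file path and the list form of the ignore option are illustrative assumptions):

# Sketch only: format one of the repository scripts with the settings from .pep8.
import autopep8

with open("contrib/mapping-tester/gatherstats.py") as f:
    source = f.read()

fixed = autopep8.fix_code(
    source,
    options={"aggressive": 2, "max_line_length": 120, "ignore": ["E402"]},
)
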
24 changes: 14 additions & 10 deletions contrib/mapping-tester/gatherstats.py
@@ -6,10 +6,12 @@
import argparse
import glob


def parseArguments(args):
parser = argparse.ArgumentParser(description="Gathers stats after a run")
parser.add_argument('-o', '--outdir', default="cases", help='Directory to generate the test suite in.')
parser.add_argument('-f', '--file', type=argparse.FileType('w'), default="stats.csv", help='The resulting CSV file containing all stats.')
parser.add_argument('-f', '--file', type=argparse.FileType('w'), default="stats.csv",
help='The resulting CSV file containing all stats.')
return parser.parse_args(args)


@@ -19,16 +21,18 @@ def statsFromTimings(dir):
file = os.path.join(dir, "precice-B-events.json")
if os.path.isfile(file):
try:
timings={}
with open(file,"r") as jsonfile:
timings = {}
with open(file, "r") as jsonfile:
timings = json.load(jsonfile)["Ranks"][0]["Timings"]
stats["globalTime"] = timings["_GLOBAL"]["Max"]
stats["initializeTime"] = timings["initialize"]["Max"]
computeMappingName = [ x for x in timings.keys() if x.startswith("advance/map") and x.endswith("computeMapping.FromMeshAToMeshB")][0]
mapDataName = [ x for x in timings.keys() if x.startswith("advance/map") and x.endswith("mapData.FromMeshAToMeshB")][0]
computeMappingName = [x for x in timings.keys() if x.startswith(
"advance/map") and x.endswith("computeMapping.FromMeshAToMeshB")][0]
mapDataName = [x for x in timings.keys() if x.startswith(
"advance/map") and x.endswith("mapData.FromMeshAToMeshB")][0]
stats["computeMappingTime"] = timings[computeMappingName]["Max"]
stats["mapDataTime"] = timings[mapDataName]["Max"]
except:
except BaseException:
pass
return stats

@@ -43,7 +47,7 @@ def memoryStats(dir):
try:
with open(memfile, "r") as file:
total = sum([float(e) / 1024.0 for e in file.readlines()])
except:
except BaseException:
pass
stats[f"peakMem{P}"] = total

@@ -61,15 +65,15 @@ def main(argv):
allstats = []
fields = []
for file in statFiles:
print("Found: "+file)
casedir= os.path.join(args.outdir, os.path.dirname(file))
print("Found: " + file)
casedir = os.path.join(args.outdir, os.path.dirname(file))
parts = os.path.normpath(file).split(os.sep)
assert(len(parts) >= 5)
mapping, constraint, meshes, ranks, _ = parts[-5:]
meshA, meshB = meshes.split('-')
ranksA, ranksB = meshes.split('-')

with open(os.path.join(args.outdir, file),"r") as jsonfile:
with open(os.path.join(args.outdir, file), "r") as jsonfile:
stats = json.load(jsonfile)
stats["mapping"] = mapping
stats["constraint"] = constraint
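
The gatherstats.py changes are purely mechanical: long calls are wrapped, spaces are added around operators, and bare except: clauses become except BaseException:. For orientation, statsFromTimings expects a precice-B-events.json shaped roughly like the sketch below; the middle part of the advance/map keys and all numbers are invented, only the accessed fields come from the script:

# Illustrative layout read by statsFromTimings (values and key details are made up).
example_events = {
    "Ranks": [{
        "Timings": {
            "_GLOBAL": {"Max": 12.3},
            "initialize": {"Max": 0.8},
            "advance/map.example.computeMapping.FromMeshAToMeshB": {"Max": 4.1},
            "advance/map.example.mapData.FromMeshAToMeshB": {"Max": 1.7},
        }
    }]
}
timings = example_events["Ranks"][0]["Timings"]
computeMappingName = [x for x in timings.keys() if x.startswith(
    "advance/map") and x.endswith("computeMapping.FromMeshAToMeshB")][0]
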
97 changes: 57 additions & 40 deletions contrib/mapping-tester/generate.py
@@ -5,10 +5,12 @@
import argparse
from jinja2 import Template


def generateConfig(template, setup):
template = Template(template)
return template.render(setup)


def as_iter(something):
try:
iter(something)
@@ -41,14 +43,14 @@ def generateCases(setup):
"constraint": constraint,
"options": mapping.get("options", "")
},
"A" : {
"A": {
"ranks": ranksA,
"mesh": {
"name": inname,
"file": infile,
}
},
"B" : {
"B": {
"ranks": ranksB,
"mesh": {
"name": outname,
@@ -68,11 +70,11 @@ def getCaseFolders(case):
"{}-{}".format(
case["A"]["mesh"]["name"],
case["B"]["mesh"]["name"]
),
"{}-{}".format(
),
"{}-{}".format(
case["A"]["ranks"],
case["B"]["ranks"]
)]
)]


def caseToSortable(case):
@@ -91,40 +93,39 @@

def createMasterRunScripts(casemap, dir):
common = ["#!/bin/bash",
"",
'cd "$( dirname "${BASH_SOURCE[0]}" )"',
"RUNNER=/bin/bash",
""]
"",
'cd "$( dirname "${BASH_SOURCE[0]}" )"',
"RUNNER=/bin/bash",
""]

# Generate master runner script
content = common + [
"${RUNNER} " + os.path.join(case, "runall.sh")
for case in casemap.keys()
]
open(os.path.join(dir, "runall.sh"),"w").writelines([ line + "\n" for line in content ])
"${RUNNER} " + os.path.join(case, "runall.sh")
for case in casemap.keys()
]
open(os.path.join(dir, "runall.sh"), "w").writelines([line + "\n" for line in content])

# Generate master postprocessing script
post = common + [
"${RUNNER} " + os.path.join(case, "postprocessall.sh")
for case in casemap.keys()
]
open(os.path.join(dir, "postprocessall.sh"),"w").writelines([ line + "\n" for line in post ])
"${RUNNER} " + os.path.join(case, "postprocessall.sh")
for case in casemap.keys()
]
open(os.path.join(dir, "postprocessall.sh"), "w").writelines([line + "\n" for line in post])

for case, instances in casemap.items():
# Generate master runner script
content = common + [
"${RUNNER} " + os.path.join(*instance, "run-wrapper.sh")
for instance in instances
]
open(os.path.join(dir, case, "runall.sh"),"w").writelines([ line + "\n" for line in content ])
"${RUNNER} " + os.path.join(*instance, "run-wrapper.sh")
for instance in instances
]
open(os.path.join(dir, case, "runall.sh"), "w").writelines([line + "\n" for line in content])

# Generate master postprocessing script
post = common + [
"${RUNNER} " + os.path.join(*instance, "post.sh")
for instance in instances
]
open(os.path.join(dir, case, "postprocessall.sh"),"w").writelines([ line + "\n" for line in post ])

"${RUNNER} " + os.path.join(*instance, "post.sh")
for instance in instances
]
open(os.path.join(dir, case, "postprocessall.sh"), "w").writelines([line + "\n" for line in post])


def createRunScript(outdir, path, case):
@@ -133,15 +134,19 @@ def createRunScript(outdir, path, case):
ameshLocation = os.path.relpath(os.path.join(outdir, "meshes", amesh, str(aranks), amesh), path)

# Generate runner script
acmd = "/usr/bin/time -f %M -a -o memory-A.log preciceMap -v -p A --data \"{}\" --mesh {} || kill 0 &".format(case["function"], ameshLocation)
if aranks > 1: acmd = "mpirun -n {} $ASTE_A_MPIARGS {}".format(aranks, acmd)
acmd = "/usr/bin/time -f %M -a -o memory-A.log preciceMap -v -p A --data \"{}\" --mesh {} || kill 0 &".format(
case["function"], ameshLocation)
if aranks > 1:
acmd = "mpirun -n {} $ASTE_A_MPIARGS {}".format(aranks, acmd)

bmesh = case["B"]["mesh"]["name"]
branks = case["B"]["ranks"]
bmeshLocation = os.path.relpath(os.path.join(outdir, "meshes", bmesh, str(branks), bmesh), path)
mapped_data_name = case["function"] + "(mapped)"
bcmd = "/usr/bin/time -f %M -a -o memory-B.log preciceMap -v -p B --data \"{}\" --mesh {} --output mapped || kill 0 &".format(mapped_data_name, bmeshLocation)
if branks > 1: bcmd = "mpirun -n {} $ASTE_B_MPIARGS {}".format(branks, bcmd)
bcmd = "/usr/bin/time -f %M -a -o memory-B.log preciceMap -v -p B --data \"{}\" --mesh {} --output mapped || kill 0 &".format(
mapped_data_name, bmeshLocation)
if branks > 1:
bcmd = "mpirun -n {} $ASTE_B_MPIARGS {}".format(branks, bcmd)

content = [
"#!/bin/bash",
@@ -171,15 +176,15 @@ def createRunScript(outdir, path, case):
"fi",
"rm -f running",
]
open(os.path.join(path, "run.sh"),"w").writelines([ line + "\n" for line in content ])
open(os.path.join(path, "run.sh"), "w").writelines([line + "\n" for line in content])

# Generate wrapper script for runner
wrapper = [
"#!/bin/bash",
'cd "$( dirname "${BASH_SOURCE[0]}" )"',
"/bin/bash run.sh 2>&1 | tee run.log"
]
open(os.path.join(path, "run-wrapper.sh"),"w").writelines([ line + "\n" for line in wrapper ])
open(os.path.join(path, "run-wrapper.sh"), "w").writelines([line + "\n" for line in wrapper])

# Generate post processing script
post_content = [
@@ -194,15 +199,17 @@
]
if (branks == 1):
copycmd = "cp {}.conn.txt mapped.conn.txt".format(bmeshLocation)
diffcmd = "vtk_calculator.py --mesh mapped.txt -o error.vtk --diff --stats \"{}\" | tee diff.log".format(case["function"])
diffcmd = "vtk_calculator.py --mesh mapped.txt -o error.vtk --diff --stats \"{}\" | tee diff.log".format(
case["function"])
post_content += [copycmd, diffcmd]
else:
[recoveryFileLocation, tmpPrefix] = os.path.split(os.path.normpath(bmeshLocation))
tmprecoveryFile = recoveryFileLocation + "/{}_recovery.json".format(bmesh)
joincmd = "join_mesh.py --mesh mapped -r {} -o result.vtk".format(tmprecoveryFile)
diffcmd = "vtk_calculator.py --data error --diffdata \"{1}\" --diff --stats --mesh result.vtk --function \"{0}\" | tee diff.log".format(case["function"], mapped_data_name)
post_content += [joincmd,diffcmd]
open(os.path.join(path, "post.sh"),"w").writelines([ line + "\n" for line in post_content ])
diffcmd = "vtk_calculator.py --data error --diffdata \"{1}\" --diff --stats --mesh result.vtk --function \"{0}\" | tee diff.log".format(
case["function"], mapped_data_name)
post_content += [joincmd, diffcmd]
open(os.path.join(path, "post.sh"), "w").writelines([line + "\n" for line in post_content])


def setupCases(outdir, template, cases):
@@ -211,8 +218,8 @@ def setupCases(outdir, template, cases):
folders = getCaseFolders(case)
casemap.setdefault(folders[0], []).append(folders[1:])
name = [outdir] + folders
path=os.path.join(*name)
config=os.path.join(path, "precice-config.xml")
path = os.path.join(*name)
config = os.path.join(path, "precice-config.xml")

print(f"Generating {path}")
os.makedirs(path, exist_ok=True)
@@ -228,8 +235,18 @@
def parseArguments(args):
parser = argparse.ArgumentParser(description="Generator for a mapping test suite")
parser.add_argument('-o', '--outdir', default="cases", help='Directory to generate the test suite in.')
parser.add_argument('-s', '--setup', type=argparse.FileType('r'), default="setup.json", help='The test setup file to use.')
parser.add_argument('-t', '--template', type=argparse.FileType('r'), default="config-template.xml", help='The precice config template to use.')
parser.add_argument(
'-s',
'--setup',
type=argparse.FileType('r'),
default="setup.json",
help='The test setup file to use.')
parser.add_argument(
'-t',
'--template',
type=argparse.FileType('r'),
default="config-template.xml",
help='The precice config template to use.')
return parser.parse_args(args)


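
generate.py is likewise only reflowed; generateConfig still renders the preCICE configuration template with Jinja2 for every generated case. A toy illustration of that step (the template text and values are invented, not the real config-template.xml):

# Toy illustration of generateConfig: render a Jinja2 template from a case setup.
from jinja2 import Template

template_text = '<mapping kind="{{ mapping.kind }}" constraint="{{ mapping.constraint }}" />'
setup = {"mapping": {"kind": "nearest-neighbor", "constraint": "consistent"}}
print(Template(template_text).render(setup))
# -> <mapping kind="nearest-neighbor" constraint="consistent" />
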
32 changes: 23 additions & 9 deletions contrib/mapping-tester/plotconv.py
@@ -6,18 +6,32 @@
import matplotlib.pyplot as plt
import math


def parseArguments(args):
parser = argparse.ArgumentParser(description="Creates convergence plots from gathered stats")
parser.add_argument('-f', '--file', type=argparse.FileType('r'), default="stats.csv", help='The CSV file containing the gathered stats.')
parser.add_argument('-f', '--file', type=argparse.FileType('r'), default="stats.csv",
help='The CSV file containing the gathered stats.')
return parser.parse_args(args)


def lavg(l):
return math.exp(sum(map(math.log, l)) / len(l))


# seaborn.color_palette("colorblind", 10).as_hex()
style_colours = ['#0173b2', '#de8f05', '#029e73', '#d55e00', '#cc78bc', '#ca9161', '#fbafe4', '#949494', '#ece133', '#56b4e9']
style_colours = [
'#0173b2',
'#de8f05',
'#029e73',
'#d55e00',
'#cc78bc',
'#ca9161',
'#fbafe4',
'#949494',
'#ece133',
'#56b4e9']
style_markers = ["o", "D", "s"]
styles = [ (c, m) for m in style_markers for c in style_colours]
styles = [(c, m) for m in style_markers for c in style_colours]


def plotConv(ax, df, yname):
@@ -33,7 +47,7 @@ def plotConv(ax, df, yname):
# 1st order line
fox = [xmax, xmin]
foy1 = ymax
foy2 = foy1 * (fox[1]/fox[0])
foy2 = foy1 * (fox[1] / fox[0])
foy = [foy1, foy2]
ax.axline((fox[0], foy[0]), (fox[1], foy[1]), color="lightgray", linewidth=1.0, zorder=-1)
ax.annotate(
@@ -46,7 +60,7 @@ def plotConv(ax, df, yname):
# # 2nd order line
sox = [xmin, xmax]
soy1 = ymin
soy2 = soy1 * ((sox[1]/sox[0])**2)
soy2 = soy1 * ((sox[1] / sox[0])**2)
soy = [soy1, soy2]
print(sox, soy)
ax.axline((sox[0], soy[0]), (sox[1], soy[1]), color="lightgray", linewidth=1.0, zorder=-1)
Expand All @@ -59,7 +73,7 @@ def plotConv(ax, df, yname):


def plotError(df):
yname="relative-l2"
yname = "relative-l2"
fig, ax = plt.subplots(sharex=True, sharey=True)
series = df.groupby("mapping")
for grouped, style in zip(series, styles):
@@ -88,7 +102,7 @@ def plotError(df):


def plotMemory(df):
yname="peakMemB"
yname = "peakMemB"
fig, ax = plt.subplots(sharex=True, sharey=True)
series = df.groupby("mapping")
for grouped, style in zip(series, styles):
@@ -117,7 +131,7 @@ def plotMemory(df):


def plotComputeMappingTime(df):
yname="computeMappingTime"
yname = "computeMappingTime"
fig, ax = plt.subplots(sharex=True, sharey=True)
series = df.groupby("mapping")
for grouped, style in zip(series, styles):
@@ -147,7 +161,7 @@ def plotComputeMappingTime(df):


def plotMapDataTime(df):
yname="mapDataTime"
yname = "mapDataTime"
fig, ax = plt.subplots(sharex=True, sharey=True)
series = df.groupby("mapping")
for grouped, style in zip(series, styles):
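
plotconv.py only gains whitespace around operators and a reflowed colour list; the plotting logic is unchanged. The reference lines in plotConv still follow y2 = y1 * (x2 / x1) for first order and y2 = y1 * (x2 / x1)**2 for second order, and lavg is a geometric mean. A small numeric sketch with invented values:

# Sketch of the helpers used by plotconv.py (all numbers are made up).
import math

def lavg(values):
    # geometric mean, as defined in plotconv.py
    return math.exp(sum(map(math.log, values)) / len(values))

xmin, xmax, ymin, ymax = 0.01, 0.1, 1e-6, 1e-3
first_order_end = ymax * (xmin / xmax)          # slope 1 in log-log space
second_order_end = ymin * ((xmax / xmin) ** 2)  # slope 2 in log-log space
print(lavg([1.0, 10.0, 100.0]))                 # -> 10.0
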
