Commit debe9ff

granie bug fixed
janursa committed Aug 22, 2024
1 parent f98576c commit debe9ff
Showing 4 changed files with 159 additions and 2 deletions.
2 changes: 1 addition & 1 deletion src/methods/multi_omics/granie_ns/main.nf
@@ -8,7 +8,7 @@ workflow run_wf {
| granie.run(
fromState: [
multiomics_rna_r: "multiomics_rna_r",
-        multiomics_ata_r: "multiomics_ata_r",
+        multiomics_atac_r: "multiomics_atac_r",
num_workers: "num_workers"
],
toState: [prediction:"prediction"]
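The fix is a one-letter rename (ata → atac) on both sides of the fromState map, which feeds the GRaNIE component's arguments from the workflow state; with the old spelling, the ATAC-seq input was looked up under a key absent from the state. A minimal Python sketch of the state-lookup side of such a mapping (illustrative names only, not the actual Viash implementation):

state = {
    "multiomics_rna_r": "rna.rds",
    "multiomics_atac_r": "atac.rds",
    "num_workers": 4,
}

def from_state(state, mapping):
    # mapping: component argument name -> workflow state key
    return {arg: state.get(key) for arg, key in mapping.items()}

# Before the fix: the misspelled key finds nothing in the state.
assert from_state(state, {"multiomics_atac_r": "multiomics_ata_r"})["multiomics_atac_r"] is None

# After the fix: the ATAC-seq input reaches the component.
assert from_state(state, {"multiomics_atac_r": "multiomics_atac_r"})["multiomics_atac_r"] == "atac.rds"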
2 changes: 1 addition & 1 deletion src/robustness_analysis/add_noise_grn.py
@@ -5,7 +5,7 @@
layer = 'scgen_pearson'
grn_folder = 'resources/grn_models'
grn_folder_noised = 'resources/supplementary/grn_models_noised'
-noise_ratio = 0.2
+noise_ratio = 0.5
# permute_ratio = 0.2

# Ensure the output folder exists
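This bumps the perturbation strength for the robustness analysis from 0.2 to 0.5. A rough sketch of what a weight-noising step at that ratio can look like (hypothetical "weight" column and Gaussian scheme; the actual add_noise_grn.py may differ):

import numpy as np
import pandas as pd

def add_noise(grn: pd.DataFrame, noise_ratio: float, seed: int = 0) -> pd.DataFrame:
    # Jitter each edge weight with zero-mean Gaussian noise scaled to the weight's own magnitude.
    rng = np.random.default_rng(seed)
    noised = grn.copy()
    noised["weight"] += rng.normal(0.0, noise_ratio * noised["weight"].abs())
    return noised

grn = pd.DataFrame({"source": ["TF1", "TF2"], "target": ["G1", "G2"], "weight": [0.8, -0.3]})
noised = add_noise(grn, noise_ratio=0.5)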
28 changes: 28 additions & 0 deletions src/workflows/robustness_analysis/config.vsh.yaml
@@ -0,0 +1,28 @@

functionality:
name: run_robustness_analysis
namespace: "workflows"
info:
label: run_robustness_analysis
summary: "Evaluates GRNs and provides scores using regression analysis."


resources:
- type: nextflow_script
path: main.nf
entrypoint: run_wf
- type: file
path: ../../api/task_info.yaml
dependencies:
- name: common/extract_metadata
repository: openproblemsv2
- name: metrics/regression_1
repositories:
- name: openproblemsv2
type: github
repo: openproblems-bio/openproblems-v2
tag: main_build
platforms:
- type: nextflow
directives:
label: [ midtime, midmem, lowcpu ]
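This config registers the new workflow with Viash: main.nf as the Nextflow entrypoint, the shared task_info.yaml as a bundled resource, two component dependencies pulled from the openproblems-v2 repository, and midtime/midmem/lowcpu resource labels for the Nextflow platform. A quick sanity check of the parsed structure (assumes PyYAML is installed and the repository root is the working directory):

import yaml

with open("src/workflows/robustness_analysis/config.vsh.yaml") as f:
    cfg = yaml.safe_load(f)["functionality"]

print(cfg["name"])                                   # run_robustness_analysis
print([dep["name"] for dep in cfg["dependencies"]])  # ['common/extract_metadata', 'metrics/regression_1']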
129 changes: 129 additions & 0 deletions src/workflows/robustness_analysis/main.nf
@@ -0,0 +1,129 @@

workflow auto {
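  // Build the input states from params, run the workflow below, and auto-publish the resulting state.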
findStatesTemp(params, meta.config)
| meta.workflow.run(
auto: [publish: "state"]
)
}

workflow run_wf {
take:
input_ch

main:

// construct list of metrics
metrics = [
regression_1
]

/***************************
* RUN METRICS *
***************************/
score_ch = input_ch
| map{ id, state ->
[id, state + ["_meta": [join_id: id]]]
}

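    // Controls are computed in-workflow: when method_id selects one of them,
    // the matching component runs and writes its prediction into the state.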
| positive_control.run(
runIf: { id, state ->
state.method_id == 'positive_control'
},
fromState: [
perturbation_data: "perturbation_data",
layer: "layer",
tf_all: "tf_all"
],
toState: {id, output, state ->
state + [
prediction: output.prediction
]
}
)
| negative_control.run(
runIf: { id, state ->
state.method_id == 'negative_control'
},
fromState: [
perturbation_data: "perturbation_data"
],
toState: {id, output, state ->
state + [
prediction: output.prediction
]
}
)

// run all metrics
| runEach(
components: metrics,
id: { id, state, comp ->
id + "." + comp.config.functionality.name
},
// use 'fromState' to fetch the arguments the component requires from the overall state
fromState: [
perturbation_data: "perturbation_data",
layer: "layer",
prediction: "prediction",
subsample: "subsample",
reg_type: "reg_type",
method_id: "method_id",
max_workers: "max_workers",
consensus: "consensus"
],
// use 'toState' to publish that component's outputs to the overall state
toState: { id, output, state, comp ->
state + [
metric_id: comp.config.functionality.name,
metric_output: output.score
]
}
)

output_ch = score_ch

// extract the scores
| extract_metadata.run(
key: "extract_scores",
fromState: [input: "metric_output"],
toState: { id, output, state ->
state + [
score_uns: readYaml(output.output).uns
]
}
)
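readYaml then lifts the .uns mapping out of the metadata YAML written by extract_metadata. In Python terms (illustrative file name):

import yaml

with open("extracted_score_metadata.yaml") as f:
    score_uns = yaml.safe_load(f)["uns"]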

| joinStates { ids, states ->
assert states[0]._meta, "no _meta found in state[0]"
// store the metric configs in a file
def metric_configs = metrics.collect{it.config}
def metric_configs_yaml_blob = toYamlBlob(metric_configs)
def metric_configs_file = tempFile("metric_configs.yaml")
metric_configs_file.write(metric_configs_yaml_blob)

def task_info_file = meta.resources_dir.resolve("task_info.yaml")

// store the scores in a file
def score_uns = states.collect{it.score_uns}
def score_uns_yaml_blob = toYamlBlob(score_uns)
def score_uns_file = tempFile("score_uns.yaml")
score_uns_file.write(score_uns_yaml_blob)

def new_state = [
metric_configs: metric_configs_file,
scores: score_uns_file,
_meta: states[0]._meta
]

["output", new_state]
}

// merge all of the output data
| joinStates{ ids, states ->
def mergedStates = states.inject([:]) { acc, m -> acc + m }
[ids[0], mergedStates]
}

emit:
output_ch
}
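The closing joinStates folds all remaining states into one map via Groovy's inject over map addition, with later states overriding earlier ones on duplicate keys. A Python analogue of that fold (illustrative data):

from functools import reduce

states = [
    {"metric_id": "regression_1", "metric_output": "run_a.h5ad"},
    {"scores": "score_uns.yaml", "_meta": {"join_id": "run_a"}},
]

# Equivalent of: states.inject([:]) { acc, m -> acc + m }
merged = reduce(lambda acc, m: {**acc, **m}, states, {})
print(sorted(merged))  # ['_meta', 'metric_id', 'metric_output', 'scores']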
