Skip to content

Commit

Permalink
Adding more documentation
Browse files Browse the repository at this point in the history
  • Loading branch information
GMW99 committed May 17, 2024
1 parent bbfabea commit 25c5fcd
Showing 1 changed file with 10 additions and 10 deletions.
20 changes: 10 additions & 10 deletions examples/mvp.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -362,7 +362,7 @@
"source": [
"class FlaxKompressor(Kompressor):\n",
" def __init__(\n",
" self, encode_fn, decode_fn, padding, model_fn, predictions_fn, seed=None\n",
" self, encode_fn, decode_fn, padding, model_fn, predictions_fn\n",
" ):\n",
" super().__init__(encode_fn=encode_fn, decode_fn=decode_fn, padding=padding)\n",
" self.model_fn = model_fn\n",
Expand Down Expand Up @@ -541,7 +541,7 @@
" )\n",
"\n",
" for label in level_encoded_maps.keys() & level_highres_maps.keys():\n",
" summaries[(f\"{writer_path}/{label}/run_length\")].extend(\n",
" summaries[f\"{writer_path}/{label}/run_length\"].extend(\n",
" list(\n",
" kom.image.metrics.mean_run_length(\n",
" level_encoded_maps[label]\n",
Expand Down Expand Up @@ -591,7 +591,7 @@
" A Numpy array containing the decoded data along with an additional axis to create Height, Width, Channel.\n",
"\n",
" Examples:\n",
" If we have an Data path array of (\"/tmp/0.mrc\",0) with shape (5,5) this will\n",
" If we have a Data path array of (\"/tmp/0.mrc\",0) with shape (5,5) this will\n",
" return the numpy array with the shape (5,5,1)\n",
" \"\"\"\n",
" mrc_file = mrcfile.mmap(data_paths[0])\n",
Expand All @@ -616,9 +616,9 @@
" A list of paths and frames.\n",
"\n",
" Example:\n",
" If we have an MRC file file shape 1,2,3 at /tmp/0.mrc then::\n",
"            If we have an MRC file of shape 1,2,3 at /tmp/0.mrc then::\n",
"\n",
" data = get_data_paths_and_frames(\"/tmp/0.mrc)\n",
" data = get_data_paths_and_frames(\"/tmp/0.mrc\")\n",
"\n",
" data will be:\n",
" [(\"/tmp/0.mrc\",0), (\"/tmp/0.mrc\",1),(\"/tmp/0.mrc\",2)]\n",
Expand Down Expand Up @@ -742,7 +742,7 @@
" def __call__(self, frame: np.ndarray) -> np.ndarray:\n",
" \"\"\"Batch to downsample from a high-resolution image.\n",
" Args:\n",
" batch: High resolution batch to downsample. Shape [height, width, channel]\n",
" frame: High resolution batch to downsample. Shape [height, width, channel]\n",
" Return:\n",
" np.ndarray: Downsampled batch: Shape [height, width, channel]\n",
" \"\"\"\n",
Expand Down Expand Up @@ -1012,7 +1012,7 @@
"source": [
"# Train dataset gets transformed for the creation of maps\n",
"train_dataset = MRCFileDataset(train_data, data_transforms)\n",
"# Test dataset does not need transformation as this is used to evalaute the performance of the model so only needs highres images\n",
"# Test dataset does not need transformation as this is used to evaluate the performance of the model so only needs highres images\n",
"test_data = MRCFileDataset(train_data)"
],
"outputs": [],
Expand Down Expand Up @@ -1116,7 +1116,7 @@
" padding=config[\"padding\"],\n",
" model_fn=ConvSRM(config[\"neighbourhood\"]),\n",
" predictions_fn=RegressionPredictor,\n",
").init(train_dataloader)"
").init(train_dataloader, seed = config[\"seed\"])"
],
"outputs": [],
"execution_count": null
Expand Down Expand Up @@ -1209,7 +1209,7 @@
" f\"baseline/{mode}/run_length\",\n",
" )\n",
" for highres in tqdm(dataset):\n",
" summaries[(f\"{writer_path}\")].extend(\n",
" summaries[f\"{writer_path}\"].extend(\n",
" list(kom.image.metrics.mean_run_length(highres).flatten())\n",
" )\n",
" return jnp.array(summaries[writer_path]).flatten().mean()"
Expand Down Expand Up @@ -1259,7 +1259,7 @@
" )\n",
"\n",
" for label in level_encoded_maps.keys() & level_highres_maps.keys():\n",
" summaries[(f\"{writer_path}/{label}/run_length\")].extend(\n",
" summaries[f\"{writer_path}/{label}/run_length\"].extend(\n",
" list(\n",
" kom.image.metrics.mean_run_length(\n",
" level_encoded_maps[label]\n",
Expand Down

0 comments on commit 25c5fcd

Please sign in to comment.