Skip to content

Commit

Permalink
bug fix
Browse files · Browse the repository at this point in the history
  • Loading branch information
gaushh committed Dec 19, 2022
1 parent cc7b4d8 commit 76426a2
Show file tree
Hide file tree
Showing 3 changed files with 5 additions and 3 deletions.
6 changes: 3 additions & 3 deletions notebooks/try.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -212,7 +212,7 @@
},
{
"cell_type": "code",
"execution_count": 25,
"execution_count": 3,
"outputs": [],
"source": [
"import torch\n",
Expand Down Expand Up @@ -272,7 +272,7 @@
},
{
"cell_type": "code",
"execution_count": 28,
"execution_count": 1,
"outputs": [],
"source": [
"from transformers import BertForMaskedLM\n",
Expand All @@ -286,7 +286,7 @@
},
{
"cell_type": "code",
"execution_count": 44,
"execution_count": 4,
"outputs": [],
"source": [
"class BertSelfAttention(nn.Module):\n",
Expand Down
1 change: 1 addition & 0 deletions src/modelling/architecture.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
import math
import torch
import torch.nn as nn
from typing import List, Optional, Tuple, Union

class BertSelfAttention(nn.Module):
def __init__(self, config, position_embedding_type=None):
Expand Down
1 change: 1 addition & 0 deletions src/modelling/train_bert.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@
from architecture import BertSelfAttention
import wandb


with open("../config/exp02_config.yaml", "r") as yamlfile:
config = yaml.load(yamlfile, Loader=yaml.FullLoader)

Expand Down

0 comments on commit 76426a2

Please sign in to comment.