From fbe261bbd5e111aa68ab1bb27573426745ae8323 Mon Sep 17 00:00:00 2001
From: kengz
Date: Tue, 22 Dec 2020 23:09:10 -0800
Subject: [PATCH 1/4] support scalar output in tst

---
 torcharc/module/transformer/pytorch_tst.py | 12 +++++++++---
 torcharc/module/transformer/tst.py         | 14 ++++++++++----
 2 files changed, 19 insertions(+), 7 deletions(-)

diff --git a/torcharc/module/transformer/pytorch_tst.py b/torcharc/module/transformer/pytorch_tst.py
index 86afe9d..76a5a06 100644
--- a/torcharc/module/transformer/pytorch_tst.py
+++ b/torcharc/module/transformer/pytorch_tst.py
@@ -47,6 +47,7 @@ def __init__(
         q: int = 8,  # Dimension of queries and keys.
         v: int = 8,  # Dimension of values.
         chunk_mode: bool = 'chunk',
+        scalar_output: bool = False,
     ) -> None:
         super().__init__()
 
@@ -66,6 +67,7 @@ def __init__(
         self.decoders = nn.TransformerDecoder(decoder_layer, num_layers=num_decoder_layers)
 
         self.out_linear = nn.Linear(d_model, out_channels)
+        self.scalar_output = scalar_output
 
     def forward(self, x: torch.Tensor) -> torch.Tensor:
         '''
@@ -78,9 +80,13 @@ def forward(self, x: torch.Tensor) -> torch.Tensor:
         '''
         encoding = self.encoders(encoding.transpose(0, 1))
         decoding = encoding
-        if self.pe is not None:  # position encoding
-            decoding = self.pe(decoding.transpose(0, 1)).transpose(0, 1)
-        decoding = self.decoders(decoding, encoding).transpose(0, 1)
+        if len(self.decoders.layers):
+            if self.pe is not None:  # position encoding
+                decoding = self.pe(decoding.transpose(0, 1)).transpose(0, 1)
+            decoding = self.decoders(decoding, encoding).transpose(0, 1)
+
+        if self.scalar_output:  # if scalar output is wanted instead of seq output, take the first index of the seq
+            decoding = decoding[:, 0, :]
 
         output = self.out_linear(decoding)
         return output

diff --git a/torcharc/module/transformer/tst.py b/torcharc/module/transformer/tst.py
index 4133c38..4ee8433 100644
--- a/torcharc/module/transformer/tst.py
+++ b/torcharc/module/transformer/tst.py
@@ -49,6 +49,7 @@ def __init__(
         q: int = 8,  # Dimension of queries and keys.
         v: int = 8,  # Dimension of values.
         chunk_mode: bool = 'chunk',
+        scalar_output: bool = False,
     ) -> None:
         super().__init__()
 
@@ -77,6 +78,7 @@ def __init__(
                                                activation=activation,
                                                chunk_mode=chunk_mode) for _ in range(num_decoder_layers)])
         self.out_linear = nn.Linear(d_model, out_channels)
+        self.scalar_output = scalar_output
 
     def forward(self, x: torch.Tensor) -> torch.Tensor:
         '''
@@ -89,10 +91,14 @@ def forward(self, x: torch.Tensor) -> torch.Tensor:
         '''
         encoding = self.encoders(encoding)
         decoding = encoding
-        if self.pe is not None:  # position encoding
-            decoding = self.pe(decoding)
-        for layer in self.decoders:
-            decoding = layer(decoding, encoding)
+        if len(self.decoders):
+            if self.pe is not None:  # position encoding
+                decoding = self.pe(decoding)
+            for layer in self.decoders:
+                decoding = layer(decoding, encoding)
+
+        if self.scalar_output:  # if scalar output is wanted instead of seq output, take the first index of the seq
+            decoding = decoding[:, 0, :]
 
         output = self.out_linear(decoding)
         return output
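
Review note: a minimal, self-contained sketch of what the new scalar_output branch does to the decoder output before out_linear is applied. It mirrors the added decoding = decoding[:, 0, :] line; the tensor and its shape are illustrative stand-ins, and no torcharc module is constructed here:

    # illustration only: stand-in for the decoder output of shape (batch, seq, features)
    import torch

    batch_size, seq_len, d_model = 4, 10, 8
    decoding = torch.rand(batch_size, seq_len, d_model)

    scalar_output = True
    if scalar_output:  # same branch as the patched forward(): keep only the first sequence index
        decoding = decoding[:, 0, :]

    print(decoding.shape)  # torch.Size([4, 8]), i.e. (batch, features) instead of (batch, seq, features)

With out_linear applied afterwards, the module therefore returns a tensor of shape (batch_size, out_channels) when scalar_output=True, and (batch_size, seq_len, out_channels) otherwise.
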
From f9f5ad71b25834a6aef320ae1740463f95653746 Mon Sep 17 00:00:00 2001
From: kengz
Date: Tue, 22 Dec 2020 23:09:30 -0800
Subject: [PATCH 2/4] bump version

---
 setup.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/setup.py b/setup.py
index c187952..9258551 100644
--- a/setup.py
+++ b/setup.py
@@ -35,7 +35,7 @@ def run_tests(self):
 
 setup(
     name='torcharc',
-    version='0.0.5',
+    version='0.0.6',
     description='Build PyTorch networks by specifying architectures.',
     long_description='https://github.com/kengz/torcharc',
     keywords='torcharc',

From e71ec95212e17bb80397bc855446eaa491e9af05 Mon Sep 17 00:00:00 2001
From: kengz
Date: Tue, 29 Dec 2020 22:00:44 -0500
Subject: [PATCH 3/4] update CI

---
 .github/workflows/ci.yml | 74 ++++++++++++++++++++++------------------
 1 file changed, 40 insertions(+), 34 deletions(-)

diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 2de99cb..5eb1080 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -2,43 +2,49 @@ name: CI
 
 on:
   push:
-    branches: [ master ]
+    branches: [main]
   pull_request:
-    branches: [ master ]
+    branches: [main]
 
 jobs:
   build:
     runs-on: ubuntu-latest
+
     steps:
-    - uses: actions/checkout@v2
-    - name: Cache Conda
-      uses: actions/cache@v1
-      with:
-        path: /usr/share/miniconda/envs/torcharc
-        key: ${{ runner.os }}-conda-${{ hashFiles('environment.yml') }}
-        restore-keys: |
-          ${{ runner.os }}-conda-
-    - name: Setup Conda dependencies
-      uses: goanpeca/setup-miniconda@v1
-      with:
-        activate-environment: torcharc
-        environment-file: environment.yml
-        python-version: 3.8
-        auto-activate-base: false
-    - name: Conda info
-      shell: bash -l {0}
-      run: |
-        conda info
-        conda list
-    - name: Setup flake8 annotations
-      uses: rbialon/flake8-annotations@v1
-    - name: Lint with flake8
-      shell: bash -l {0}
-      run: |
-        pip install flake8
-        # exit-zero treats all errors as warnings.
-        flake8 . --ignore=E501 --count --exit-zero --statistics
-    - name: Run tests
-      shell: bash -l {0}
-      run: |
-        python setup.py test
+      - uses: actions/checkout@v2
+
+      - name: Cache Conda
+        uses: actions/cache@v2
+        with:
+          path: /usr/share/miniconda/envs/torcharc
+          key: ${{ runner.os }}-conda-${{ hashFiles('environment.yml') }}
+          restore-keys: |
+            ${{ runner.os }}-conda-
+
+      - name: Setup Conda dependencies
+        uses: conda-incubator/setup-miniconda@v2
+        with:
+          activate-environment: torcharc
+          environment-file: environment.yml
+          python-version: 3.8
+          auto-activate-base: false
+
+      - name: Conda info
+        shell: bash -l {0}
+        run: |
+          conda info
+          conda list
+
+      - name: Setup flake8 annotations
+        uses: rbialon/flake8-annotations@v1
+      - name: Lint with flake8
+        shell: bash -l {0}
+        run: |
+          pip install flake8
+          # exit-zero treats all errors as warnings.
+          flake8 . --ignore=E501 --count --exit-zero --statistics
+
+      - name: Run tests
+        shell: bash -l {0}
+        run: |
+          python setup.py test

From 5170718f717d485305da7ed0438132130240887a Mon Sep 17 00:00:00 2001
From: kengz
Date: Tue, 29 Dec 2020 22:07:43 -0500
Subject: [PATCH 4/4] address flake8

---
 torcharc/module_builder.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/torcharc/module_builder.py b/torcharc/module_builder.py
index 3f4f494..dee25c6 100644
--- a/torcharc/module_builder.py
+++ b/torcharc/module_builder.py
@@ -91,7 +91,7 @@ def infer_in_shape(arc: dict, xs: Union[torch.Tensor, NamedTuple]) -> None:
         arc.update(in_shape=in_shape)
     elif nn_type == 'FiLMMerge':
         assert ps.is_tuple(xs)
-        assert len(arc['in_names']) == 2, f'FiLMMerge in_names should only specify 2 keys for feature and conditioner'
+        assert len(arc['in_names']) == 2, 'FiLMMerge in_names should only specify 2 keys for feature and conditioner'
         shapes = {name: list(x.shape)[1:] for name, x in xs._asdict().items() if name in arc['in_names']}
         arc.update(shapes=shapes)
     else:
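
Review note: the change above removes the f prefix from an assert message that contains no placeholders, which flake8 (via pyflakes) reports as an f-string missing placeholders (F541) — most likely the warning this commit addresses. A small illustration with made-up strings; only the last variant needs the f prefix:

    n_names = 2
    bad = f'in_names should only specify 2 keys'              # no placeholders: flake8 flags the f prefix
    ok = 'in_names should only specify 2 keys'                # plain string, nothing to flag
    also_ok = f'in_names should only specify {n_names} keys'  # placeholder present, f prefix is justified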