diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000000000000000000000000000000000000..d9a10c0d8e868ebf8da0b3dc95bb0be634c34bfe
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,176 @@
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner. For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+
+      "Contributor" shall mean Licensor and any individual or Legal Entity
+      on behalf of whom a Contribution has been received by Licensor and
+      subsequently incorporated within the Work.
+
+   2. Grant of Copyright License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      copyright license to reproduce, prepare Derivative Works of,
+      publicly display, publicly perform, sublicense, and distribute the
+      Work and such Derivative Works in Source or Object form.
+
+   3. Grant of Patent License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      (except as stated in this section) patent license to make, have made,
+      use, offer to sell, sell, import, and otherwise transfer the Work,
+      where such license applies only to those patent claims licensable
+      by such Contributor that are necessarily infringed by their
+      Contribution(s) alone or by combination of their Contribution(s)
+      with the Work to which such Contribution(s) was submitted. If You
+      institute patent litigation against any entity (including a
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
+      or a Contribution incorporated within the Work constitutes direct
+      or contributory patent infringement, then any patent licenses
+      granted to You under this License for that Work shall terminate
+      as of the date such litigation is filed.
+
+   4. Redistribution. You may reproduce and distribute copies of the
+      Work or Derivative Works thereof in any medium, with or without
+      modifications, and in Source or Object form, provided that You
+      meet the following conditions:
+
+      (a) You must give any other recipients of the Work or
+          Derivative Works a copy of this License; and
+
+      (b) You must cause any modified files to carry prominent notices
+          stating that You changed the files; and
+
+      (c) You must retain, in the Source form of any Derivative Works
+          that You distribute, all copyright, patent, trademark, and
+          attribution notices from the Source form of the Work,
+          excluding those notices that do not pertain to any part of
+          the Derivative Works; and
+
+      (d) If the Work includes a "NOTICE" text file as part of its
+          distribution, then any Derivative Works that You distribute must
+          include a readable copy of the attribution notices contained
+          within such NOTICE file, excluding those notices that do not
+          pertain to any part of the Derivative Works, in at least one
+          of the following places: within a NOTICE text file distributed
+          as part of the Derivative Works; within the Source form or
+          documentation, if provided along with the Derivative Works; or,
+          within a display generated by the Derivative Works, if and
+          wherever such third-party notices normally appear. The contents
+          of the NOTICE file are for informational purposes only and
+          do not modify the License. You may add Your own attribution
+          notices within Derivative Works that You distribute, alongside
+          or as an addendum to the NOTICE text from the Work, provided
+          that such additional attribution notices cannot be construed
+          as modifying the License.
+
+      You may add Your own copyright statement to Your modifications and
+      may provide additional or different license terms and conditions
+      for use, reproduction, or distribution of Your modifications, or
+      for any such Derivative Works as a whole, provided Your use,
+      reproduction, and distribution of the Work otherwise complies with
+      the conditions stated in this License.
+
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
diff --git a/README.md b/README.md
index 66a8a726bcbaa30fd0f252a4747a24d578587268..d426eed8da1f7987403f508faace51e76d863046 100644
--- a/README.md
+++ b/README.md
@@ -1,26 +1,39 @@
 # Optimus Prime
-Implementation of the Transformer architecture and different variations on it,
-as well as the training loop, written from scratch in PyTorch.
+(Yet another) PyTorch framework for training large language models.
-Most of the code is easy to read and should be self-explanatory, as it is
-documented.
+## How to use
-There is an example of how to use the 'framework' in
-`optimus/example_training.py`.
+### Training
-## Training a custom model with a custom dataset
+An example of how to use the framework can be found in `training.py`. Feel free
+to adapt as needed. Also see [Custom training](#custom-training).
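+
+A minimal sketch of that flow is shown below. The hyperparameter values, the
+tokenizer path and the `'test'` split name are illustrative placeholders (the
+real options are exposed on the command line of `training.py`, see `--help`),
+and the `Trainer` setup is omitted; see `optimus/trainer.py` for its options.
+
+```python
+import torch
+
+from optimus.datasets import WikiText103Dataset
+from optimus.tokenizers import SentencePieceTokenizer
+from optimus.dataloader import OptimusDataLoader
+from optimus.models import OptimusTransformer
+
+device = 'cuda' if torch.cuda.is_available() else 'cpu'
+
+# load a trained SentencePiece tokenizer (path is a placeholder)
+tok = SentencePieceTokenizer(model_path='optimus16K-wikitext103.model')
+
+# load the dataset splits ('test' split name is assumed here)
+train_ds = WikiText103Dataset(split='train')
+test_ds = WikiText103Dataset(split='test')
+
+# batch the tokenized text; bs and seq_len values are illustrative
+dl = OptimusDataLoader(train_ds, test_ds, tok, bs=8, seq_len=512,
+                       device=device)
+
+# model sizes below are placeholders, not recommendations
+model = OptimusTransformer(len(tok),
+                           n_layers=6,
+                           dim=512,
+                           n_heads=8,
+                           p_drop=0.1,
+                           weight_tying=False)
+model = model.to(device)
+
+# the data loader and the model are then handed to optimus.trainer.Trainer,
+# which runs the training loop
+```
+
+To train a brand new tokenizer instead of reusing one of the provided `*.model`
+files, see the `__main__` block of `optimus/tokenizers/sentencepiece.py`, which
+calls `SentencePieceTokenizer.train(iter(lines), vocab_size=16000)` on a
+plain-text corpus.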
-The requirements to train a model of a custom size from scratch are:
-1. Get a dataset from wherever, and process it to work with the other components
-   of the 'framework'. See `optimus/datasets/wikitext103.py` for an example.
-2. Create a tokenizer for the model, or use the existing LLama 2 tokenizer
-   (`optimus/llama32K.model`). To create a tokenizer, see
-   `optimus/tokenizer.py`. Creating a tokenizer requires a dataset ready.
-3. Create the model, and specify the training parameters (see
-   `optimus/example_training.py` for an example of what options you can
-   provide). Additionally, `optimus/trainer.py` can be directly modified, for
-   options currently not exposed through an interface.
+### Inference
+
+After training a model (or getting hold of one from other sources), an example
+of how to run inference can be found in `inference.py`. Feel free to adapt as
+needed.
+
+## Basic building blocks
+
+Like its parent, PyTorch, the framework is split into a number of modules. The
+most important ones are the `OptimusDataLoader`, the `Dataset`s, the
+`Trainer`, the tokenizers and the models. These can be combined and adapted in
+any way, shape or form to train a model from scratch.
+
+## Custom training
+
+The usual workflow is to create and train a tokenizer (see `optimus/tokenizers`
+for an example), prepare a dataset (see `optimus/datasets` for an example),
+create a model architecture (see `optimus/models` for an example) and use the
+data loader and the trainer modules to train the model. The `Trainer` module
+has a number of useful options which can be used during training (mixed
+precision training, checkpointing, gradient accumulation, plotting the
+training loss, etc.; see `optimus/trainer.py` for everything the Trainer can
+do).
+
+Of course, any number of the above can simply be used with their defaults.
 > [!TIP]
 > You can choose which GPU's to train on, using the environment variable
@@ -29,7 +42,11 @@ The requirements to train a model of a custom size from scratch are:
 ## Required packages
-There are a number of packages required to run the thing. Get your closest
+There are a number of packages required to run the framework. Get your closest
 Python retailer and ask him to run the following command:
 `pip install torch fire sentencepiece fastprogress matplotlib`
+
+## License
+
+See [LICENSE](LICENSE).
diff --git a/optimus/llama32K.model b/llama32K.model
similarity index 100%
rename from optimus/llama32K.model
rename to llama32K.model
diff --git a/optimus/dataloader.py b/optimus/dataloader.py
index b1e00c961547fdd071d294743b8545636833f823..7bcd74ee52ce2d5f5e35215db751ca6f899a1709 100644
--- a/optimus/dataloader.py
+++ b/optimus/dataloader.py
@@ -6,7 +6,7 @@
 import torch
 from torch import Tensor
 from torch.utils.data import Dataset
-from tokenizer import Tokenizer
+from optimus.tokenizers import SentencePieceTokenizer
 class _OptimusDLIter(Iterator):
@@ -113,7 +113,7 @@ class OptimusDataLoader():
     def __init__(self,
                  train_ds: Dataset,
                  test_ds: Dataset,
-                 tok: Tokenizer,
+                 tok: SentencePieceTokenizer,
                  bs: int,
                  seq_len: int,
                  shuffle: bool = True,
@@ -132,7 +132,7 @@
         Args:
             train_ds (Dataset): The training dataset.
             test_ds (Dataset): The testing dataset.
-            tok (Tokenizer): Tokenizer to use.
+            tok (SentencePieceTokenizer): Tokenizer to use.
             bs (int): Batch size. The number of samples per batch to load.
             seq_len (int): Sequence length. Also referred to as context length.
             shuffle (bool): Whether to shuffle the training data before
diff --git a/optimus/datasets/__init__.py b/optimus/datasets/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..0d9084efadeafaf1e00c798a57ffa82c4a299c4c
--- /dev/null
+++ b/optimus/datasets/__init__.py
@@ -0,0 +1,2 @@
+from .tinystories import TinyStoriesDataset
+from .wikitext103 import WikiText103Dataset
diff --git a/optimus/datasets/tinystories.py b/optimus/datasets/tinystories.py
index 4928a20bda06587f0ce86cedce6f1e9e32f40ad8..ba895677b3f3a5418f3730615346efbeb7b52198 100644
--- a/optimus/datasets/tinystories.py
+++ b/optimus/datasets/tinystories.py
@@ -2,7 +2,7 @@
 import os
 from torch.utils.data import Dataset, DataLoader
-from datasets.dataset_utils import *
+from .dataset_utils import *
 URL = {
diff --git a/optimus/datasets/wikitext103.py b/optimus/datasets/wikitext103.py
index 2743c61e7b7f4b5b293d16e776f22ba9fff05f51..62229c4cf6ec09a2deacddbf012a8b96568db47d 100644
--- a/optimus/datasets/wikitext103.py
+++ b/optimus/datasets/wikitext103.py
@@ -3,7 +3,7 @@
 import re
 from torch.utils.data import Dataset, DataLoader
-from datasets.dataset_utils import *
+from .dataset_utils import *
 URL = 'https://s3.amazonaws.com/research.metamind.io/wikitext/wikitext-103-v1.zip'
diff --git a/optimus/models/__init__.py b/optimus/models/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..ce35655a73bbb58ebef9b2c672180e023763e737
--- /dev/null
+++ b/optimus/models/__init__.py
@@ -0,0 +1 @@
+from .optimus import OptimusTransformer
diff --git a/optimus/model.py b/optimus/models/optimus.py
similarity index 99%
rename from optimus/model.py
rename to optimus/models/optimus.py
index 5a432655c81579bcc42b0b001db2750bd8d0d735..36ce3b08e834d56658c824469f505da6a5ec04b5 100644
--- a/optimus/model.py
+++ b/optimus/models/optimus.py
@@ -155,7 +155,7 @@ class TransformerBlock(nn.Module):
         return x
-class Transformer(nn.Module):
+class OptimusTransformer(nn.Module):
     def __init__(self,
                  vocab_sz: int,
                  dim: int = 512,
diff --git a/optimus/tokenizers/__init__.py b/optimus/tokenizers/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..aee6674dbd4384a2df2a3bbce6e31195e4960177
--- /dev/null
+++ b/optimus/tokenizers/__init__.py
@@ -0,0 +1 @@
+from .sentencepiece import SentencePieceTokenizer
diff --git a/optimus/tokenizer.py b/optimus/tokenizers/sentencepiece.py
similarity index 96%
rename from optimus/tokenizer.py
rename to optimus/tokenizers/sentencepiece.py
index 0a9ac95a337941ad3f732fcaa89d881f50eb8ca0..6d9a8068cf260ec88a892ff255f23df0736a4cbc 100644
--- a/optimus/tokenizer.py
+++ b/optimus/tokenizers/sentencepiece.py
@@ -5,7 +5,7 @@
 from collections.abc import Iterable
 import sentencepiece as spm
-class Tokenizer():
+class SentencePieceTokenizer():
     def __init__(self, model_path: str = 'optimus.model'):
         """
         SentencePiece tokenizer.
@@ -125,10 +125,10 @@
 if __name__=='__main__':
     filename = './wikitext-103/wiki.train.tokens'
     with open(filename, 'r') as f:
         lines = f.readlines()
-    Tokenizer.train(iter(lines), vocab_size=16000)
+    SentencePieceTokenizer.train(iter(lines), vocab_size=16000)
 else:
-    tok = Tokenizer(model_path='./optimus.model')
+    tok = SentencePieceTokenizer(model_path='./optimus.model')
     print(len(tok))
     print(tok.encode("this is some sunny day", False, True))
     print(tok.encode_as_pieces("this is some sunny day"))
diff --git a/optimus/trainer.py b/optimus/trainer.py
index 626c214da88c086071c6f27f2bf77adcbfc79223..abfbaafa2bdf7d0c59bde5c9d1848d8315c4affd 100644
--- a/optimus/trainer.py
+++ b/optimus/trainer.py
@@ -7,7 +7,7 @@
 import torch.nn as nn
 import torch.optim as optim
 from fastprogress.fastprogress import master_bar, progress_bar, format_time
-from dataloader import OptimusDataLoader
+from optimus.dataloader import OptimusDataLoader
 class Trainer():
diff --git a/optimus/optimus16K-wikitext103.model b/optimus16K-wikitext103.model
similarity index 100%
rename from optimus/optimus16K-wikitext103.model
rename to optimus16K-wikitext103.model
diff --git a/optimus/optimus60K-wikitext103.model b/optimus60K-wikitext103.model
similarity index 100%
rename from optimus/optimus60K-wikitext103.model
rename to optimus60K-wikitext103.model
diff --git a/optimus/example_training.py b/training.py
similarity index 86%
rename from optimus/example_training.py
rename to training.py
index 9bfed385050375712931ced5b290d8d8c2f0d390..3efbfd21d7813266790faab6f789fa8d4d8a3d5a 100644
--- a/optimus/example_training.py
+++ b/training.py
@@ -2,11 +2,11 @@
 import fire
 import torch
 from torch import nn
-from datasets.wikitext103 import WikiText103Dataset
-from tokenizer import Tokenizer
-from dataloader import OptimusDataLoader
-from model import Transformer
-from trainer import Trainer
+from optimus.datasets import WikiText103Dataset
+from optimus.tokenizers import SentencePieceTokenizer
+from optimus.dataloader import OptimusDataLoader
+from optimus.models import OptimusTransformer
+from optimus.trainer import Trainer
 def main(batch_size: int = 8,
@@ -62,7 +62,7 @@ def main(batch_size: int = 8,
         f"Please see '--help' if you want to change these settings")
     # load tokenizer
-    tok = Tokenizer(model_path=tokenizer_path)
+    tok = SentencePieceTokenizer(model_path=tokenizer_path)
     # load dataset splits
     train_ds = WikiText103Dataset(split='train')
@@ -78,12 +78,12 @@ def main(batch_size: int = 8,
         device=device)
     # create model and move to device
-    model = Transformer(len(tok),
-                        n_layers=n_layers,
-                        dim=dim,
-                        n_heads=n_heads,
-                        p_drop=dropout,
-                        weight_tying=False)
+    model = OptimusTransformer(len(tok),
+                               n_layers=n_layers,
+                               dim=dim,
+                               n_heads=n_heads,
+                               p_drop=dropout,
+                               weight_tying=False)
     model = model.to(device)
     _total_params = sum(p.numel() for p in model.parameters())