Tuesday, September 12, 2023

Gradient Tutorial to Fine Tune LLM for Free - Step by Step

This video is a tutorial on fine-tuning a large language model with Gradient using Python on AWS. With Gradient, you can fine-tune and get completions from private LLMs through a simple web API, with no infrastructure needed, so you can build private, SOC 2 compliant AI applications instantly.





Commands Used:


!pip install transformers

!pip install gradientai --upgrade


import os

os.environ['GRADIENT_ACCESS_TOKEN'] = "<TOKEN>"

os.environ['GRADIENT_WORKSPACE_ID'] = "<Workspace ID>"
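# Alternative (assumption: the python-dotenv package is installed): keep the token and
# workspace ID in a .env file and load them instead of hard-coding the values:
#   from dotenv import load_dotenv
#   load_dotenv()  # reads GRADIENT_ACCESS_TOKEN and GRADIENT_WORKSPACE_ID from .env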


from gradientai import Gradient


def main():

  with Gradient() as gradient:

      base_model = gradient.get_base_model(base_model_slug="nous-hermes2")


      new_model_adapter = base_model.create_model_adapter(

          name="My Model"

      )

      print(f"Model Adapter Id {new_model_adapter.id}")

      sample_query = "### Instruction: Who is Fahd Mirza? \n\n### Response:"

      print(f"Asking: {sample_query}")


      # before fine-tuning

      completion = new_model_adapter.complete(query=sample_query, max_generated_token_count=100).generated_output

      print(f"Before fine-tuning: {completion}")
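      # The training samples below follow the same "### Instruction ... ### Response"
      # prompt layout as the query above; each dict holds a single "inputs" string
      # containing both the prompt and the desired completion.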


      samples = [
          {
              "inputs": "### Instruction: Who is Fahd Mirza? \n\n### Response: Fahd Mirza is a technologist who shares his expertise on YouTube, covering topics such as AI, Cloud, DevOps, and databases."
          },
          {
              "inputs": "### Instruction: Please provide information about Fahd Mirza. \n\n### Response: Fahd Mirza is an experienced cloud engineer, AI enthusiast, and educator who creates educational content on various technical subjects on YouTube."
          },
          {
              "inputs": "### Instruction: What can you tell me about Fahd Mirza? \n\n### Response: Fahd Mirza is a content creator on YouTube, specializing in AI, Cloud, DevOps, and database technologies. He is known for his informative videos."
          },
          {
              "inputs": "### Instruction: Describe Fahd Mirza for me. \n\n### Response: Fahd Mirza is a YouTuber and blogger hailing from Australia, with a strong background in cloud engineering and artificial intelligence."
          },
          {
              "inputs": "### Instruction: Give me an overview of Fahd Mirza. \n\n### Response: Fahd Mirza, based in Australia, is a seasoned cloud engineer and AI specialist who shares his knowledge through YouTube content on topics like AI, Cloud, DevOps, and databases."
          },
          {
              "inputs": "### Instruction: Who exactly is Fahd Mirza? \n\n### Response: Fahd Mirza is an Australian-based content creator known for his YouTube channel, where he covers a wide range of technical subjects, including AI, Cloud, DevOps, and databases."
          },
      ]


      num_epochs = 5

      count = 0

      while count < num_epochs:

          print(f"Fine-tuning the model, Epoch iteration => {count + 1}")

          new_model_adapter.fine_tune(samples=samples)

          count = count + 1


      # After fine-tuning

      completion = new_model_adapter.complete(query=sample_query, max_generated_token_count=100).generated_output

      print(f"After Fine-Tuning: {completion}")


      new_model_adapter.delete()


if __name__ == "__main__":

    main() 
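
The script deletes the adapter at the end so the workspace stays clean. If you would rather keep the fine-tuned adapter and query it again later, a minimal sketch along these lines should work; it assumes the gradientai SDK's get_model_adapter() can look an adapter up by the ID printed during creation, and the placeholder ID is hypothetical, so check the Gradient documentation for the exact call.

from gradientai import Gradient

with Gradient() as gradient:
    # Assumption: fetch the previously created adapter by its ID
    # (skip new_model_adapter.delete() above if you want to reuse it).
    adapter = gradient.get_model_adapter(model_adapter_id="<Model Adapter ID>")
    completion = adapter.complete(
        query="### Instruction: Who is Fahd Mirza? \n\n### Response:",
        max_generated_token_count=100,
    ).generated_output
    print(f"Saved adapter says: {completion}")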


