From 688307fdddcb8cc2e013a2c6e4c830af47fbb1ee Mon Sep 17 00:00:00 2001
From: Roman Ageev
Date: Wed, 4 Oct 2023 17:24:50 +0800
Subject: [PATCH] fix: doc links and naming

Fixed docs links and naming
---
 docs/docs/overview/intro.md | 24 ++++++++++++------------
 docs/docusaurus.config.js   |  4 ++--
 2 files changed, 14 insertions(+), 14 deletions(-)

diff --git a/docs/docs/overview/intro.md b/docs/docs/overview/intro.md
index bfac097..5b949da 100644
--- a/docs/docs/overview/intro.md
+++ b/docs/docs/overview/intro.md
@@ -29,7 +29,7 @@ pip install xturing
 
 **Welcome to xTuring: Personalize AI your way**
 
-In the world of AI, personalization is incredibly important for making AI truly powerful. This is where xTuring comes in – it's a special open-source software that helps you make AI models, called Large Language Models (LLMs), work exactly the way you want them to. 
+In the world of AI, personalization is incredibly important for making AI truly powerful. This is where xTuring comes in – it's a special open-source software that helps you make AI models, called Large Language Models (LLMs), work exactly the way you want them to.
 
 What's great about xTuring is that it's super easy to use. It has a simple interface that's designed to help you customize LLMs for your specific needs, whether it's for your own data or applications. Basically, xTuring gives you complete control over personalizing AI, making it work just the way you need it to.
 
@@ -53,16 +53,16 @@ To get started with xTuring, check out the [Quickstart](/overview/quickstart) gu
 
 | Model | Examples |
 | --- | --- |
-| Bloom | [Bloom fine-tuning on Alpaca dataset with/without LoRA and with/without INT8](https://github.com/stochasticai/xturing/tree/main/examples/bloom) |
-| Cerebras-GPT | [Cerebras-GPT fine-tuning on Alpaca dataset with/without LoRA and with/without INT8](https://github.com/stochasticai/xturing/tree/main/examples/cerebras) |
-| Falcon | [Falcon 7B fine-tuning on Alpaca dataset with/without LoRA and with/without INT8](https://github.com/stochasticai/xturing/tree/main/examples/falcon) |
-| Galactica | [Galactica fine-tuning on Alpaca dataset with/without LoRA and with/without INT8](https://github.com/stochasticai/xturing/tree/main/examples/galactica) |
-| Generic Wrapper | [Any large language model fine-tuning on Alpaca dataset with/without LoRA and with/without INT8](https://github.com/stochasticai/xturing/tree/main/examples/generic) |
-| GPT-J | [GPT-J 6B LoRA fine-tuning with/without INT8 ](https://github.com/stochasticai/xturing/tree/main/examples/gptj) |
-| GPT-2 | [GPT-2 fine-tuning on Alpaca dataset with/without LoRA and with/without INT8](https://github.com/stochasticai/xturing/tree/main/examples/gpt2) |
-| LLaMA | [LLaMA 7B fine-tuning on Alpaca dataset with/without LoRA and with/without INT8](https://github.com/stochasticai/xturing/tree/main/examples/llama) |
-| LLaMA 2 | [LLaMA 2 7B fine-tuning on Alpaca dataset with/without LoRA and with/without INT8](https://github.com/stochasticai/xturing/tree/main/examples/llama2) |
-| OPT | [OPT fine-tuning on Alpaca dataset with/without LoRA and with/without INT8](https://github.com/stochasticai/xturing/tree/main/examples/opt) |
+| Bloom | [Bloom fine-tuning on Alpaca dataset with/without LoRA and with/without INT8](https://github.com/stochasticai/xturing/tree/main/examples/models/bloom) |
+| Cerebras-GPT | [Cerebras-GPT fine-tuning on Alpaca dataset with/without LoRA and with/without INT8](https://github.com/stochasticai/xturing/tree/main/examples/models/cerebras) |
+| Falcon | [Falcon 7B fine-tuning on Alpaca dataset with/without LoRA and with/without INT8](https://github.com/stochasticai/xturing/tree/main/examples/models/falcon) |
+| Galactica | [Galactica fine-tuning on Alpaca dataset with/without LoRA and with/without INT8](https://github.com/stochasticai/xturing/tree/main/examples/models/galactica) |
+| Generic Wrapper | [Any large language model fine-tuning on Alpaca dataset with/without LoRA and with/without INT8](https://github.com/stochasticai/xturing/tree/main/examples/models/generic) |
+| GPT-J | [GPT-J 6B LoRA fine-tuning with/without INT8 ](https://github.com/stochasticai/xturing/tree/main/examples/models/gptj) |
+| GPT-2 | [GPT-2 fine-tuning on Alpaca dataset with/without LoRA and with/without INT8](https://github.com/stochasticai/xturing/tree/main/examples/models/gpt2) |
+| LLaMA | [LLaMA 7B fine-tuning on Alpaca dataset with/without LoRA and with/without INT8](https://github.com/stochasticai/xturing/tree/main/examples/models/llama) |
+| LLaMA 2 | [LLaMA 2 7B fine-tuning on Alpaca dataset with/without LoRA and with/without INT8](https://github.com/stochasticai/xturing/tree/main/examples/models/llama2) |
+| OPT | [OPT fine-tuning on Alpaca dataset with/without LoRA and with/without INT8](https://github.com/stochasticai/xturing/tree/main/examples/models/opt) |
 
 xTuring is licensed under [Apache 2.0](https://github.com/stochasticai/xturing/blob/main/LICENSE)
 
@@ -85,4 +85,4 @@ The people who created xTuring come from a place called Stochastic, where lots o
 
 **Here to Help You Succeed**: Our job doesn't stop with making xTuring. We're here to help you learn and use AI in the best way possible. We want you to feel confident using our tool in the fast-changing world of AI.
 
-[Come Work with Us](/contributing) and be part of the future of AI with xTuring. We're all about new ideas and making AI better for everyone. We're here to help you every step of the way.
\ No newline at end of file
+[Come Work with Us](/contributing) and be part of the future of AI with xTuring. We're all about new ideas and making AI better for everyone. We're here to help you every step of the way.
diff --git a/docs/docusaurus.config.js b/docs/docusaurus.config.js
index 3892865..688c45a 100644
--- a/docs/docusaurus.config.js
+++ b/docs/docusaurus.config.js
@@ -68,7 +68,7 @@ const config = {
         items: [
           {
             href: 'https://github.com/stochasticai/xturing',
-            label: 'xTuring',
+            label: 'GitHub',
            position: 'right',
           }
         ],
@@ -106,7 +106,7 @@ const config = {
           title: 'More',
           items: [
             {
-              label: 'Github',
+              label: 'GitHub',
              href: 'https://github.com/stochasticai/xturing',
             },
           ],
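Note for reviewers: every example directory relinked in the table above documents the same basic fine-tuning flow. For context, here is a minimal sketch of that flow, based on the `InstructionDataset`/`BaseModel` API shown in the xTuring README; the model key and dataset path below are illustrative, not part of this patch.

```python
# Minimal xTuring fine-tuning sketch (paths and model key are illustrative).
from xturing.datasets.instruction_dataset import InstructionDataset
from xturing.models import BaseModel

# Load an Alpaca-style instruction dataset from a local directory.
dataset = InstructionDataset("./alpaca_data")

# "llama_lora_int8" selects LLaMA 7B with LoRA adapters loaded in INT8;
# keys like "llama" or "llama_lora" pick the with/without variants
# referenced in the table above.
model = BaseModel.create("llama_lora_int8")

# Fine-tune on the instruction dataset, then generate from the tuned model.
model.finetune(dataset=dataset)
output = model.generate(texts=["Why are LLMs becoming so important?"])
print(output)
```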