From b684c1204639549ff5f8ad862157786ab5e0ddb5 Mon Sep 17 00:00:00 2001 From: Your Name Date: Wed, 9 Oct 2024 17:15:06 -0400 Subject: [PATCH] [CLI] --- pyproject.toml | 34 +++- requirements.txt | 5 +- swarms_cloud/cli/__init__.py | 3 + swarms_cloud/cli/main.py | 108 ++++++++++ swarms_cloud/cli/onboarding.py | 197 +++++++++++++++++++ swarms_cloud/structs/deploy_gcp.py | 2 +- swarms_cloud/utils/Untitled-1.py | 62 ++++++ swarms_cloud/utils/log_to_swarms_database.py | 33 ++++ 8 files changed, 435 insertions(+), 9 deletions(-) create mode 100644 swarms_cloud/cli/main.py create mode 100644 swarms_cloud/cli/onboarding.py create mode 100644 swarms_cloud/utils/Untitled-1.py create mode 100644 swarms_cloud/utils/log_to_swarms_database.py diff --git a/pyproject.toml b/pyproject.toml index ccb422d..64e1fdd 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api" [tool.poetry] name = "swarms-cloud" -version = "0.3.7" +version = "0.3.8" description = "Swarms Cloud - Pytorch" license = "MIT" authors = ["Kye Gomez "] @@ -23,17 +23,37 @@ classifiers = [ [tool.poetry.dependencies] python = "^3.10" -swarms = "*" -fastapi = "*" skypilot = "*" +fastapi = "0.112.2" +supabase = "*" +pytest = "*" torch = "*" einops = "*" -pydantic = ">2,<3" -stripe = "*" -transformers = "*" -sse-starlette = "2.1.3" +tiktoken = "*" +sse-starlette = "2.0.0" uvicorn = "*" +loguru = "*" +pydantic = "*" +stripe = "*" +xformers = "*" +diffusers = "*" +transformers_stream_generator = "*" +bitsandbytes = "*" +peft = "*" +accelerate = "*" +transformers = "4.44.0" +huggingface-hub = "*" +optimum = "*" +auto-gptq = "*" +whisperx = "*" shortuuid = "*" +hf_transfer = "*" +swarms = "*" +rich = "*" +asyncio = "*" + +[tool.poetry.scripts] +swarms-cloud = "swarms_cloud.cli.main:main" [tool.poetry.group.lint.dependencies] ruff = ">=0.1.6,<0.7.0" diff --git a/requirements.txt b/requirements.txt index 739767f..80ddde1 100644 --- a/requirements.txt +++ b/requirements.txt @@ 
-30,4 +30,7 @@ uvicorn tiktoken pydantic asyncio -swarms-cloud \ No newline at end of file +swarms-cloud +rich +tiktoken +shortuuid \ No newline at end of file diff --git a/swarms_cloud/cli/__init__.py b/swarms_cloud/cli/__init__.py index e69de29..394a6bb 100644 --- a/swarms_cloud/cli/__init__.py +++ b/swarms_cloud/cli/__init__.py @@ -0,0 +1,3 @@ +from swarms_cloud.cli.onboarding import OnboardingProcess + +__all__ = ["OnboardingProcess"] diff --git a/swarms_cloud/cli/main.py b/swarms_cloud/cli/main.py new file mode 100644 index 0000000..90054d2 --- /dev/null +++ b/swarms_cloud/cli/main.py @@ -0,0 +1,108 @@ +import argparse +import os +import time + +from rich.console import Console +from rich.text import Text + +from swarms_cloud.cli.onboarding import OnboardingProcess + +console = Console() + + +ASCII_ART = """ + _________ + / _____/_ _ _______ _______ _____ ______ + \_____ \\ \/ \/ /\__ \\_ __ \/ \ / ___/ + / \\ / / __ \| | \/ Y Y \\___ \ +/_______ / \/\_/ (____ /__| |__|_| /____ > + \/ \/ \/ \/ + +""" + + +# Function to display the ASCII art in red +def show_ascii_art(): + text = Text(ASCII_ART, style="bold red") + console.print(text) + + +# Help command +def show_help(): + console.print( + """ + [bold cyan]Swarms Cloud CLI - Help[/bold cyan] + + [bold magenta]Commands:[/bold magenta] + [bold white]onboarding[/bold white] : Starts the onboarding process + [bold white]help[/bold white] : Shows this help message + [bold white]get-api-key[/bold white] : Retrieves your API key from the platform + [bold white]check-login[/bold white] : Checks if you're logged in and starts the cache + + For more details, visit: https://docs.swarms.world + """ + ) + + +# Fetch API key from platform +def get_api_key(): + console.print("[bold yellow]Opening the API key retrieval page...[/bold yellow]") + # Simulating API key retrieval process by opening the website + import webbrowser + + webbrowser.open("https://swarms.world/platform/api-keys") + time.sleep(2) + console.print( + 
"[bold green]Your API key is available on the dashboard.[/bold green]" + ) + + +# Check and start cache (login system simulation) +def check_login(): + cache_file = "cache.txt" + + if os.path.exists(cache_file): + with open(cache_file, "r") as f: + cache_content = f.read() + if cache_content == "logged_in": + console.print("[bold green]You are already logged in.[/bold green]") + else: + console.print("[bold red]You are not logged in.[/bold red]") + else: + console.print("[bold yellow]Logging in...[/bold yellow]") + time.sleep(2) + with open(cache_file, "w") as f: + f.write("logged_in") + console.print("[bold green]Login successful![/bold green]") + + +# Main CLI handler +def main(): + parser = argparse.ArgumentParser(description="Swarms Cloud CLI") + + # Adding arguments for different commands + parser.add_argument( + "command", + choices=["onboarding", "help", "get-api-key", "check-login"], + help="Command to run", + ) + + args = parser.parse_args() + + show_ascii_art() + + # Determine which command to run + if args.command == "onboarding": + OnboardingProcess().run() + elif args.command == "help": + show_help() + elif args.command == "get-api-key": + get_api_key() + elif args.command == "check-login": + check_login() + else: + console.print("[bold red]Unknown command! Type 'help' for usage.[/bold red]") + + +if __name__ == "__main__": + main() diff --git a/swarms_cloud/cli/onboarding.py b/swarms_cloud/cli/onboarding.py new file mode 100644 index 0000000..6e1238f --- /dev/null +++ b/swarms_cloud/cli/onboarding.py @@ -0,0 +1,197 @@ +import platform +import os +import socket +import psutil +import uuid +from loguru import logger +import json +import time +from typing import Dict +import requests +from swarms_cloud.utils.log_to_swarms_database import log_agent_data + + +class OnboardingProcess: + """ + This class handles the onboarding process for users. 
It collects user data including their
    first name, last name, email, Swarms API key, and system data, then autosaves it in both a
    main JSON file and a cache file for reliability. It supports loading previously saved or cached data.
    """

    def __init__(
        self,
        auto_save_path: str = "user_data.json",
        cache_save_path: str = "user_data_cache.json",
    ) -> None:
        """
        Initializes the OnboardingProcess with an autosave file path and a cache path.

        Args:
            auto_save_path (str): The path where user data is automatically saved.
            cache_save_path (str): The path where user data is cached for reliability.
        """
        self.user_data: Dict[str, str] = {}
        self.system_data: Dict[str, str] = self.capture_system_data()
        self.auto_save_path = auto_save_path
        self.cache_save_path = cache_save_path
        self.load_existing_data()

    def capture_system_data(self) -> Dict[str, str]:
        """
        Captures extensive system data including platform information, user ID, IP address, CPU count,
        memory information, and other system details.

        Returns:
            Dict[str, str]: A dictionary containing system data.
+ """ + try: + system_data = { + "platform": platform.system(), + "platform_version": platform.version(), + "platform_release": platform.release(), + "hostname": socket.gethostname(), + "ip_address": socket.gethostbyname(socket.gethostname()), + "cpu_count": psutil.cpu_count(logical=True), + "memory_total": f"{psutil.virtual_memory().total / (1024 ** 3):.2f} GB", + "memory_available": f"{psutil.virtual_memory().available / (1024 ** 3):.2f} GB", + "user_id": str(uuid.uuid4()), # Unique user identifier + "machine_type": platform.machine(), + "processor": platform.processor(), + "architecture": platform.architecture()[0], + } + + # Get external IP address + try: + system_data["external_ip"] = requests.get("https://api.ipify.org").text + except Exception as e: + logger.warning("Failed to retrieve external IP: {}", e) + system_data["external_ip"] = "N/A" + + return system_data + except Exception as e: + logger.error("Failed to capture system data: {}", e) + return {} + + def load_existing_data(self) -> None: + """ + Loads existing user data from the auto-save file or cache if available. + """ + if os.path.exists(self.auto_save_path): + try: + with open(self.auto_save_path, "r") as f: + self.user_data = json.load(f) + logger.info( + "Existing user data loaded from {}", self.auto_save_path + ) + return + except json.JSONDecodeError as e: + logger.error("Failed to load user data from main file: {}", e) + + # Fallback to cache if main file fails + if os.path.exists(self.cache_save_path): + try: + with open(self.cache_save_path, "r") as f: + self.user_data = json.load(f) + logger.info("User data loaded from cache: {}", self.cache_save_path) + except json.JSONDecodeError as e: + logger.error("Failed to load user data from cache: {}", e) + + def save_data(self, retry_attempts: int = 3) -> None: + """ + Saves the current user data to both the auto-save file and the cache file. If the main + save fails, the cache is updated instead. 
Implements retry logic with exponential backoff + in case both save attempts fail. + + Args: + retry_attempts (int): The number of retries if saving fails. + """ + attempt = 0 + backoff_time = 1 # Starting backoff time (in seconds) + + while attempt < retry_attempts: + try: + combined_data = {**self.user_data, **self.system_data} + log_agent_data(combined_data) + # threading.Thread(target=log_agent_data(combined_data)).start() + with open(self.auto_save_path, "w") as f: + json.dump(combined_data, f, indent=4) + # logger.info( + # "User and system data successfully saved to {}", + # self.auto_save_path, + # ) + with open(self.cache_save_path, "w") as f: + json.dump(combined_data, f, indent=4) + # logger.info( + # "User and system data successfully cached in {}", + # self.cache_save_path, + # ) + return # Exit the function if saving was successful + except Exception as e: + logger.error("Error saving user data (Attempt {}): {}", attempt + 1, e) + + # Retry after a short delay (exponential backoff) + time.sleep(backoff_time) + attempt += 1 + backoff_time *= 2 # Double the backoff time for each retry + + logger.error("Failed to save user data after {} attempts.", retry_attempts) + + def ask_input(self, prompt: str, key: str) -> None: + """ + Asks the user for input, validates it, and saves it in the user_data dictionary. + Autosaves and caches after each valid input. + + Args: + prompt (str): The prompt message to display to the user. + key (str): The key under which the input will be saved in user_data. + + Raises: + ValueError: If the input is empty or only contains whitespace. 
+ """ + try: + response = input(prompt) + if response.strip().lower() == "quit": + logger.info("User chose to quit the onboarding process.") + exit(0) + if not response.strip(): + raise ValueError(f"{key.capitalize()} cannot be empty.") + self.user_data[key] = response.strip() + self.save_data() + except ValueError as e: + logger.warning(e) + self.ask_input(prompt, key) + except KeyboardInterrupt: + logger.warning("Onboarding process interrupted by the user.") + exit(1) + + def collect_user_info(self) -> None: + """ + Initiates the onboarding process by collecting the user's full name, first name, email, + Swarms API key, and system data. + """ + logger.info("Initiating swarms cloud onboarding process...") + self.ask_input("Enter your first name (or type 'quit' to exit): ", "first_name") + self.ask_input("Enter your Last Name (or type 'quit' to exit): ", "last_name") + self.ask_input("Enter your email (or type 'quit' to exit): ", "email") + self.ask_input( + "Enter your Swarms API key (or type 'quit' to exit): Get this in your swarms dashboard: https://swarms.world/platform/api-keys ", + "swarms_api_key", + ) + logger.success("Onboarding process completed successfully!") + + def run(self) -> None: + """ + Main method to run the onboarding process. It handles unexpected errors and ensures + proper finalization. 
+ """ + try: + self.collect_user_info() + except Exception as e: + logger.error("An unexpected error occurred: {}", e) + finally: + logger.info("Finalizing the onboarding process.") + + +# if __name__ == "__main__": +# onboarding = OnboardingProcess() +# onboarding.run() diff --git a/swarms_cloud/structs/deploy_gcp.py b/swarms_cloud/structs/deploy_gcp.py index 65fe1ec..98e7be2 100644 --- a/swarms_cloud/structs/deploy_gcp.py +++ b/swarms_cloud/structs/deploy_gcp.py @@ -5,7 +5,7 @@ import pulumi import yaml from loguru import logger -from pulumi_gcp import cloudrun, storage +from pulumi_gcp import cloudrun # Configure logging logger.add("deploy_agent.log", rotation="10 MB", retention="10 days", level="INFO") diff --git a/swarms_cloud/utils/Untitled-1.py b/swarms_cloud/utils/Untitled-1.py new file mode 100644 index 0000000..6a69ab9 --- /dev/null +++ b/swarms_cloud/utils/Untitled-1.py @@ -0,0 +1,62 @@ +# %% +# Create a config +import requests + +url = "http://35.222.137.183:30001/collections" + +data = {"name": "my_collection"} + +response = requests.post(url, json=data) + +print("Status Code:", response.status_code) +print("Response JSON:", response.json()) + + +# %% +import requests + +collection_id = "my_collection" # Replace with the actual collection ID + +url = f"http://127.0.0.1:8000/collections/{collection_id}/documents" +data = { + "documents": [ + "This is a document about pineapples", + "This is a document about oranges", + ], +} + +response = requests.post(url, json=data) + +print("Status Code:", response.status_code) +print("Response JSON:", response.json()) + + +# %% +import requests + +collection_id = "my_collection" # Replace with the actual collection ID + +url = f"http://127.0.0.1:8000/collections/{collection_id}/documents" +data = {"query_texts": ["This is a query document about Hawaii"], "n_results": 2} + +response = requests.get(url, json=data) + +print("Status Code:", response.status_code) +print("Response JSON:", response.json()) + +# %% +import 
requests

collection_id = "my_collection"  # Replace with the actual collection ID

document_id = "your_document_id_here"  # Replace with the actual document ID
url = f"http://127.0.0.1:8000/collections/{collection_id}/documents/{document_id}"

response = requests.delete(url)

print("Status Code:", response.status_code)
print("Response JSON:", response.json())


# %% [markdown]
#
diff --git a/swarms_cloud/utils/log_to_swarms_database.py b/swarms_cloud/utils/log_to_swarms_database.py
new file mode 100644
index 0000000..e0786a9
--- /dev/null
+++ b/swarms_cloud/utils/log_to_swarms_database.py
@@ -0,0 +1,33 @@
+import os
+import requests
+
+
+def log_agent_data(data_dict: dict, retry_attempts: int = 1) -> dict | None:
+    """
+    Logs agent data to the Swarms database.
+
+    Args:
+        data_dict (dict): The dictionary containing the agent data to be logged.
+        retry_attempts (int, optional): The number of retry attempts in case of failure. Defaults to 1.
+
+    Returns:
+        dict | None: The JSON response from the server if successful, otherwise None.
+    """
+    url = "https://swarms.world/api/get-agents/log-agents"
+
+    headers = {
+        "Content-Type": "application/json",
+        # Security: never commit bearer tokens; read the key from the environment instead.
+        "Authorization": f"Bearer {os.getenv('SWARMS_API_KEY', '')}",
+    }
+
+    # for attempt in range(retry_attempts):
+    #     try:
+    response = requests.post(url, json=data_dict, headers=headers)
+    response.raise_for_status()
+    output = response.json()
+    return output
+    # except requests.exceptions.RequestException as e:
+    #     logger.error("Error logging agent data (Attempt {}): {}", attempt + 1, e)
+    # logger.error("Failed to log agent data after {} attempts.", retry_attempts)
+    # return "success"