|
#' Get locally installed Ollama models
#'
#' Queries the local `ollama` CLI for the names of installed models.
#'
#' @param models Optional character vector; if given, the result is
#'   restricted to the models present in this vector.
#' @return Character vector of available model names. Returns
#'   `character(0)` when the `ollama` binary is not on the PATH, so that
#'   sourcing this file (which calls this at top level to populate
#'   OLLAMA_MODELS) does not warn or fail on machines without Ollama.
#' @export
ai.get_ollama_models <- function(models = NULL) {
  ## Guard: system() would emit a shell warning/error at load time when
  ## the binary is missing; degrade to "no local models" instead.
  if (Sys.which("ollama") == "") {
    return(character(0))
  }
  ## First column of `ollama list`, skipping the header row.
  available.models <- system("ollama list | tail -n +2 | cut -d' ' -f 1",
                             intern = TRUE)
  if (!is.null(models)) {
    available.models <- intersect(models, available.models)
  }
  available.models
}
| 8 | + |
## Module-level defaults, resolved once at load time.
## Use `<-` (not `=`) for top-level assignment, per R convention.
OLLAMA_MODELS <- ai.get_ollama_models()
DEFAULT_LLM <- "gpt-5-nano"
| 11 | + |
## Interactive scratch values for stepping through ai.ask() by hand.
## The guard is always false, so this block is never executed.
if (FALSE) {
  model <- "gpt-5-nano"; prompt <- NULL
  model <- "gemma3:1b"; prompt <- NULL
  model <- "grok-4-fast-non-reasoning"; prompt <- NULL
}
| 17 | + |
#' Get remote (API-key gated) model name patterns
#'
#' Determines which remote model families are usable given the API keys
#' present in the environment, optionally filtering a candidate list.
#'
#' @param models Optional character vector of candidate model names.
#' @return When `models` is NULL/empty: the regex patterns of the usable
#'   families (or NULL when no keys are set). Otherwise: the subset of
#'   `models` matching a usable family (or NULL when no keys are set).
#' @export
ai.get_remote_models <- function(models = NULL) {
  keys <- NULL

  dbg("[ai.get_remote_models] models = ", models)
  dbg("[ai.get_remote_models] len.models = ", length(models))
  ## SECURITY: never write secret values to the debug log; report only
  ## whether each key is configured.
  dbg("[ai.get_remote_models] has.OPENAI_API_KEY = ", nzchar(Sys.getenv("OPENAI_API_KEY")))
  dbg("[ai.get_remote_models] has.XAI_API_KEY = ", nzchar(Sys.getenv("XAI_API_KEY")))
  dbg("[ai.get_remote_models] has.GROQ_API_KEY = ", nzchar(Sys.getenv("GROQ_API_KEY")))
  dbg("[ai.get_remote_models] has.GEMINI_API_KEY = ", nzchar(Sys.getenv("GEMINI_API_KEY")))

  ## One regex per provider family, enabled by the presence of its key.
  if (Sys.getenv("OPENAI_API_KEY") != "") keys <- c(keys, "gpt-.*")
  if (Sys.getenv("XAI_API_KEY") != "") keys <- c(keys, "grok-.*")
  if (Sys.getenv("GROQ_API_KEY") != "") keys <- c(keys, "groq:.*")
  if (Sys.getenv("GEMINI_API_KEY") != "") keys <- c(keys, "gemini-.*")

  if (is.null(models) || length(models) == 0 || models[1] == "") {
    ## No candidates given: return the usable patterns themselves.
    models <- keys
  } else if (!is.null(keys)) {
    ## Keep only candidates matching some usable family (anchored).
    regex <- paste0("^", keys, collapse = "|")
    models <- grep(regex, models, value = TRUE)
  } else {
    ## Candidates given but no keys configured: nothing is usable.
    models <- NULL
  }
  models
}
| 44 | + |
#' List all usable models (local Ollama plus remote APIs)
#'
#' @param models Optional character vector; when given, the result is
#'   the subset of `models` that is usable, in the order of `models`.
#' @return Character vector of usable model names.
#' @export
ai.get_models <- function(models = NULL) {
  usable <- c(ai.get_ollama_models(models), ai.get_remote_models(models))
  if (is.null(models)) {
    return(usable)
  }
  ## Preserve the caller's ordering when filtering.
  models[models %in% usable]
}
| 56 | + |
#' Check whether a given model can be used
#'
#' @param model Model name (character).
#' @return TRUE if the model is locally installed or reachable remotely.
#' @export
ai.model_is_available <- function(model) {
  usable <- ai.get_models(models = model)
  model %in% usable
}
| 61 | + |
#' Ask a question to an LLM
#'
#' Dispatches to the appropriate ellmer backend based on the model name,
#' or reuses an existing ellmer Chat object to continue a conversation.
#'
#' @param question Character string with the user question.
#' @param model Either a model name (character) or an ellmer `Chat`
#'   object; when a Chat is given, `prompt` is ignored.
#' @param prompt Optional system prompt for a newly created chat.
#' @return The assistant's reply as a character string, or NULL (after a
#'   message) when no backend could be created for `model`.
#' @export
ai.ask <- function(question, model = DEFAULT_LLM, prompt = NULL) {
  chat <- NULL
  if (inherits(model, "Chat")) {
    ## Caller passed a live conversation; keep using it.
    chat <- model
  } else if (is.character(model)) {
    if (model %in% OLLAMA_MODELS) {
      chat <- ellmer::chat_ollama(model = model, system_prompt = prompt)
    } else if (grepl("^gpt", model) && Sys.getenv("OPENAI_API_KEY") != "") {
      ## Fix: add separating space (message() concatenates without one).
      message("warning: using remote GPT model: ", model)
      chat <- ellmer::chat_openai(
        model = model, system_prompt = prompt,
        api_key = Sys.getenv("OPENAI_API_KEY"))
    } else if (grepl("^grok", model) && Sys.getenv("XAI_API_KEY") != "") {
      ## xAI exposes an OpenAI-compatible API; only the base URL differs.
      chat <- ellmer::chat_openai(
        model = model, system_prompt = prompt,
        api_key = Sys.getenv("XAI_API_KEY"),
        base_url = "https://api.x.ai/v1/")
    } else if (grepl("^groq:", model) && Sys.getenv("GROQ_API_KEY") != "") {
      ## Strip the routing prefix. Anchored, so only a leading "groq:"
      ## is removed, never an occurrence inside the model name; the
      ## "groq:" match also mirrors the "groq:.*" pattern used in
      ## ai.get_remote_models().
      model <- sub("^groq:", "", model)
      chat <- ellmer::chat_groq(
        model = model, system_prompt = prompt,
        api_key = Sys.getenv("GROQ_API_KEY"))
    } else if (grepl("^gemini", model) && Sys.getenv("GEMINI_API_KEY") != "") {
      chat <- ellmer::chat_google_gemini(
        model = model, system_prompt = prompt,
        api_key = Sys.getenv("GEMINI_API_KEY"))
    }
  }

  if (is.null(chat)) {
    message("ERROR. could not create model ", model)
    return(NULL)
  }
  ## chat() sends the question; discard its return value and read the
  ## reply text from the last turn (S4 slot access via @).
  . <- chat$chat(question, echo = FALSE)
  chat$last_turn()@text
}
| 101 | + |
#' Summarize the biological function of a collection of gene sets
#'
#' Builds an enrichment-summary prompt and sends it to the chosen LLM.
#'
#' @param gsets Character vector of gene set names.
#' @param pheno Optional phenotype to discuss the summary in relation to.
#' @param model LLM model name (see ai.get_models()).
#' @param detail 0 = very short, 1 = one short paragraph, >=2 = detailed.
#' @param html If TRUE, ask the model to format the answer as HTML.
#' @param verbose If >0, print the question being asked.
#' @return The model's summary as a character string (NULL on failure).
#' @export
ai.genesets_summary <- function(gsets, pheno = NULL, model = DEFAULT_LLM,
                                detail = 1, html = FALSE, verbose = 1) {
  q <- "Extract the main biological function of this list of gene sets that were found by doing geneset enrichment. Just give the answer. Do not acknowledge."
  ## Fix: separate the sentences with a space — paste0 previously glued
  ## this onto the previous sentence ("...acknowledge.Discuss...").
  if (!is.null(pheno)) {
    q <- paste0(q, " Discuss in relation with the phenotype: '", pheno, "'.")
  }
  if (detail == 0) q <- paste(q, "Be very very short.")
  if (detail == 1) q <- paste(q, "Describe in one short paragraph.")
  if (detail >= 2) q <- paste(q, "Describe in detail.")
  if (html) q <- paste(q, "Use HTML formatting.")
  if (verbose > 0) cat("Question:", q, "... \n")
  ss <- paste(gsets, collapse = "; ")
  q <- paste(q, "These are the genesets: <list>", ss, "</list>. ")
  ai.ask(q, model = model)
}
| 119 | + |
## (removed stray debug assignment `num=3`: it polluted the global
## namespace; the default value lives in the function signature below)
#' Extract keywords describing a collection of gene sets
#'
#' @param gsets Character vector of gene set names.
#' @param num Number of keywords to request from the model.
#' @param pheno Optional phenotype to relate the keywords to. (Fix: this
#'   argument was previously accepted but silently ignored.)
#' @param model LLM model name (see ai.get_models()).
#' @return The model's answer as a character string (NULL on failure).
#' @export
ai.genesets_keywords <- function(gsets, num = 3, pheno = NULL, model = DEFAULT_LLM) {
  ss <- paste(gsets, collapse = "; ")
  q <- paste0("Extract ", num, " keywords describing the following collection of gene sets. ")
  ## Honor `pheno`, mirroring ai.genesets_summary().
  if (!is.null(pheno)) {
    q <- paste0(q, "Relate them to the phenotype: '", pheno, "'. ")
  }
  q <- paste0(q, "These are the genesets: <list>", ss, "</list>. ")
  ai.ask(q, model = model)
}
| 129 | + |
| 130 | + |
# end of file